diff --git a/README.md b/README.md
index cf08fa329a5..24d419bd472 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ Solr is the blazing-fast, open source, multi-modal search platform built on [Apa
It powers full-text, vector, and geospatial search at many of the world's largest organizations.
[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/badge/icon?subject=Solr%20Artifacts)](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/)
-[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/badge/icon?subject=Solr%20Check)](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/)
+[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/badge/icon?subject=Solr%20Lint)](https://ci-builds.apache.org/job/Solr/job/Solr-Lint-main/)
For a complete description of the Solr project, team composition, source
code repositories, and other details, please see the Solr web site at
diff --git a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
index 88d91028552..adb8f3eaf07 100644
--- a/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
+++ b/build-tools/build-infra/src/main/java/org/apache/lucene/gradle/WrapperDownloader.java
@@ -22,7 +22,7 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
-import java.net.URL;
+import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -54,7 +54,7 @@ public static void main(String[] args) {
}
public static void checkVersion() {
- int major = Runtime.getRuntime().version().feature();
+ int major = Runtime.version().feature();
if (major < 21 || major > 23) {
throw new IllegalStateException(
"java version must be between 21 and 23, your version: " + major);
@@ -89,12 +89,12 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
}
}
- URL url =
- new URL(
+ URI uri =
+ URI.create(
"https://raw.githubusercontent.com/gradle/gradle/v"
+ wrapperVersion
+ "/gradle/wrapper/gradle-wrapper.jar");
- System.err.println("Downloading gradle-wrapper.jar from " + url);
+ System.err.println("Downloading gradle-wrapper.jar from " + uri);
// Zero-copy save the jar to a temp file
Path temp = Files.createTempFile(destination.getParent(), ".gradle-wrapper", ".tmp");
@@ -103,7 +103,7 @@ public void run(Path destination) throws IOException, NoSuchAlgorithmException {
int retryDelay = 30;
HttpURLConnection connection;
while (true) {
- connection = (HttpURLConnection) url.openConnection();
+ connection = (HttpURLConnection) uri.toURL().openConnection();
try {
connection.connect();
} catch (IOException e) {
diff --git a/dev-docs/asf-jenkins.adoc b/dev-docs/asf-jenkins.adoc
index b4d01bb3964..b3c59b6fd9e 100644
--- a/dev-docs/asf-jenkins.adoc
+++ b/dev-docs/asf-jenkins.adoc
@@ -10,13 +10,14 @@ This file aims to document our [ASF Jenkins](https://ci-builds.apache.org/job/So
We run a number of jobs on Jenkins, each validating an overlapping set of concerns:
* `Solr-Artifacts-*` - daily jobs that run `./gradlew assemble` to ensure that build artifacts (except docker images) can be created successfully
-* `Solr-check-*` - "hourly" jobs that run all project tests and static analysis (i.e. `test`, `integrationTest`, and `check`)
+* `Solr-Lint-*` - daily jobs that run static analysis (i.e. `precommit` and `check -x test`) on a branch
+* `Solr-Test-*` - "hourly" jobs that run all (non-integration) tests (i.e. `./gradlew test`)
+* `Solr-TestIntegration-*` - daily jobs that run project integration tests (i.e. `./gradlew integrationTest`)
* `Solr-Docker-Nightly-*` - daily jobs that `./gradlew testDocker dockerPush` to validate docker image packaging. Snapshot images are pushed to hub.docker.com
-* `Solr-reference-guide-*` - hourly jobs that build the Solr reference guide via `./gradlew checkSite` and push the resulting artifact to the staging/preview site `nightlies.apache.org`
+* `Solr-reference-guide-*` - daily jobs that build the Solr reference guide via `./gradlew checkSite` and push the resulting artifact to the staging/preview site `nightlies.apache.org`
* `Solr-Smoketest-*` - daily jobs that produce a snapshot release (via the `assembleRelease` task) and run the release smoketester
Most jobs that validate particular build artifacts are run "daily", which is sufficient to prevent any large breaks from creeping into the build.
-
On the other hand, jobs that run tests are triggered "hourly" in order to squeeze as many test runs as possible out of our Jenkins hardware.
This is a necessary consequence of Solr's heavy use of randomization in its test-suite.
"Hourly" scheduling ensures that a test run is either currently running or in the build queue at all times, and enables us to get the maximum data points from our hardware.
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 751e19073fd..244f68171b7 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -26,6 +26,10 @@ Improvements
* SOLR-16116: Apache Curator is now used to manage all Solr Zookeeper interactions. This should provide more stability in the Solr-Zookeeper interactions.
The solrj-zookeeper module, now has a dependency on curator. (Houston Putman, Kevin Risden, Mike Drob, David Smiley)
+* SOLR-17544: Solr CLI will now stop when you combine mutually exclusive options. Combining -s and -z options is a common example. (Eric Pugh, Christos Malliaridis)
+
+* SOLR-17495: Change Solr CLI delete command to not delete configs by default. Decouple lifecycle of collections from configsets. (Eric Pugh)
+
Optimizations
---------------------
(No changes)
@@ -141,7 +145,7 @@ Improvements
when PKI is used between nodes. (Jason Gerlowski)
* SOLR-17383: Resolved overlapping arguments in the Solr CLI. Removed duplicative but differing arguments,
- consolidated use of short form arguments -v to not have differing meanings based on tool. Provide deprecation warning
+ consolidated use of short form arguments -v to not have differing meanings based on tool. Provide deprecation warning
in command line when deprecated arguments are used. (Eric Pugh, Christos Malliaridis)
* SOLR-17256: Deprecate SolrRequest `setBasePath` and `getBasePath` methods. SolrJ users wishing to temporarily
@@ -179,6 +183,8 @@ Optimizations
* SOLR-16503: Switched from HTTP1 to HTTP2 in SolrClientCloudManager by replacing CloudLegacySolrClient with CloudHttp2SolrClient. (Sanjay Dutt, David Smiley)
+* SOLR-17453: Leverage waitForState() instead of busy waiting in CREATE, MIGRATE, REINDEXCOLLECTION, MOVEREPLICA commands, and in some tests. (Pierre Salagnac)
+
Bug Fixes
---------------------
* SOLR-12429: Uploading a configset with a symbolic link produces a IOException. Now a error message to user generated instead. (Eric Pugh)
@@ -221,6 +227,8 @@ led to the suppression of exceptions. (Andrey Bozhko)
* SOLR-17534: Introduce ClusterState.getCollectionNames, a convenience method (David Smiley)
+* SOLR-17535: Introduce ClusterState.collectionStream to replace getCollectionStates and getCollectionsMap (David Smiley)
+
* SOLR-17545: Upgrade to Gradle 8.10 (Houston Putman)
================== 9.7.1 ==================
diff --git a/solr/bin/solr.cmd b/solr/bin/solr.cmd
index 783a4feed00..5fd6ec44aea 100755
--- a/solr/bin/solr.cmd
+++ b/solr/bin/solr.cmd
@@ -54,7 +54,11 @@ IF NOT DEFINED JAVA_HOME (
set "JAVA_HOME=%%B"
)
)
-IF NOT DEFINED JAVA_HOME goto need_java_home
+IF NOT DEFINED JAVA_HOME (
+ REM Need java home
+ @echo "Please set the JAVA_HOME environment variable to the path where you installed Java !REQUIRED_JAVA_VERSION!+"
+ goto done
+)
set JAVA_HOME=%JAVA_HOME:"=%
IF %JAVA_HOME:~-1%==\ SET JAVA_HOME=%JAVA_HOME:~0,-1%
IF NOT EXIST "%JAVA_HOME%\bin\java.exe" (
@@ -244,59 +248,36 @@ IF "%SOLR_JETTY_HOST%"=="" (
set "SOLR_JETTY_HOST=127.0.0.1"
)
-set FIRST_ARG=%1
-
-IF [%1]==[] goto usage
-IF "%1"=="-h" goto run_solrcli
-IF "%1"=="--help" goto run_solrcli
-IF "%1"=="status" goto run_solrcli
-IF "%1"=="version" goto run_solrcli
-IF "%1"=="-v" goto run_solrcli
-IF "%1"=="--version" goto run_solrcli
-IF "%1"=="assert" goto run_solrcli
-IF "%1"=="zk" goto run_solrcli
-IF "%1"=="export" goto run_solrcli
-IF "%1"=="package" goto run_solrcli
-IF "%1"=="api" goto run_solrcli
-IF "%1"=="post" goto run_solrcli
-
-REM Only allow the command to be the first argument, assume start if not supplied
+REM Handle special commands
IF "%1"=="start" goto set_script_cmd
IF "%1"=="stop" goto set_script_cmd
IF "%1"=="restart" goto set_script_cmd
-IF "%1"=="healthcheck" goto run_solrcli
-IF "%1"=="create" goto run_solrcli
-IF "%1"=="delete" goto run_solrcli
-IF "%1"=="postlogs" goto run_solrcli
+IF "%1"=="auth" goto set_script_cmd
-IF "%1"=="auth" (
- set SCRIPT_CMD=auth
- SHIFT
- goto run_auth
+REM Handle all other commands by simply running SolrCLI
+"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^
+ -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
+ org.apache.solr.cli.SolrCLI %*
+if errorlevel 1 (
+ exit /b 1
)
-IF "%1"=="config" goto run_solrcli
+goto done
+:set_script_cmd
+set SCRIPT_CMD=%1
+SHIFT
+IF "%SCRIPT_CMD%"=="auth" goto run_auth
goto parse_args
:usage
IF NOT "%SCRIPT_ERROR%"=="" ECHO %SCRIPT_ERROR%
-IF [%FIRST_ARG%]==[] goto run_solrcli
-IF "%FIRST_ARG%"=="-h" goto run_solrcli
-IF "%FIRST_ARG%"=="--help" goto run_solrcli
IF "%SCRIPT_CMD%"=="start" goto start_usage
IF "%SCRIPT_CMD%"=="restart" goto start_usage
IF "%SCRIPT_CMD%"=="stop" goto stop_usage
-IF "%SCRIPT_CMD%"=="healthcheck" goto run_solrcli
-IF "%SCRIPT_CMD%"=="create" goto run_solrcli
-IF "%SCRIPT_CMD%"=="delete" goto run_solrcli
-IF "%SCRIPT_CMD%"=="cluster" goto run_solrcli
-IF "%SCRIPT_CMD%"=="zk" goto run_solrcli
-IF "%SCRIPT_CMD%"=="auth" goto run_solrcli
-IF "%SCRIPT_CMD%"=="package" goto run_solrcli
-IF "%SCRIPT_CMD%"=="status" goto run_solrcli
-IF "%SCRIPT_CMD%"=="postlogs" goto run_solrcli
-goto done
+REM Should not be reachable, but just in case
+goto err
:start_usage
@echo.
@@ -374,16 +355,19 @@ goto done
@echo.
goto done
-
-REM Really basic command-line arg parsing
+REM Parse arguments for special commands (start, stop, restart)
:parse_args
set "arg=%~1"
set "firstTwo=%arg:~0,2%"
-IF "%SCRIPT_CMD%"=="" set SCRIPT_CMD=start
-IF [%1]==[] goto process_script_cmd
-IF "%1"=="--help" goto usage
-IF "%1"=="-h" goto usage
+
+REM In case no arguments left, run special command
+IF [%1]==[] goto run_special_command
+
+REM Skip start / restart arguments if stop command
+IF "%SCRIPT_CMD%"=="stop" goto parse_stop_args
+
+:parse_start_args
IF "%1"=="-f" goto set_foreground_mode
IF "%1"=="--foreground" goto set_foreground_mode
IF "%1"=="--verbose" goto set_verbose
@@ -398,8 +382,6 @@ IF "%1"=="--example" goto set_example
IF "%1"=="--host" goto set_host
IF "%1"=="-m" goto set_memory
IF "%1"=="--memory" goto set_memory
-IF "%1"=="-p" goto set_port
-IF "%1"=="--port" goto set_port
IF "%1"=="-z" goto set_zookeeper
IF "%1"=="--zk-host" goto set_zookeeper
IF "%1"=="-s" goto set_solr_url
@@ -407,20 +389,33 @@ IF "%1"=="--solr-url" goto set_solr_url
IF "%1"=="--jvm-opts" goto set_jvm_opts
IF "%1"=="-j" goto set_addl_jetty_config
IF "%1"=="--jettyconfig" goto set_addl_jetty_config
-IF "%1"=="--no-prompt" goto set_noprompt
IF "%1"=="-y" goto set_noprompt
+IF "%1"=="--no-prompt" goto set_noprompt
+
+REM Skip stop arg parsing if not stop command
+IF NOT "%SCRIPT_CMD%"=="stop" goto parse_general_args
+
+:parse_stop_args
IF "%1"=="-k" goto set_stop_key
IF "%1"=="--key" goto set_stop_key
IF "%1"=="--all" goto set_stop_all
+
+:parse_general_args
+
+REM Print usage of command in case help option included
+IF "%1"=="--help" goto usage
+IF "%1"=="-h" goto usage
+
+REM other args supported by all special commands
+IF "%1"=="-p" goto set_port
+IF "%1"=="--port" goto set_port
IF "%firstTwo%"=="-D" goto set_passthru
+
+REM Argument not supported / found
IF NOT "%1"=="" goto invalid_cmd_line
+REM Not reachable, but just in case
goto invalid_cmd_line
-:set_script_cmd
-set SCRIPT_CMD=%1
-SHIFT
-goto parse_args
-
:set_foreground_mode
set FG=1
SHIFT
@@ -670,8 +665,27 @@ set "PASS_TO_RUN_EXAMPLE=--no-prompt !PASS_TO_RUN_EXAMPLE!"
SHIFT
goto parse_args
-REM Perform the requested command after processing args
-:process_script_cmd
+REM Handle invalid arguments passed to special commands (start, stop, restart)
+:invalid_cmd_line
+@echo.
+IF "!SCRIPT_ERROR!"=="" (
+ @echo Invalid command-line option: %1
+) ELSE (
+ @echo ERROR: !SCRIPT_ERROR!
+)
+@echo.
+IF "%SCRIPT_CMD%"=="start" (
+ goto start_usage
+) ELSE IF "%SCRIPT_CMD%"=="restart" (
+ goto start_usage
+) ELSE IF "%SCRIPT_CMD%"=="stop" (
+ goto stop_usage
+)
+REM Not reachable, but just in case
+goto err
+
+REM Process special commands (start, stop, restart)
+:run_special_command
IF "%verbose%"=="1" (
CALL :safe_echo "Using Solr root directory: %SOLR_TIP%"
@@ -712,36 +726,20 @@ IF NOT EXIST "%SOLR_SERVER_DIR%" (
goto err
)
-IF NOT "%EXAMPLE%"=="" goto run_example
-
-:start_solr
-IF "%SOLR_HOME%"=="" set "SOLR_HOME=%SOLR_SERVER_DIR%\solr"
-IF EXIST "%cd%\%SOLR_HOME%" set "SOLR_HOME=%cd%\%SOLR_HOME%"
-
-IF NOT EXIST "%SOLR_HOME%\" (
- IF EXIST "%SOLR_SERVER_DIR%\%SOLR_HOME%" (
- set "SOLR_HOME=%SOLR_SERVER_DIR%\%SOLR_HOME%"
- ) ELSE (
- set "SCRIPT_ERROR=Solr home directory %SOLR_HOME% not found!"
- goto err
- )
-)
-
IF "%STOP_KEY%"=="" set STOP_KEY=solrrocks
-@REM This is quite hacky, but examples rely on a different log4j2.xml
-@REM so that we can write logs for examples to %SOLR_HOME%\..\logs
-IF [%SOLR_LOGS_DIR%] == [] (
- set "SOLR_LOGS_DIR=%SOLR_SERVER_DIR%\logs"
-) ELSE (
- set SOLR_LOGS_DIR=%SOLR_LOGS_DIR:"=%
-)
+IF NOT "%EXAMPLE%"=="" (
+ REM Run the requested example
-set "EXAMPLE_DIR=%SOLR_TIP%\example"
-set TMP_SOLR_HOME=!SOLR_HOME:%EXAMPLE_DIR%=!
-IF NOT "%TMP_SOLR_HOME%"=="%SOLR_HOME%" (
- set "SOLR_LOGS_DIR=%SOLR_HOME%\..\logs"
- set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml"
+ "%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^
+ -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^
+ -Dsolr.install.symDir="%SOLR_TIP%" ^
+ -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
+ org.apache.solr.cli.SolrCLI run_example --script "%SDIR%\solr.cmd" -e %EXAMPLE% --server-dir "%SOLR_SERVER_DIR%" ^
+ --url-scheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
+
+ REM End of run_example
+ goto done
)
set IS_RESTART=0
@@ -754,77 +752,112 @@ IF "%SCRIPT_CMD%"=="restart" (
set IS_RESTART=1
)
+REM Skip to start if not stop or restart (which executes stop first)
+IF "%SCRIPT_CMD%"=="start" goto start_solr
+
@REM stop logic here
+:stop_solr
IF "%SOLR_STOP_WAIT%"=="" (
set SOLR_STOP_WAIT=180
)
-IF "%SCRIPT_CMD%"=="stop" (
- IF "%SOLR_PORT%"=="" (
- IF "%STOP_ALL%"=="1" (
- set found_it=0
- for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (
- set SOME_SOLR_PORT=
- For /F "delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J
- if NOT "!SOME_SOLR_PORT!"=="" (
- for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do (
- @REM j is the ip:port and k is the pid
- IF NOT "%%k"=="0" (
- IF "%%j"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" (
- set found_it=1
- @echo Stopping Solr process %%k running on port !SOME_SOLR_PORT!
- IF "%STOP_PORT%"=="" (
- set /A LOCAL_STOP_PORT=!SOME_SOLR_PORT! - 1000
- ) else (
- set LOCAL_STOP_PORT=%STOP_PORT%
- )
- "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!LOCAL_STOP_PORT! STOP.KEY=%STOP_KEY% --stop
- del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port
- REM wait for the process to terminate
- CALL :wait_for_process_exit %%k !SOLR_STOP_WAIT!
- REM Kill it if it is still running after the graceful shutdown
- IF EXIST "%JAVA_HOME%\bin\jstack.exe" (
- qprocess "%%k" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%k && taskkill /f /PID %%k
- ) else (
- qprocess "%%k" >nul 2>nul && taskkill /f /PID %%k
- )
+IF "%SOLR_PORT%"=="" (
+ IF "%STOP_ALL%"=="1" (
+ REM Stop all running Solr instances
+ set found_it=0
+ for /f "usebackq" %%i in (`dir /b "%SOLR_TIP%\bin" ^| findstr /i "^solr-.*\.port$"`) do (
+ set SOME_SOLR_PORT=
+ For /F "delims=" %%J In ('type "%SOLR_TIP%\bin\%%i"') do set SOME_SOLR_PORT=%%~J
+ if NOT "!SOME_SOLR_PORT!"=="" (
+ for /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":!SOME_SOLR_PORT! "') do (
+ @REM j is the ip:port and k is the pid
+ IF NOT "%%k"=="0" (
+ IF "%%j"=="%SOLR_JETTY_HOST%:!SOME_SOLR_PORT!" (
+ set found_it=1
+ @echo Stopping Solr process %%k running on port !SOME_SOLR_PORT!
+ IF "%STOP_PORT%"=="" (
+ set /A LOCAL_STOP_PORT=!SOME_SOLR_PORT! - 1000
+ ) else (
+ set LOCAL_STOP_PORT=%STOP_PORT%
+ )
+ "%JAVA%" %SOLR_SSL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" STOP.PORT=!LOCAL_STOP_PORT! STOP.KEY=%STOP_KEY% --stop
+ del "%SOLR_TIP%"\bin\solr-!SOME_SOLR_PORT!.port
+ REM wait for the process to terminate
+ CALL :wait_for_process_exit %%k !SOLR_STOP_WAIT!
+ REM Kill it if it is still running after the graceful shutdown
+ IF EXIST "%JAVA_HOME%\bin\jstack.exe" (
+ qprocess "%%k" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%k && taskkill /f /PID %%k
+ ) else (
+ qprocess "%%k" >nul 2>nul && taskkill /f /PID %%k
)
)
)
)
)
- if "!found_it!"=="0" echo No Solr nodes found to stop.
- ) ELSE (
- set "SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use --all to stop all running nodes on this host."
- goto err
)
+ if "!found_it!"=="0" echo No Solr nodes found to stop.
) ELSE (
- set found_it=0
- For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do (
- IF NOT "%%N"=="0" (
- IF "%%M"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" (
- set found_it=1
- @echo Stopping Solr process %%N running on port %SOLR_PORT%
- IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000
- "%JAVA%" %SOLR_SSL_OPTS% %SOLR_TOOL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" %SOLR_JETTY_CONFIG% STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop
- del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
- REM wait for the process to terminate
- CALL :wait_for_process_exit %%N !SOLR_STOP_WAIT!
- REM Kill it if it is still running after the graceful shutdown
- IF EXIST "%JAVA_HOME%\bin\jstack.exe" (
- qprocess "%%N" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%N && taskkill /f /PID %%N
- ) else (
- qprocess "%%N" >nul 2>nul && taskkill /f /PID %%N
- )
+ set "SCRIPT_ERROR=Must specify the port when trying to stop Solr, or use --all to stop all running nodes on this host."
+ goto err
+ )
+) ELSE (
+ REM Stop Solr running on specific port
+ set found_it=0
+ For /f "tokens=2,5" %%M in ('netstat -nao ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do (
+ IF NOT "%%N"=="0" (
+ IF "%%M"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" (
+ set found_it=1
+ @echo Stopping Solr process %%N running on port %SOLR_PORT%
+ IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000
+ "%JAVA%" %SOLR_SSL_OPTS% %SOLR_TOOL_OPTS% -Djetty.home="%SOLR_SERVER_DIR%" -jar "%SOLR_SERVER_DIR%\start.jar" %SOLR_JETTY_CONFIG% STOP.PORT=!STOP_PORT! STOP.KEY=%STOP_KEY% --stop
+ del "%SOLR_TIP%"\bin\solr-%SOLR_PORT%.port
+ REM wait for the process to terminate
+ CALL :wait_for_process_exit %%N !SOLR_STOP_WAIT!
+ REM Kill it if it is still running after the graceful shutdown
+ IF EXIST "%JAVA_HOME%\bin\jstack.exe" (
+ qprocess "%%N" >nul 2>nul && "%JAVA_HOME%\bin\jstack.exe" %%N && taskkill /f /PID %%N
+ ) else (
+ qprocess "%%N" >nul 2>nul && taskkill /f /PID %%N
)
)
)
- if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT%
)
+ if "!found_it!"=="0" echo No Solr found running on port %SOLR_PORT%
+)
+
+IF "!IS_RESTART!"=="0" goto done
+
+REM Clean state here, continue with starting (start or restart)
+set SCRIPT_CMD=start
+
+:start_solr
+REM Prepare for starting Solr
+IF "%SOLR_HOME%"=="" set "SOLR_HOME=%SOLR_SERVER_DIR%\solr"
+IF EXIST "%cd%\%SOLR_HOME%" set "SOLR_HOME=%cd%\%SOLR_HOME%"
+
+IF NOT EXIST "%SOLR_HOME%\" (
+ IF EXIST "%SOLR_SERVER_DIR%\%SOLR_HOME%" (
+ set "SOLR_HOME=%SOLR_SERVER_DIR%\%SOLR_HOME%"
+ ) ELSE (
+ set "SCRIPT_ERROR=Solr home directory %SOLR_HOME% not found!"
+ goto err
+ )
+)
+
+@REM This is quite hacky, but examples rely on a different log4j2.xml
+@REM so that we can write logs for examples to %SOLR_HOME%\..\logs
+IF [%SOLR_LOGS_DIR%] == [] (
+ set "SOLR_LOGS_DIR=%SOLR_SERVER_DIR%\logs"
+) ELSE (
+ set SOLR_LOGS_DIR=%SOLR_LOGS_DIR:"=%
+)
- IF "!IS_RESTART!"=="0" goto done
+set "EXAMPLE_DIR=%SOLR_TIP%\example"
+set TMP_SOLR_HOME=!SOLR_HOME:%EXAMPLE_DIR%=!
+IF NOT "%TMP_SOLR_HOME%"=="%SOLR_HOME%" (
+ set "SOLR_LOGS_DIR=%SOLR_HOME%\..\logs"
+ set "LOG4J_CONFIG=%SOLR_SERVER_DIR%\resources\log4j2.xml"
)
-IF "!IS_RESTART!"=="1" set SCRIPT_CMD=start
IF "%SOLR_PORT%"=="" set SOLR_PORT=8983
IF "%STOP_PORT%"=="" set /A STOP_PORT=%SOLR_PORT% - 1000
@@ -841,21 +874,19 @@ IF DEFINED SOLR_ZK_EMBEDDED_HOST (
set "SCRIPT_SOLR_OPTS=%SCRIPT_SOLR_OPTS% -Dsolr.zk.embedded.host=%SOLR_ZK_EMBEDDED_HOST%"
)
-IF "%SCRIPT_CMD%"=="start" (
- REM see if Solr is already running using netstat
- For /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do (
- IF NOT "%%k"=="0" (
- IF "%%j"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" (
- set "SCRIPT_ERROR=Process %%k is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT"
- goto err
- )
+REM Make sure Solr is not running using netstat
+For /f "tokens=2,5" %%j in ('netstat -aon ^| find "TCP " ^| find ":0 " ^| find ":%SOLR_PORT% "') do (
+ IF NOT "%%k"=="0" (
+ IF "%%j"=="%SOLR_JETTY_HOST%:%SOLR_PORT%" (
+ set "SCRIPT_ERROR=Process %%k is already listening on port %SOLR_PORT%. If this is Solr, please stop it first before starting (or use restart). If this is not Solr, then please choose a different port using -p PORT"
+ goto err
)
)
+)
- IF "%EMPTY_ADDL_JVM_ARGS%"=="true" (
- set "SCRIPT_ERROR=JVM options are required when using the -a or --jvm-opts option!"
- goto err
- )
+IF "%EMPTY_ADDL_JVM_ARGS%"=="true" (
+ set "SCRIPT_ERROR=JVM options are required when using the -a or --jvm-opts option!"
+ goto err
)
@REM determine if -server flag is supported by current JVM
@@ -1131,50 +1162,6 @@ IF "%FG%"=="1" (
goto done
-:run_example
-REM Run the requested example
-
-"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^
- -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^
- -Dsolr.install.symDir="%SOLR_TIP%" ^
- -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
- org.apache.solr.cli.SolrCLI run_example --script "%SDIR%\solr.cmd" -e %EXAMPLE% --server-dir "%SOLR_SERVER_DIR%" ^
- --url-scheme !SOLR_URL_SCHEME! !PASS_TO_RUN_EXAMPLE!
-
-REM End of run_example
-goto done
-
-:run_solrcli
-"%JAVA%" %SOLR_SSL_OPTS% %AUTHC_OPTS% %SOLR_ZK_CREDS_AND_ACLS% %SOLR_TOOL_OPTS% -Dsolr.install.dir="%SOLR_TIP%" ^
- -Dlog4j.configurationFile="file:///%DEFAULT_SERVER_DIR%\resources\log4j2-console.xml" ^
- -classpath "%DEFAULT_SERVER_DIR%\solr-webapp\webapp\WEB-INF\lib\*;%DEFAULT_SERVER_DIR%\lib\ext\*" ^
- org.apache.solr.cli.SolrCLI %*
-if errorlevel 1 (
- exit /b 1
-)
-goto done
-
-:parse_config_args
-IF [%1]==[] goto run_config
-IF "%1"=="-z" goto set_config_zk
-IF "%1"=="--zk-host" goto set_config_zk
-IF "%1"=="--scheme" goto set_config_url_scheme
-set "CONFIG_ARGS=!CONFIG_ARGS! %1"
-SHIFT
-goto parse_config_args
-
-:set_config_zk
-set ZK_HOST=%~2
-SHIFT
-SHIFT
-goto parse_config_args
-
-:set_config_url_scheme
-set SOLR_URL_SCHEME=%~2
-SHIFT
-SHIFT
-goto parse_config_args
-
:run_auth
REM Options parsing.
REM Note: With the following technique of parsing, it is not possible
@@ -1231,49 +1218,12 @@ if "!AUTH_PORT!"=="" (
--solr-url !SOLR_URL_SCHEME!://%SOLR_TOOL_HOST%:!AUTH_PORT!
goto done
-
-:invalid_cmd_line
-@echo.
-IF "!SCRIPT_ERROR!"=="" (
- @echo Invalid command-line option: %1
-) ELSE (
- @echo ERROR: !SCRIPT_ERROR!
-)
-@echo.
-IF "%FIRST_ARG%"=="start" (
- goto start_usage
-) ELSE IF "%FIRST_ARG:~0,1%" == "-" (
- goto start_usage
-) ELSE IF "%FIRST_ARG%"=="restart" (
- goto start_usage
-) ELSE IF "%FIRST_ARG%"=="stop" (
- goto stop_usage
-) ELSE IF "%FIRST_ARG%"=="healthcheck" (
- goto run_solrcli
-) ELSE IF "%FIRST_ARG%"=="create" (
- goto run_solrcli
-) ELSE IF "%FIRST_ARG%"=="zk" (
- goto run_solrcli
-) ELSE IF "%FIRST_ARG%"=="auth" (
- goto run_solrcli
-) ELSE IF "%FIRST_ARG%"=="status" (
- goto run_solrcli
-)
-
-:need_java_home
-@echo Please set the JAVA_HOME environment variable to the path where you installed Java 21+
-goto done
-
:err
@echo.
@echo ERROR: !SCRIPT_ERROR!
@echo.
exit /b 1
-:done
-ENDLOCAL
-exit /b 0
-
REM Tests what Java we have and sets some global variables
:resolve_java_info
@@ -1347,3 +1297,7 @@ GOTO :eof
)
)
GOTO :eof
+
+:done
+ENDLOCAL
+exit /b 0
diff --git a/solr/core/src/java/org/apache/solr/cli/ApiTool.java b/solr/core/src/java/org/apache/solr/cli/ApiTool.java
index 923505c8310..ede4de68971 100644
--- a/solr/core/src/java/org/apache/solr/cli/ApiTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ApiTool.java
@@ -19,9 +19,9 @@
import java.io.PrintStream;
import java.net.URI;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.impl.JsonMapResponseParser;
import org.apache.solr.client.solrj.request.GenericSolrRequest;
@@ -36,6 +36,16 @@
*
Used to send an arbitrary HTTP request to a Solr API endpoint.
*/
public class ApiTool extends ToolBase {
+
+ private static final Option SOLR_URL_OPTION =
+ Option.builder("s")
+ .longOpt("solr-url")
+ .hasArg()
+ .argName("URL")
+ .required()
+ .desc("Send a GET request to a Solr API endpoint.")
+ .build();
+
public ApiTool() {
this(CLIO.getOutStream());
}
@@ -50,22 +60,16 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder()
- .longOpt("solr-url")
- .argName("URL")
- .hasArg()
- .required(true)
- .desc("Send a GET request to a Solr API endpoint.")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(SOLR_URL_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- String getUrl = cli.getOptionValue("solr-url");
- String response = callGet(getUrl, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ String getUrl = cli.getOptionValue(SOLR_URL_OPTION);
+ String response = callGet(getUrl, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
// pretty-print the response to stdout
echo(response);
diff --git a/solr/core/src/java/org/apache/solr/cli/AssertTool.java b/solr/core/src/java/org/apache/solr/cli/AssertTool.java
index d43cd93ec22..b111ef7b481 100644
--- a/solr/core/src/java/org/apache/solr/cli/AssertTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/AssertTool.java
@@ -23,10 +23,11 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileOwnerAttributeView;
-import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.HealthCheckRequest;
@@ -44,6 +45,105 @@ public class AssertTool extends ToolBase {
private static boolean useExitCode = false;
private static Long timeoutMs = 1000L;
+ private static final Option IS_NOT_ROOT_OPTION =
+ Option.builder().desc("Asserts that we are NOT the root user.").longOpt("not-root").build();
+
+ private static final Option IS_ROOT_OPTION =
+ Option.builder().desc("Asserts that we are the root user.").longOpt("root").build();
+
+ private static final OptionGroup ROOT_OPTION =
+ new OptionGroup().addOption(IS_NOT_ROOT_OPTION).addOption(IS_ROOT_OPTION);
+
+ private static final Option IS_NOT_RUNNING_ON_OPTION =
+ Option.builder()
+ .desc("Asserts that Solr is NOT running on a certain URL. Default timeout is 1000ms.")
+ .longOpt("not-started")
+ .hasArg()
+ .argName("url")
+ .build();
+
+ private static final Option IS_RUNNING_ON_OPTION =
+ Option.builder()
+ .desc("Asserts that Solr is running on a certain URL. Default timeout is 1000ms.")
+ .longOpt("started")
+ .hasArg()
+ .argName("url")
+ .build();
+
+ private static final OptionGroup RUNNING_OPTION =
+ new OptionGroup().addOption(IS_NOT_RUNNING_ON_OPTION).addOption(IS_RUNNING_ON_OPTION);
+
+ private static final Option SAME_USER_OPTION =
+ Option.builder()
+          .desc("Asserts that we run as same user that owns <directory>.")
+ .longOpt("same-user")
+ .hasArg()
+ .argName("directory")
+ .build();
+
+ private static final Option DIRECTORY_EXISTS_OPTION =
+ Option.builder()
+ .desc("Asserts that directory exists.")
+ .longOpt("exists")
+ .hasArg()
+ .argName("directory")
+ .build();
+
+ private static final Option DIRECTORY_NOT_EXISTS_OPTION =
+ Option.builder()
+ .desc("Asserts that directory does NOT exist.")
+ .longOpt("not-exists")
+ .hasArg()
+ .argName("directory")
+ .build();
+
+ private static final OptionGroup DIRECTORY_OPTION =
+ new OptionGroup().addOption(DIRECTORY_EXISTS_OPTION).addOption(DIRECTORY_NOT_EXISTS_OPTION);
+
+ private static final Option IS_CLOUD_OPTION =
+ Option.builder()
+ .desc(
+ "Asserts that Solr is running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.")
+ .longOpt("cloud")
+ .hasArg()
+ .argName("url")
+ .build();
+
+ private static final Option IS_NOT_CLOUD_OPTION =
+ Option.builder()
+ .desc(
+ "Asserts that Solr is not running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.")
+ .longOpt("not-cloud")
+ .hasArg()
+ .argName("url")
+ .build();
+
+ private static final OptionGroup CLOUD_OPTION =
+ new OptionGroup().addOption(IS_CLOUD_OPTION).addOption(IS_NOT_CLOUD_OPTION);
+
+ private static final Option MESSAGE_OPTION =
+ Option.builder()
+ .desc("Exception message to be used in place of the default error message.")
+ .longOpt("message")
+ .hasArg()
+ .argName("message")
+ .build();
+
+ private static final Option TIMEOUT_OPTION =
+ Option.builder()
+ .desc("Timeout in ms for commands supporting a timeout.")
+ .longOpt("timeout")
+ .hasArg()
+ .type(Long.class)
+ .argName("ms")
+ .build();
+
+ private static final Option EXIT_CODE_OPTION =
+ Option.builder()
+ .desc("Return an exit code instead of printing error message on assert fail.")
+ .longOpt("exitcode")
+ .build();
+
public AssertTool() {
this(CLIO.getOutStream());
}
@@ -58,72 +158,17 @@ public String getName() {
}
@Override
-  public List<Option> getOptions() {
- return List.of(
- Option.builder().desc("Asserts that we are NOT the root user.").longOpt("not-root").build(),
- Option.builder().desc("Asserts that we are the root user.").longOpt("root").build(),
- Option.builder()
- .desc("Asserts that Solr is NOT running on a certain URL. Default timeout is 1000ms.")
- .longOpt("not-started")
- .hasArg(true)
- .argName("url")
- .build(),
- Option.builder()
- .desc("Asserts that Solr is running on a certain URL. Default timeout is 1000ms.")
- .longOpt("started")
- .hasArg(true)
- .argName("url")
- .build(),
- Option.builder()
-            .desc("Asserts that we run as same user that owns <directory>.")
- .longOpt("same-user")
- .hasArg(true)
- .argName("directory")
- .build(),
- Option.builder()
- .desc("Asserts that directory exists.")
- .longOpt("exists")
- .hasArg(true)
- .argName("directory")
- .build(),
- Option.builder()
- .desc("Asserts that directory does NOT exist.")
- .longOpt("not-exists")
- .hasArg(true)
- .argName("directory")
- .build(),
- Option.builder()
- .desc(
- "Asserts that Solr is running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.")
- .longOpt("cloud")
- .hasArg(true)
- .argName("url")
- .build(),
- Option.builder()
- .desc(
- "Asserts that Solr is not running in cloud mode. Also fails if Solr not running. URL should be for root Solr path.")
- .longOpt("not-cloud")
- .hasArg(true)
- .argName("url")
- .build(),
- Option.builder()
- .desc("Exception message to be used in place of the default error message.")
- .longOpt("message")
- .hasArg(true)
- .argName("message")
- .build(),
- Option.builder()
- .desc("Timeout in ms for commands supporting a timeout.")
- .longOpt("timeout")
- .hasArg(true)
- .type(Long.class)
- .argName("ms")
- .build(),
- Option.builder()
- .desc("Return an exit code instead of printing error message on assert fail.")
- .longOpt("exitcode")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOptionGroup(ROOT_OPTION)
+ .addOptionGroup(RUNNING_OPTION)
+ .addOption(SAME_USER_OPTION)
+ .addOptionGroup(DIRECTORY_OPTION)
+ .addOptionGroup(CLOUD_OPTION)
+ .addOption(MESSAGE_OPTION)
+ .addOption(TIMEOUT_OPTION)
+ .addOption(EXIT_CODE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION);
}
/**
@@ -137,8 +182,6 @@ public List getOptions() {
*/
@Override
public int runTool(CommandLine cli) throws Exception {
- verbose = cli.hasOption(SolrCLI.OPTION_VERBOSE.getLongOpt());
-
int toolExitStatus;
try {
toolExitStatus = runAssert(cli);
@@ -146,7 +189,7 @@ public int runTool(CommandLine cli) throws Exception {
// since this is a CLI, spare the user the stacktrace
String excMsg = exc.getMessage();
if (excMsg != null) {
- if (verbose) {
+ if (isVerbose()) {
CLIO.err("\nERROR: " + exc + "\n");
} else {
CLIO.err("\nERROR: " + excMsg + "\n");
@@ -172,58 +215,49 @@ public void runImpl(CommandLine cli) throws Exception {
* @throws Exception if a tool failed, e.g. authentication failure
*/
protected int runAssert(CommandLine cli) throws Exception {
- if (cli.hasOption("m")) {
- message = cli.getOptionValue("m");
- }
- if (cli.hasOption("message")) {
- message = cli.getOptionValue("message");
- }
- if (cli.hasOption("timeout")) {
- timeoutMs = cli.getParsedOptionValue("timeout");
- }
- if (cli.hasOption("exitcode")) {
- useExitCode = true;
- }
+ message = cli.getOptionValue(MESSAGE_OPTION);
+ timeoutMs = cli.getParsedOptionValue(TIMEOUT_OPTION, timeoutMs);
+ useExitCode = cli.hasOption(EXIT_CODE_OPTION);
int ret = 0;
- if (cli.hasOption("root")) {
+ if (cli.hasOption(IS_ROOT_OPTION)) {
ret += assertRootUser();
}
- if (cli.hasOption("not-root")) {
+ if (cli.hasOption(IS_NOT_ROOT_OPTION)) {
ret += assertNotRootUser();
}
- if (cli.hasOption("exists")) {
- ret += assertFileExists(cli.getOptionValue("exists"));
+ if (cli.hasOption(DIRECTORY_EXISTS_OPTION)) {
+ ret += assertFileExists(cli.getOptionValue(DIRECTORY_EXISTS_OPTION));
}
- if (cli.hasOption("not-exists")) {
- ret += assertFileNotExists(cli.getOptionValue("not-exists"));
+ if (cli.hasOption(DIRECTORY_NOT_EXISTS_OPTION)) {
+ ret += assertFileNotExists(cli.getOptionValue(DIRECTORY_NOT_EXISTS_OPTION));
}
- if (cli.hasOption("same-user")) {
- ret += sameUser(cli.getOptionValue("same-user"));
+ if (cli.hasOption(SAME_USER_OPTION)) {
+ ret += sameUser(cli.getOptionValue(SAME_USER_OPTION));
}
- if (cli.hasOption("started")) {
+ if (cli.hasOption(IS_RUNNING_ON_OPTION)) {
ret +=
assertSolrRunning(
- cli.getOptionValue("started"),
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ cli.getOptionValue(IS_RUNNING_ON_OPTION),
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
}
- if (cli.hasOption("not-started")) {
+ if (cli.hasOption(IS_NOT_RUNNING_ON_OPTION)) {
ret +=
assertSolrNotRunning(
- cli.getOptionValue("not-started"),
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ cli.getOptionValue(IS_NOT_RUNNING_ON_OPTION),
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
}
- if (cli.hasOption("cloud")) {
+ if (cli.hasOption(IS_CLOUD_OPTION)) {
ret +=
assertSolrRunningInCloudMode(
- CLIUtils.normalizeSolrUrl(cli.getOptionValue("cloud")),
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ CLIUtils.normalizeSolrUrl(cli.getOptionValue(IS_CLOUD_OPTION)),
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
}
- if (cli.hasOption("not-cloud")) {
+ if (cli.hasOption(IS_NOT_CLOUD_OPTION)) {
ret +=
assertSolrNotRunningInCloudMode(
- CLIUtils.normalizeSolrUrl(cli.getOptionValue("not-cloud")),
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ CLIUtils.normalizeSolrUrl(cli.getOptionValue(IS_NOT_CLOUD_OPTION)),
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
}
return ret;
}
diff --git a/solr/core/src/java/org/apache/solr/cli/AuthTool.java b/solr/core/src/java/org/apache/solr/cli/AuthTool.java
index 4dbf9a98130..45b609a2943 100644
--- a/solr/core/src/java/org/apache/solr/cli/AuthTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/AuthTool.java
@@ -35,8 +35,8 @@
import java.util.Locale;
import java.util.stream.Collectors;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.lucene.util.Constants;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.util.StrUtils;
@@ -46,6 +46,70 @@
/** Supports auth command in the bin/solr script. */
public class AuthTool extends ToolBase {
+
+ private static final Option TYPE_OPTION =
+ Option.builder()
+ .longOpt("type")
+ .hasArg()
+ .desc(
+ "The authentication mechanism to enable (basicAuth or kerberos). Defaults to 'basicAuth'.")
+ .build();
+
+ private static final Option PROMPT_OPTION =
+ Option.builder()
+ .longOpt("prompt")
+ .hasArg()
+ .type(Boolean.class)
+ .desc(
+ "Prompts the user to provide the credentials. Use either --credentials or --prompt, not both.")
+ .build();
+
+ private static final Option CONFIG_OPTION =
+ Option.builder()
+ .longOpt("config")
+ .hasArgs()
+ .desc(
+ "Configuration parameters (Solr startup parameters). Required for Kerberos authentication.")
+ .build();
+
+ private static final Option BLOCK_UNKNOWN_OPTION =
+ Option.builder()
+ .longOpt("block-unknown")
+ .desc("Blocks all access for unknown users (requires authentication for all endpoints).")
+ .hasArg()
+ .argName("true|false")
+ .type(Boolean.class)
+ .build();
+
+ private static final Option SOLR_INCLUDE_FILE_OPTION =
+ Option.builder()
+ .longOpt("solr-include-file")
+ .hasArg()
+ .argName("FILE")
+ .desc(
+ "The Solr include file which contains overridable environment variables for configuring Solr configurations.")
+ .build();
+
+ private static final Option UPDATE_INCLUDE_FILE_OPTION =
+ Option.builder()
+ .longOpt("update-include-file-only")
+ .desc(
+ "Only update the solr.in.sh or solr.in.cmd file, and skip actual enabling/disabling"
+ + " authentication (i.e. don't update security.json).")
+ .hasArg()
+ .type(Boolean.class)
+ .build();
+
+ private static final Option AUTH_CONF_DIR_OPTION =
+ Option.builder()
+ .longOpt("auth-conf-dir")
+ .hasArg()
+ .argName("FILE")
+ .required()
+ .desc(
+ "This is where any authentication related configuration files, if any, would be placed.")
+ .build();
+
public AuthTool() {
this(CLIO.getOutStream());
}
@@ -84,79 +148,38 @@ public String getHeader() {
"SOLR_AUTHENTICATION_CLIENT_BUILDER", "SOLR_AUTH_TYPE", "SOLR_AUTHENTICATION_OPTS");
@Override
-  public List<Option> getOptions() {
- return List.of(
- Option.builder()
- .longOpt("type")
- .hasArg()
- .desc(
- "The authentication mechanism to enable (basicAuth or kerberos). Defaults to 'basicAuth'.")
- .build(),
- Option.builder()
- .longOpt("prompt")
- .hasArg()
- .desc(
- "Prompts the user to provide the credentials. Use either --credentials or --prompt, not both.")
- .build(),
- Option.builder()
- .longOpt("config")
- .hasArgs()
- .desc(
- "Configuration parameters (Solr startup parameters). Required for Kerberos authentication.")
- .build(),
- Option.builder()
- .longOpt("block-unknown")
- .desc(
- "Blocks all access for unknown users (requires authentication for all endpoints).")
- .hasArg()
- .argName("true|false")
- .build(),
- Option.builder()
- .longOpt("solr-include-file")
- .hasArg()
- .argName("FILE")
- .desc(
- "The Solr include file which contains overridable environment variables for configuring Solr configurations.")
- .build(),
- Option.builder()
- .longOpt("update-include-file-only")
- .desc(
- "Only update the solr.in.sh or solr.in.cmd file, and skip actual enabling/disabling"
- + " authentication (i.e. don't update security.json).")
- .hasArg()
- .build(),
- Option.builder()
- .longOpt("auth-conf-dir")
- .hasArg()
- .argName("FILE")
- .required()
- .desc(
- "This is where any authentication related configuration files, if any, would be placed.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(TYPE_OPTION)
+ .addOption(PROMPT_OPTION)
+ .addOption(CONFIG_OPTION)
+ .addOption(BLOCK_UNKNOWN_OPTION)
+ .addOption(SOLR_INCLUDE_FILE_OPTION)
+ .addOption(UPDATE_INCLUDE_FILE_OPTION)
+ .addOption(AUTH_CONF_DIR_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
- private void ensureArgumentIsValidBooleanIfPresent(CommandLine cli, String argName) {
- if (cli.hasOption(argName)) {
- final String value = cli.getOptionValue(argName);
+ private void ensureArgumentIsValidBooleanIfPresent(CommandLine cli, Option option) {
+ if (cli.hasOption(option)) {
+ final String value = cli.getOptionValue(option);
if (!"true".equalsIgnoreCase(value) && !"false".equalsIgnoreCase(value)) {
- echo("Argument [" + argName + "] must be either true or false, but was [" + value + "]");
+ echo(
+ "Argument ["
+ + option.getLongOpt()
+ + "] must be either true or false, but was ["
+ + value
+ + "]");
SolrCLI.exit(1);
}
}
}
- // @Override
- // public int runTool(CommandLine cli) throws Exception {
-
- // }
-
- private int handleKerberos(CommandLine cli) throws Exception {
+ private void handleKerberos(CommandLine cli) throws Exception {
String cmd = cli.getArgs()[0];
boolean updateIncludeFileOnly =
- Boolean.parseBoolean(cli.getOptionValue("update-include-file-only", "false"));
+ Boolean.parseBoolean(cli.getOptionValue(UPDATE_INCLUDE_FILE_OPTION, "false"));
String securityJson =
"{"
+ "\n \"authentication\":{"
@@ -176,7 +199,7 @@ private int handleKerberos(CommandLine cli) throws Exception {
CLIO.out(
"Unable to access ZooKeeper. Please add the following security.json to ZooKeeper (in case of SolrCloud):\n"
+ securityJson
              + "\n");
zkInaccessible = true;
}
if (zkHost == null) {
@@ -217,7 +240,7 @@ private int handleKerberos(CommandLine cli) throws Exception {
}
}
- String config = StrUtils.join(Arrays.asList(cli.getOptionValues("config")), ' ');
+ String config = StrUtils.join(Arrays.asList(cli.getOptionValues(CONFIG_OPTION)), ' ');
// config is base64 encoded (to get around parsing problems), decode it
config = config.replace(" ", "");
config =
@@ -226,7 +249,7 @@ private int handleKerberos(CommandLine cli) throws Exception {
StandardCharsets.UTF_8);
config = config.replace("\n", "").replace("\r", "");
- String solrIncludeFilename = cli.getOptionValue("solr-include-file");
+ String solrIncludeFilename = cli.getOptionValue(SOLR_INCLUDE_FILE_OPTION);
File includeFile = new File(solrIncludeFilename);
if (!includeFile.exists() || !includeFile.canWrite()) {
CLIO.out(
@@ -236,15 +259,14 @@ private int handleKerberos(CommandLine cli) throws Exception {
}
// update the solr.in.sh file to contain the necessary authentication lines
- updateIncludeFileEnableAuth(includeFile.toPath(), null, config, cli);
+ updateIncludeFileEnableAuth(includeFile.toPath(), null, config);
echo(
"Successfully enabled Kerberos authentication; please restart any running Solr nodes.");
- return 0;
-
+ return;
case "disable":
clearSecurityJson(cli, updateIncludeFileOnly);
- solrIncludeFilename = cli.getOptionValue("solr-include-file");
+ solrIncludeFilename = cli.getOptionValue(SOLR_INCLUDE_FILE_OPTION);
includeFile = new File(solrIncludeFilename);
if (!includeFile.exists() || !includeFile.canWrite()) {
CLIO.out(
@@ -255,38 +277,31 @@ private int handleKerberos(CommandLine cli) throws Exception {
}
// update the solr.in.sh file to comment out the necessary authentication lines
- updateIncludeFileDisableAuth(includeFile.toPath(), cli);
- return 0;
-
+ updateIncludeFileDisableAuth(includeFile.toPath());
+ return;
default:
CLIO.out("Valid auth commands are: enable, disable.");
SolrCLI.exit(1);
}
CLIO.out("Options not understood.");
- new HelpFormatter()
- .printHelp("bin/solr auth [OPTIONS]", SolrCLI.getToolOptions(this));
- return 1;
+ SolrCLI.exit(1);
}
- private int handleBasicAuth(CommandLine cli) throws Exception {
+ private void handleBasicAuth(CommandLine cli) throws Exception {
String cmd = cli.getArgs()[0];
- boolean prompt = Boolean.parseBoolean(cli.getOptionValue("prompt", "false"));
+ boolean prompt = Boolean.parseBoolean(cli.getOptionValue(PROMPT_OPTION, "false"));
boolean updateIncludeFileOnly =
- Boolean.parseBoolean(cli.getOptionValue("update-include-file-only", "false"));
+ Boolean.parseBoolean(cli.getOptionValue(UPDATE_INCLUDE_FILE_OPTION, "false"));
switch (cmd) {
case "enable":
- if (!prompt && !cli.hasOption("credentials")) {
+ if (!prompt && !cli.hasOption(CommonCLIOptions.CREDENTIALS_OPTION)) {
CLIO.out("Option --credentials or --prompt is required with enable.");
- new HelpFormatter()
- .printHelp("bin/solr auth [OPTIONS]", SolrCLI.getToolOptions(this));
SolrCLI.exit(1);
} else if (!prompt
- && (cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()) == null
- || !cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()).contains(":"))) {
+ && (cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION) == null
+ || !cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION).contains(":"))) {
CLIO.out("Option --credentials is not in correct format.");
- new HelpFormatter()
- .printHelp("bin/solr auth [OPTIONS]", SolrCLI.getToolOptions(this));
SolrCLI.exit(1);
}
@@ -296,7 +311,7 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
try {
zkHost = CLIUtils.getZkHost(cli);
} catch (Exception ex) {
- if (cli.hasOption("zk-host")) {
+ if (cli.hasOption(CommonCLIOptions.ZK_HOST_OPTION)) {
CLIO.out(
"Couldn't get ZooKeeper host. Please make sure that ZooKeeper is running and the correct zk-host has been passed in.");
} else {
@@ -306,7 +321,7 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
SolrCLI.exit(1);
}
if (zkHost == null) {
- if (cli.hasOption("zk-host")) {
+ if (cli.hasOption(CommonCLIOptions.ZK_HOST_OPTION)) {
CLIO.out(
"Couldn't get ZooKeeper host. Please make sure that ZooKeeper is running and the correct zk-host has been passed in.");
} else {
@@ -323,8 +338,8 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
}
String username, password;
- if (cli.hasOption(SolrCLI.OPTION_CREDENTIALS.getLongOpt())) {
- String credentials = cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt());
+ if (cli.hasOption(CommonCLIOptions.CREDENTIALS_OPTION)) {
+ String credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
username = credentials.split(":")[0];
password = credentials.split(":")[1];
} else {
@@ -340,7 +355,8 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
} while (password.length() == 0);
}
- boolean blockUnknown = Boolean.parseBoolean(cli.getOptionValue("block-unknown", "true"));
+ boolean blockUnknown =
+ Boolean.parseBoolean(cli.getOptionValue(BLOCK_UNKNOWN_OPTION, "true"));
String resourceName = "security.json";
final URL resource = SolrCore.class.getClassLoader().getResource(resourceName);
@@ -370,7 +386,7 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
}
}
- String solrIncludeFilename = cli.getOptionValue("solr-include-file");
+ String solrIncludeFilename = cli.getOptionValue(SOLR_INCLUDE_FILE_OPTION);
File includeFile = new File(solrIncludeFilename);
if (!includeFile.exists() || !includeFile.canWrite()) {
CLIO.out(
@@ -378,7 +394,7 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
printAuthEnablingInstructions(username, password);
System.exit(0);
}
- String authConfDir = cli.getOptionValue("auth-conf-dir");
+ String authConfDir = cli.getOptionValue(AUTH_CONF_DIR_OPTION);
File basicAuthConfFile = new File(authConfDir + File.separator + "basicAuth.conf");
if (!basicAuthConfFile.getParentFile().canWrite()) {
@@ -394,7 +410,7 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
// update the solr.in.sh file to contain the necessary authentication lines
updateIncludeFileEnableAuth(
- includeFile.toPath(), basicAuthConfFile.getAbsolutePath(), null, cli);
+ includeFile.toPath(), basicAuthConfFile.getAbsolutePath(), null);
final String successMessage =
String.format(
Locale.ROOT,
@@ -402,12 +418,11 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
username,
password);
echo(successMessage);
- return 0;
-
+ return;
case "disable":
clearSecurityJson(cli, updateIncludeFileOnly);
- solrIncludeFilename = cli.getOptionValue("solr-include-file");
+ solrIncludeFilename = cli.getOptionValue(SOLR_INCLUDE_FILE_OPTION);
includeFile = new File(solrIncludeFilename);
if (!includeFile.exists() || !includeFile.canWrite()) {
CLIO.out(
@@ -418,18 +433,15 @@ private int handleBasicAuth(CommandLine cli) throws Exception {
}
// update the solr.in.sh file to comment out the necessary authentication lines
- updateIncludeFileDisableAuth(includeFile.toPath(), cli);
- return 0;
-
+ updateIncludeFileDisableAuth(includeFile.toPath());
+ return;
default:
CLIO.out("Valid auth commands are: enable, disable.");
SolrCLI.exit(1);
}
CLIO.out("Options not understood.");
- new HelpFormatter()
- .printHelp("bin/solr auth [OPTIONS]", SolrCLI.getToolOptions(this));
- return 1;
+ SolrCLI.exit(1);
}
private void checkSecurityJsonExists(SolrZkClient zkClient)
@@ -517,8 +529,7 @@ private void printAuthEnablingInstructions(String kerberosConfig) {
* null.
*/
private void updateIncludeFileEnableAuth(
- Path includeFile, String basicAuthConfFile, String kerberosConfig, CommandLine cli)
- throws IOException {
+ Path includeFile, String basicAuthConfFile, String kerberosConfig) throws IOException {
assert !(basicAuthConfFile != null
&& kerberosConfig != null); // only one of the two needs to be populated
    List<String> includeFileLines = Files.readAllLines(includeFile, StandardCharsets.UTF_8);
@@ -552,7 +563,7 @@ private void updateIncludeFileEnableAuth(
includeFileLines.add("REM The following lines added by solr.cmd for enabling BasicAuth");
includeFileLines.add("set SOLR_AUTH_TYPE=kerberos");
includeFileLines.add(
          "set SOLR_AUTHENTICATION_OPTS=\"-Dsolr.httpclient.config=" + basicAuthConfFile + "\"");
} else {
includeFileLines.add("# The following lines added by ./solr for enabling BasicAuth");
includeFileLines.add("SOLR_AUTH_TYPE=\"kerberos\"");
@@ -569,7 +580,7 @@ private void updateIncludeFileEnableAuth(
echoIfVerbose("Updated Solr include file: " + includeFile.toAbsolutePath());
}
- private void updateIncludeFileDisableAuth(Path includeFile, CommandLine cli) throws IOException {
+ private void updateIncludeFileDisableAuth(Path includeFile) throws IOException {
    List<String> includeFileLines = Files.readAllLines(includeFile, StandardCharsets.UTF_8);
boolean hasChanged = false;
for (int i = 0; i < includeFileLines.size(); i++) {
@@ -594,12 +605,10 @@ private void updateIncludeFileDisableAuth(Path includeFile, CommandLine cli) thr
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- ensureArgumentIsValidBooleanIfPresent(cli, "block-unknown");
- ensureArgumentIsValidBooleanIfPresent(cli, "update-include-file-only");
+ ensureArgumentIsValidBooleanIfPresent(cli, BLOCK_UNKNOWN_OPTION);
+ ensureArgumentIsValidBooleanIfPresent(cli, UPDATE_INCLUDE_FILE_OPTION);
- String type = cli.getOptionValue("type", "basicAuth");
+ String type = cli.getOptionValue(TYPE_OPTION, "basicAuth");
switch (type) {
case "basicAuth":
handleBasicAuth(cli);
diff --git a/solr/core/src/java/org/apache/solr/cli/CLIUtils.java b/solr/core/src/java/org/apache/solr/cli/CLIUtils.java
index 00a80328d3b..57c77e1ff85 100644
--- a/solr/core/src/java/org/apache/solr/cli/CLIUtils.java
+++ b/solr/core/src/java/org/apache/solr/cli/CLIUtils.java
@@ -135,14 +135,14 @@ public static SolrClient getSolrClient(String solrUrl, String credentials) {
public static SolrClient getSolrClient(CommandLine cli, boolean barePath) throws Exception {
String solrUrl = normalizeSolrUrl(cli);
// TODO Replace hard-coded string with Option object
- String credentials = cli.getOptionValue("credentials");
+ String credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
return getSolrClient(solrUrl, credentials, barePath);
}
public static SolrClient getSolrClient(CommandLine cli) throws Exception {
String solrUrl = normalizeSolrUrl(cli);
// TODO Replace hard-coded string with Option object
- String credentials = cli.getOptionValue("credentials");
+ String credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
return getSolrClient(solrUrl, credentials, false);
}
@@ -194,10 +194,10 @@ public static String normalizeSolrUrl(String solrUrl, boolean logUrlFormatWarnin
* ZooKeeper.
*/
public static String normalizeSolrUrl(CommandLine cli) throws Exception {
- String solrUrl = cli.getOptionValue("solr-url");
+ String solrUrl = cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION);
if (solrUrl == null) {
- String zkHost = cli.getOptionValue("zk-host");
+ String zkHost = cli.getOptionValue(CommonCLIOptions.ZK_HOST_OPTION);
if (zkHost == null) {
solrUrl = getDefaultSolrUrl();
CLIO.err(
@@ -228,7 +228,7 @@ public static String normalizeSolrUrl(CommandLine cli) throws Exception {
*/
public static String getZkHost(CommandLine cli) throws Exception {
- String zkHost = cli.getOptionValue("zk-host");
+ String zkHost = cli.getOptionValue(CommonCLIOptions.ZK_HOST_OPTION);
if (zkHost != null && !zkHost.isBlank()) {
return zkHost;
}
@@ -256,15 +256,11 @@ public static String getZkHost(CommandLine cli) throws Exception {
return zkHost;
}
- public static SolrZkClient getSolrZkClient(CommandLine cli) throws Exception {
- return getSolrZkClient(cli, getZkHost(cli));
- }
-
public static SolrZkClient getSolrZkClient(CommandLine cli, String zkHost) throws Exception {
if (zkHost == null) {
throw new IllegalStateException(
"Solr at "
- + cli.getOptionValue("solrUrl")
+ + cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION)
+ " is running in standalone server mode, this command can only be used when running in SolrCloud mode.\n");
}
return new SolrZkClient.Builder()
diff --git a/solr/core/src/java/org/apache/solr/cli/ClusterTool.java b/solr/core/src/java/org/apache/solr/cli/ClusterTool.java
index b5d94171232..714654829cc 100644
--- a/solr/core/src/java/org/apache/solr/cli/ClusterTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ClusterTool.java
@@ -19,10 +19,10 @@
import java.io.IOException;
import java.io.PrintStream;
-import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.impl.SolrZkClientTimeout;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.cloud.ClusterProperties;
@@ -36,6 +36,23 @@
public class ClusterTool extends ToolBase {
// It is a shame this tool doesn't more closely mimic how the ConfigTool works.
+ private static final Option PROPERTY_OPTION =
+ Option.builder()
+ .longOpt("property")
+ .hasArg()
+ .argName("PROPERTY")
+ .required()
+ .desc("Name of the Cluster property to apply the action to, such as: 'urlScheme'.")
+ .build();
+
+ private static final Option VALUE_OPTION =
+ Option.builder()
+ .longOpt("value")
+ .hasArg()
+ .argName("VALUE")
+ .desc("Set the property to this value.")
+ .build();
+
public ClusterTool() {
this(CLIO.getOutStream());
}
@@ -50,30 +67,18 @@ public String getName() {
}
@Override
-  public List<Option> getOptions() {
- return List.of(
- Option.builder()
- .longOpt("property")
- .hasArg()
- .argName("PROPERTY")
- .required(true)
- .desc("Name of the Cluster property to apply the action to, such as: 'urlScheme'.")
- .build(),
- Option.builder()
- .longOpt("value")
- .hasArg()
- .argName("VALUE")
- .required(false)
- .desc("Set the property to this value.")
- .build(),
- SolrCLI.OPTION_ZKHOST);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(PROPERTY_OPTION)
+ .addOption(VALUE_OPTION)
+ .addOption(CommonCLIOptions.ZK_HOST_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- String propertyName = cli.getOptionValue("property");
- String propertyValue = cli.getOptionValue("value");
+ String propertyName = cli.getOptionValue(PROPERTY_OPTION);
+ String propertyValue = cli.getOptionValue(VALUE_OPTION);
String zkHost = CLIUtils.getZkHost(cli);
if (!ZkController.checkChrootPath(zkHost, true)) {
diff --git a/solr/core/src/java/org/apache/solr/cli/CommonCLIOptions.java b/solr/core/src/java/org/apache/solr/cli/CommonCLIOptions.java
new file mode 100644
index 00000000000..e6425abe1f3
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/cli/CommonCLIOptions.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.cli;
+
+import org.apache.commons.cli.Option;
+
+public final class CommonCLIOptions {
+
+ private CommonCLIOptions() {}
+
+ public static final Option VERBOSE_OPTION =
+ Option.builder("v").longOpt("verbose").desc("Enable verbose command output.").build();
+
+ public static final Option HELP_OPTION =
+ Option.builder("h").longOpt("help").desc("Print this message.").build();
+
+ public static final Option ZK_HOST_OPTION =
+ Option.builder("z")
+ .longOpt("zk-host")
+ .hasArg()
+ .argName("HOST")
+ .desc(
+ "Zookeeper connection string; unnecessary if ZK_HOST is defined in solr.in.sh; otherwise, defaults to "
+ + DefaultValues.ZK_HOST
+ + '.')
+ .build();
+
+ public static final Option SOLR_URL_OPTION =
+ Option.builder("s")
+ .longOpt("solr-url")
+ .hasArg()
+ .argName("HOST")
+ .desc(
+ "Base Solr URL, which can be used to determine the zk-host if that's not known; defaults to: "
+ + CLIUtils.getDefaultSolrUrl()
+ + '.')
+ .build();
+
+ public static final Option RECURSIVE_OPTION =
+ Option.builder("r").longOpt("recursive").desc("Apply the command recursively.").build();
+
+ public static final Option CREDENTIALS_OPTION =
+ Option.builder("u")
+ .longOpt("credentials")
+ .hasArg()
+ .argName("credentials")
+ .desc(
+ "Credentials in the format username:password. Example: --credentials solr:SolrRocks")
+ .build();
+
+ public static final class DefaultValues {
+
+ private DefaultValues() {}
+
+ public static final String ZK_HOST = "localhost:9983";
+
+ public static final String DEFAULT_CONFIG_SET = "_default";
+ }
+}
diff --git a/solr/core/src/java/org/apache/solr/cli/ConfigSetDownloadTool.java b/solr/core/src/java/org/apache/solr/cli/ConfigSetDownloadTool.java
index 12d1c2f69ba..264023640f7 100644
--- a/solr/core/src/java/org/apache/solr/cli/ConfigSetDownloadTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ConfigSetDownloadTool.java
@@ -21,9 +21,9 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.common.cloud.SolrZkClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -32,6 +32,24 @@
public class ConfigSetDownloadTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option CONF_NAME_OPTION =
+ Option.builder("n")
+ .longOpt("conf-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Configset name in ZooKeeper.")
+ .build();
+
+ private static final Option CONF_DIR_OPTION =
+ Option.builder("d")
+ .longOpt("conf-dir")
+ .hasArg()
+ .argName("DIR")
+ .required()
+ .desc("Local directory with configs.")
+ .build();
+
public ConfigSetDownloadTool() {
this(CLIO.getOutStream());
}
@@ -41,25 +59,12 @@ public ConfigSetDownloadTool(PrintStream stdout) {
}
@Override
-  public List<Option> getOptions() {
- return List.of(
- Option.builder("n")
- .longOpt("conf-name")
- .hasArg()
- .argName("NAME")
- .required(true)
- .desc("Configset name in ZooKeeper.")
- .build(),
- Option.builder("d")
- .longOpt("conf-dir")
- .hasArg()
- .argName("DIR")
- .required(true)
- .desc("Local directory with configs.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(CONF_NAME_OPTION)
+ .addOption(CONF_DIR_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -74,11 +79,10 @@ public String getUsage() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
- String confName = cli.getOptionValue("conf-name");
- String confDir = cli.getOptionValue("conf-dir");
+ String confName = cli.getOptionValue(CONF_NAME_OPTION);
+ String confDir = cli.getOptionValue(CONF_DIR_OPTION);
echoIfVerbose("\nConnecting to ZooKeeper at " + zkHost + " ...");
try (SolrZkClient zkClient = CLIUtils.getSolrZkClient(cli, zkHost)) {
diff --git a/solr/core/src/java/org/apache/solr/cli/ConfigSetUploadTool.java b/solr/core/src/java/org/apache/solr/cli/ConfigSetUploadTool.java
index 58cd4815fe8..a9fc0631e11 100644
--- a/solr/core/src/java/org/apache/solr/cli/ConfigSetUploadTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ConfigSetUploadTool.java
@@ -20,9 +20,9 @@
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkMaintenanceUtils;
import org.apache.solr.core.ConfigSetService;
@@ -34,6 +34,24 @@
public class ConfigSetUploadTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option CONF_NAME_OPTION =
+ Option.builder("n")
+ .longOpt("conf-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Configset name in ZooKeeper.")
+ .build();
+
+ private static final Option CONF_DIR_OPTION =
+ Option.builder("d")
+ .longOpt("conf-dir")
+ .hasArg()
+ .argName("DIR")
+ .required()
+ .desc("Local directory with configs.")
+ .build();
+
public ConfigSetUploadTool() {
this(CLIO.getOutStream());
}
@@ -43,25 +61,12 @@ public ConfigSetUploadTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("n")
- .longOpt("conf-name")
- .hasArg()
- .argName("NAME")
- .required(true)
- .desc("Configset name in ZooKeeper.")
- .build(),
- Option.builder("d")
- .longOpt("conf-dir")
- .hasArg()
- .argName("DIR")
- .required(true)
- .desc("Local directory with configs.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(CONF_NAME_OPTION)
+ .addOption(CONF_DIR_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -76,14 +81,13 @@ public String getUsage() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
final String solrInstallDir = System.getProperty("solr.install.dir");
Path solrInstallDirPath = Paths.get(solrInstallDir);
- String confName = cli.getOptionValue("conf-name");
- String confDir = cli.getOptionValue("conf-dir");
+ String confName = cli.getOptionValue(CONF_NAME_OPTION);
+ String confDir = cli.getOptionValue(CONF_DIR_OPTION);
echoIfVerbose("\nConnecting to ZooKeeper at " + zkHost + " ...");
try (SolrZkClient zkClient = CLIUtils.getSolrZkClient(cli, zkHost)) {
diff --git a/solr/core/src/java/org/apache/solr/cli/ConfigTool.java b/solr/core/src/java/org/apache/solr/cli/ConfigTool.java
index c2d20a5e111..e5e7c96791c 100644
--- a/solr/core/src/java/org/apache/solr/cli/ConfigTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ConfigTool.java
@@ -19,11 +19,11 @@
import java.io.PrintStream;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.common.util.NamedList;
import org.noggit.CharArr;
@@ -36,6 +36,42 @@
*/
public class ConfigTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the collection.")
+ .build();
+
+ private static final Option ACTION_OPTION =
+ Option.builder("a")
+ .longOpt("action")
+ .hasArg()
+ .argName("ACTION")
+ .desc(
+ "Config API action, one of: set-property, unset-property, set-user-property, unset-user-property; default is 'set-property'.")
+ .build();
+
+ private static final Option PROPERTY_OPTION =
+ Option.builder()
+ .longOpt("property")
+ .hasArg()
+ .argName("PROP")
+ .required()
+ .desc(
+ "Name of the Config API property to apply the action to, such as: 'updateHandler.autoSoftCommit.maxTime'.")
+ .build();
+
+ private static final Option VALUE_OPTION =
+ Option.builder("v")
+ .longOpt("value")
+ .hasArg()
+ .argName("VALUE")
+ .desc("Set the property to this value; accepts JSON objects and strings.")
+ .build();
+
public ConfigTool() {
this(CLIO.getOutStream());
}
@@ -50,50 +86,23 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the collection.")
- .build(),
- Option.builder("a")
- .longOpt("action")
- .argName("ACTION")
- .hasArg()
- .required(false)
- .desc(
- "Config API action, one of: set-property, unset-property, set-user-property, unset-user-property; default is 'set-property'.")
- .build(),
- Option.builder()
- .longOpt("property")
- .argName("PROP")
- .hasArg()
- .required(true)
- .desc(
- "Name of the Config API property to apply the action to, such as: 'updateHandler.autoSoftCommit.maxTime'.")
- .build(),
- Option.builder()
- .longOpt("value")
- .argName("VALUE")
- .hasArg()
- .required(false)
- .desc("Set the property to this value; accepts JSON objects and strings.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(ACTION_OPTION)
+ .addOption(PROPERTY_OPTION)
+ .addOption(VALUE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
String solrUrl = CLIUtils.normalizeSolrUrl(cli);
- String action = cli.getOptionValue("action", "set-property");
- String collection = cli.getOptionValue("name");
- String property = cli.getOptionValue("property");
- String value = cli.getOptionValue("value");
+ String action = cli.getOptionValue(ACTION_OPTION, "set-property");
+ String collection = cli.getOptionValue(COLLECTION_NAME_OPTION);
+ String property = cli.getOptionValue(PROPERTY_OPTION);
+ String value = cli.getOptionValue(VALUE_OPTION);
// value is required unless the property is one of the unset- type.
if (!action.contains("unset-") && value == null) {
@@ -118,8 +127,7 @@ public void runImpl(CommandLine cli) throws Exception {
echoIfVerbose(jsonBody);
try (SolrClient solrClient =
- CLIUtils.getSolrClient(
- solrUrl, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ CLIUtils.getSolrClient(solrUrl, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
NamedList result = SolrCLI.postJsonToSolr(solrClient, updatePath, jsonBody);
Integer statusCode = (Integer) result.findRecursive("responseHeader", "status");
if (statusCode == 0) {
diff --git a/solr/core/src/java/org/apache/solr/cli/CreateTool.java b/solr/core/src/java/org/apache/solr/cli/CreateTool.java
index a3c2a4ff4ec..b1607af071b 100644
--- a/solr/core/src/java/org/apache/solr/cli/CreateTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/CreateTool.java
@@ -21,15 +21,16 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.file.PathUtils;
+import org.apache.solr.cli.CommonCLIOptions.DefaultValues;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
@@ -52,6 +53,53 @@
/** Supports create command in the bin/solr script. */
public class CreateTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection or core to create.")
+ .build();
+
+ private static final Option SHARDS_OPTION =
+ Option.builder("sh")
+ .longOpt("shards")
+ .hasArg()
+ .argName("#")
+ .type(Integer.class)
+ .desc("Number of shards; default is 1.")
+ .build();
+
+ private static final Option REPLICATION_FACTOR_OPTION =
+ Option.builder("rf")
+ .longOpt("replication-factor")
+ .hasArg()
+ .argName("#")
+ .type(Integer.class)
+ .desc(
+ "Number of copies of each document across the collection (replicas per shard); default is 1.")
+ .build();
+
+ private static final Option CONF_DIR_OPTION =
+ Option.builder("d")
+ .longOpt("conf-dir")
+ .hasArg()
+ .argName("DIR")
+ .desc(
+ "Configuration directory to copy when creating the new collection; default is "
+ + DefaultValues.DEFAULT_CONFIG_SET
+ + '.')
+ .build();
+
+ private static final Option CONF_NAME_OPTION =
+ Option.builder("n")
+ .longOpt("conf-name")
+ .hasArg()
+ .argName("NAME")
+ .desc("Configuration name; default is the collection name.")
+ .build();
+
public CreateTool() {
this(CLIO.getOutStream());
}
@@ -74,53 +122,22 @@ public String getHeader() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .hasArg()
- .argName("NAME")
- .required(true)
- .desc("Name of collection or core to create.")
- .build(),
- Option.builder("sh")
- .longOpt("shards")
- .hasArg()
- .argName("#")
- .desc("Number of shards; default is 1.")
- .build(),
- Option.builder("rf")
- .longOpt("replication-factor")
- .hasArg()
- .argName("#")
- .desc(
- "Number of copies of each document across the collection (replicas per shard); default is 1.")
- .build(),
- Option.builder("d")
- .longOpt("conf-dir")
- .argName("DIR")
- .hasArg()
- .desc(
- "Configuration directory to copy when creating the new collection; default is "
- + SolrCLI.DEFAULT_CONFIG_SET
- + '.')
- .build(),
- Option.builder("n")
- .longOpt("conf-name")
- .argName("NAME")
- .hasArg()
- .required(false)
- .desc("Configuration name; default is the collection name.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ Options opts =
+ super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SHARDS_OPTION)
+ .addOption(REPLICATION_FACTOR_OPTION)
+ .addOption(CONF_DIR_OPTION)
+ .addOption(CONF_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
+
+ return opts;
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
try (var solrClient = CLIUtils.getSolrClient(cli)) {
if (CLIUtils.isCloudMode(solrClient)) {
createCollection(cli);
@@ -131,11 +148,13 @@ public void runImpl(CommandLine cli) throws Exception {
}
protected void createCore(CommandLine cli, SolrClient solrClient) throws Exception {
- String coreName = cli.getOptionValue("name");
- String solrUrl = cli.getOptionValue("solr-url", CLIUtils.getDefaultSolrUrl());
+ String coreName = cli.getOptionValue(COLLECTION_NAME_OPTION);
+ String solrUrl =
+ cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION, CLIUtils.getDefaultSolrUrl());
final String solrInstallDir = System.getProperty("solr.install.dir");
- final String confDirName = cli.getOptionValue("conf-dir", SolrCLI.DEFAULT_CONFIG_SET);
+ final String confDirName =
+ cli.getOptionValue(CONF_DIR_OPTION, DefaultValues.DEFAULT_CONFIG_SET);
// we allow them to pass a directory instead of a configset name
Path configsetDir = Paths.get(confDirName);
@@ -157,7 +176,7 @@ protected void createCore(CommandLine cli, SolrClient solrClient) throws Excepti
coreRootDirectory = (String) systemInfo.get("core_root");
if (CLIUtils.safeCheckCoreExists(
- solrUrl, coreName, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ solrUrl, coreName, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
throw new IllegalArgumentException(
"\nCore '"
+ coreName
@@ -204,7 +223,7 @@ protected void createCollection(CommandLine cli) throws Exception {
.withConnectionTimeout(15, TimeUnit.SECONDS)
.withKeyStoreReloadInterval(-1, TimeUnit.SECONDS)
.withOptionalBasicAuthCredentials(
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
String zkHost = CLIUtils.getZkHost(cli);
echoIfVerbose("Connecting to ZooKeeper at " + zkHost);
try (CloudSolrClient cloudSolrClient = CLIUtils.getCloudHttp2SolrClient(zkHost, builder)) {
@@ -216,10 +235,10 @@ protected void createCollection(CommandLine cli) throws Exception {
protected void createCollection(CloudSolrClient cloudSolrClient, CommandLine cli)
throws Exception {
- String collectionName = cli.getOptionValue("name");
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
final String solrInstallDir = System.getProperty("solr.install.dir");
- String confName = cli.getOptionValue("conf-name");
- String confDir = cli.getOptionValue("conf-dir", SolrCLI.DEFAULT_CONFIG_SET);
+ String confName = cli.getOptionValue(CONF_NAME_OPTION);
+ String confDir = cli.getOptionValue(CONF_DIR_OPTION, DefaultValues.DEFAULT_CONFIG_SET);
Path solrInstallDirPath = Paths.get(solrInstallDir);
Path confDirPath = Paths.get(confDir);
ensureConfDirExists(solrInstallDirPath, confDirPath);
@@ -231,19 +250,15 @@ protected void createCollection(CloudSolrClient cloudSolrClient, CommandLine cli
"No live nodes found! Cannot create a collection until "
+ "there is at least 1 live node in the cluster.");
- String solrUrl = cli.getOptionValue("solr-url");
+ String solrUrl = cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION);
if (solrUrl == null) {
String firstLiveNode = liveNodes.iterator().next();
solrUrl = ZkStateReader.from(cloudSolrClient).getBaseUrlForNodeName(firstLiveNode);
}
// build a URL to create the collection
- int numShards = Integer.parseInt(cli.getOptionValue("shards", String.valueOf(1)));
- int replicationFactor = 1;
-
- if (cli.hasOption("replication-factor")) {
- replicationFactor = Integer.parseInt(cli.getOptionValue("replication-factor"));
- }
+ int numShards = cli.getParsedOptionValue(SHARDS_OPTION, 1);
+ int replicationFactor = cli.getParsedOptionValue(REPLICATION_FACTOR_OPTION, 1);
boolean configExistsInZk =
confName != null
@@ -280,7 +295,7 @@ protected void createCollection(CloudSolrClient cloudSolrClient, CommandLine cli
// since creating a collection is a heavy-weight operation, check for existence first
if (CLIUtils.safeCheckCollectionExists(
- solrUrl, collectionName, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ solrUrl, collectionName, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
throw new IllegalStateException(
"\nCollection '"
+ collectionName
@@ -339,13 +354,15 @@ private void ensureConfDirExists(Path solrInstallDir, Path confDirName) {
}
private void printDefaultConfigsetWarningIfNecessary(CommandLine cli) {
- final String confDirectoryName = cli.getOptionValue("conf-dir", SolrCLI.DEFAULT_CONFIG_SET);
- final String confName = cli.getOptionValue("conf-name", "");
+ final String confDirectoryName =
+ cli.getOptionValue(CONF_DIR_OPTION, DefaultValues.DEFAULT_CONFIG_SET);
+ final String confName = cli.getOptionValue(CONF_NAME_OPTION, "");
if (confDirectoryName.equals("_default")
&& (confName.equals("") || confName.equals("_default"))) {
- final String collectionName = cli.getOptionValue("name");
- final String solrUrl = cli.getOptionValue("solr-url", CLIUtils.getDefaultSolrUrl());
+ final String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
+ final String solrUrl =
+ cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION, CLIUtils.getDefaultSolrUrl());
final String curlCommand =
String.format(
Locale.ROOT,
diff --git a/solr/core/src/java/org/apache/solr/cli/DeleteTool.java b/solr/core/src/java/org/apache/solr/cli/DeleteTool.java
index 5b739523efe..7fc94cb85d3 100644
--- a/solr/core/src/java/org/apache/solr/cli/DeleteTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/DeleteTool.java
@@ -16,18 +16,16 @@
*/
package org.apache.solr.cli;
-import static org.apache.solr.common.params.CommonParams.NAME;
-
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
import java.util.Collection;
-import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
@@ -47,6 +45,29 @@
public class DeleteTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the core / collection to delete.")
+ .build();
+
+ private static final Option DELETE_CONFIG_OPTION =
+ Option.builder()
+ .longOpt("delete-config")
+ .desc(
+ "Flag to indicate if the underlying configuration directory for a collection should also be deleted; default is true.")
+ .build();
+
+ private static final Option FORCE_OPTION =
+ Option.builder("f")
+ .longOpt("force")
+ .desc(
+ "Skip safety checks when deleting the configuration directory used by a collection.")
+ .build();
+
public DeleteTool() {
this(CLIO.getOutStream());
}
@@ -62,47 +83,24 @@ public String getName() {
@Override
public String getHeader() {
- return "Deletes a core or collection depending on whether Solr is running in standalone (core) or SolrCloud"
- + " mode (collection). If you're deleting a collection in SolrCloud mode, the default behavior is to also"
- + " delete the configuration directory from Zookeeper so long as it is not being used by another collection.\n"
- + " You can override this behavior by passing --delete-config false when running this command.\n"
+ return "Deletes a collection or core depending on whether Solr is running in SolrCloud or standalone mode. "
+ + "Deleting a collection does not delete it's configuration unless you pass in the --delete-config flag.\n"
+ "\n"
+ "List of options:";
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the core / collection to delete.")
- .build(),
- Option.builder()
- .longOpt("delete-config")
- .hasArg()
- .argName("true|false")
- .required(false)
- .desc(
- "Flag to indicate if the underlying configuration directory for a collection should also be deleted; default is true.")
- .build(),
- Option.builder("f")
- .longOpt("force")
- .required(false)
- .desc(
- "Skip safety checks when deleting the configuration directory used by a collection.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(DELETE_CONFIG_OPTION)
+ .addOption(FORCE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
try (var solrClient = CLIUtils.getSolrClient(cli)) {
if (CLIUtils.isCloudMode(solrClient)) {
deleteCollection(cli);
@@ -118,7 +116,8 @@ protected void deleteCollection(CommandLine cli) throws Exception {
.withIdleTimeout(30, TimeUnit.SECONDS)
.withConnectionTimeout(15, TimeUnit.SECONDS)
.withKeyStoreReloadInterval(-1, TimeUnit.SECONDS)
- .withOptionalBasicAuthCredentials(cli.getOptionValue(("credentials")));
+ .withOptionalBasicAuthCredentials(
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
String zkHost = CLIUtils.getZkHost(cli);
try (CloudSolrClient cloudSolrClient = CLIUtils.getCloudHttp2SolrClient(zkHost, builder)) {
@@ -137,20 +136,17 @@ protected void deleteCollection(CloudSolrClient cloudSolrClient, CommandLine cli
+ "there is at least 1 live node in the cluster.");
ZkStateReader zkStateReader = ZkStateReader.from(cloudSolrClient);
- String collectionName = cli.getOptionValue(NAME);
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
if (!zkStateReader.getClusterState().hasCollection(collectionName)) {
throw new IllegalArgumentException("Collection " + collectionName + " not found!");
}
String configName =
zkStateReader.getClusterState().getCollection(collectionName).getConfigName();
- boolean deleteConfig = true;
- if (cli.hasOption("delete-config")) {
- deleteConfig = "true".equals(cli.getOptionValue("delete-config"));
- }
+ boolean deleteConfig = cli.hasOption(DELETE_CONFIG_OPTION);
if (deleteConfig && configName != null) {
- if (cli.hasOption("force")) {
+ if (cli.hasOption(FORCE_OPTION)) {
log.warn(
"Skipping safety checks, configuration directory {} will be deleted with impunity.",
configName);
@@ -224,7 +220,7 @@ protected void deleteCollection(CloudSolrClient cloudSolrClient, CommandLine cli
}
protected void deleteCore(CommandLine cli, SolrClient solrClient) throws Exception {
- String coreName = cli.getOptionValue(NAME);
+ String coreName = cli.getOptionValue(COLLECTION_NAME_OPTION);
echo("\nDeleting core '" + coreName + "' using CoreAdminRequest\n");
diff --git a/solr/core/src/java/org/apache/solr/cli/ExportTool.java b/solr/core/src/java/org/apache/solr/cli/ExportTool.java
index 0702173e755..ee8e5a42c78 100644
--- a/solr/core/src/java/org/apache/solr/cli/ExportTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ExportTool.java
@@ -55,6 +55,7 @@
import java.util.zip.GZIPOutputStream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.lucene.util.SuppressForbidden;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
@@ -88,57 +89,76 @@
/** Supports export command in the bin/solr script. */
public class ExportTool extends ToolBase {
+
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .desc("Name of the collection.")
+ .build();
+
+ private static final Option OUTPUT_OPTION =
+ Option.builder()
+ .longOpt("output")
+ .hasArg()
+ .argName("PATH")
+ .desc(
+ "Path to output the exported data, and optionally the file name, defaults to 'collection-name'.")
+ .build();
+
+ private static final Option FORMAT_OPTION =
+ Option.builder()
+ .longOpt("format")
+ .hasArg()
+ .argName("FORMAT")
+ .desc("Output format for exported docs (json, jsonl or javabin), defaulting to json.")
+ .build();
+
+ private static final Option COMPRESS_OPTION =
+ Option.builder().longOpt("compress").desc("Compress the output. Defaults to false.").build();
+
+ private static final Option LIMIT_OPTION =
+ Option.builder()
+ .longOpt("limit")
+ .hasArg()
+ .argName("#")
+ .desc("Maximum number of docs to download. Default is 100, use -1 for all docs.")
+ .build();
+
+ private static final Option QUERY_OPTION =
+ Option.builder()
+ .longOpt("query")
+ .hasArg()
+ .argName("QUERY")
+ .desc("A custom query, default is '*:*'.")
+ .build();
+
+ private static final Option FIELDS_OPTION =
+ Option.builder()
+ .longOpt("fields")
+ .hasArg()
+ .argName("FIELDA,FIELDB")
+ .desc("Comma separated list of fields to export. By default all fields are fetched.")
+ .build();
+
@Override
public String getName() {
return "export";
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .hasArg()
- .argName("NAME")
- .desc("Name of the collection.")
- .build(),
- Option.builder()
- .longOpt("output")
- .hasArg()
- .argName("PATH")
- .desc(
- "Path to output the exported data, and optionally the file name, defaults to 'collection-name'.")
- .build(),
- Option.builder()
- .longOpt("format")
- .hasArg()
- .argName("FORMAT")
- .desc("Output format for exported docs (json, jsonl or javabin), defaulting to json.")
- .build(),
- Option.builder()
- .longOpt("compress")
- .desc("Compress the output. Defaults to false.")
- .build(),
- Option.builder()
- .longOpt("limit")
- .hasArg()
- .argName("#")
- .desc("Maximum number of docs to download. Default is 100, use -1 for all docs.")
- .build(),
- Option.builder()
- .longOpt("query")
- .hasArg()
- .argName("QUERY")
- .desc("A custom query, default is '*:*'.")
- .build(),
- Option.builder()
- .longOpt("fields")
- .hasArg()
- .argName("FIELDA,FIELDB")
- .desc("Comma separated list of fields to export. By default all fields are fetched.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(OUTPUT_OPTION)
+ .addOption(FORMAT_OPTION)
+ .addOption(COMPRESS_OPTION)
+ .addOption(LIMIT_OPTION)
+ .addOption(QUERY_OPTION)
+ .addOption(FIELDS_OPTION)
+ .addOption(CommonCLIOptions.SOLR_URL_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION);
}
public abstract static class Info {
@@ -259,25 +279,27 @@ public void streamDocListInfo(long numFound, long start, Float maxScore) {}
@Override
public void runImpl(CommandLine cli) throws Exception {
String url = null;
- if (cli.hasOption("solr-url")) {
- if (!cli.hasOption("name")) {
+ if (cli.hasOption(CommonCLIOptions.SOLR_URL_OPTION)) {
+ if (!cli.hasOption(COLLECTION_NAME_OPTION)) {
throw new IllegalArgumentException(
"Must specify -c / --name parameter with --solr-url to post documents.");
}
- url = CLIUtils.normalizeSolrUrl(cli) + "/solr/" + cli.getOptionValue("name");
+ url = CLIUtils.normalizeSolrUrl(cli) + "/solr/" + cli.getOptionValue(COLLECTION_NAME_OPTION);
} else {
// think about support --zk-host someday.
throw new IllegalArgumentException("Must specify --solr-url.");
}
- String credentials = cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt());
+ String credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
Info info = new MultiThreadedRunner(url, credentials);
- info.query = cli.getOptionValue("query", "*:*");
+ info.query = cli.getOptionValue(QUERY_OPTION, "*:*");
info.setOutFormat(
- cli.getOptionValue("output"), cli.getOptionValue("format"), cli.hasOption("compress"));
- info.fields = cli.getOptionValue("fields");
- info.setLimit(cli.getOptionValue("limit", "100"));
+ cli.getOptionValue(OUTPUT_OPTION),
+ cli.getOptionValue(FORMAT_OPTION),
+ cli.hasOption(COMPRESS_OPTION));
+ info.fields = cli.getOptionValue(FIELDS_OPTION);
+ info.setLimit(cli.getOptionValue(LIMIT_OPTION, "100"));
info.output = super.stdout;
info.exportDocs();
}
diff --git a/solr/core/src/java/org/apache/solr/cli/HealthcheckTool.java b/solr/core/src/java/org/apache/solr/cli/HealthcheckTool.java
index 936344a6ee5..8a6b293e17a 100644
--- a/solr/core/src/java/org/apache/solr/cli/HealthcheckTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/HealthcheckTool.java
@@ -30,6 +30,7 @@
import java.util.Set;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.impl.CloudHttp2SolrClient;
@@ -53,19 +54,21 @@
public class HealthcheckTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("COLLECTION")
+ .required()
+ .desc("Name of the collection to check.")
+ .build();
+
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .hasArg()
- .argName("COLLECTION")
- .required(true)
- .desc("Name of the collection to check.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
enum ShardState {
@@ -86,7 +89,6 @@ public HealthcheckTool(PrintStream stdout) {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
if (zkHost == null) {
CLIO.err("Healthcheck tool only works in Solr Cloud mode.");
@@ -105,8 +107,7 @@ public String getName() {
}
protected void runCloudTool(CloudSolrClient cloudSolrClient, CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
- String collection = cli.getOptionValue("name");
+ String collection = cli.getOptionValue(COLLECTION_NAME_OPTION);
log.debug("Running healthcheck for {}", collection);
@@ -169,13 +170,13 @@ protected void runCloudTool(CloudSolrClient cloudSolrClient, CommandLine cli) th
q.set(DISTRIB, "false");
try (var solrClientForCollection =
CLIUtils.getSolrClient(
- coreUrl, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ coreUrl, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
qr = solrClientForCollection.query(q);
numDocs = qr.getResults().getNumFound();
try (var solrClient =
CLIUtils.getSolrClient(
replicaCoreProps.getBaseUrl(),
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
NamedList systemInfo =
solrClient.request(
new GenericSolrRequest(
diff --git a/solr/core/src/java/org/apache/solr/cli/LinkConfigTool.java b/solr/core/src/java/org/apache/solr/cli/LinkConfigTool.java
index 379fbfac811..619e1d82e5c 100644
--- a/solr/core/src/java/org/apache/solr/cli/LinkConfigTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/LinkConfigTool.java
@@ -18,10 +18,10 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.impl.SolrZkClientTimeout;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.cloud.SolrZkClient;
@@ -29,6 +29,24 @@
/** Supports linking a configset to a collection */
public class LinkConfigTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the collection to link.")
+ .build();
+
+ private static final Option CONF_NAME_OPTION =
+ Option.builder("n")
+ .longOpt("conf-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Configset name in ZooKeeper.")
+ .build();
+
public LinkConfigTool() {
this(CLIO.getOutStream());
}
@@ -48,30 +66,18 @@ public String getUsage() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the collection to link.")
- .build(),
- Option.builder("n")
- .longOpt("conf-name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Configset name in ZooKeeper.")
- .build(),
- SolrCLI.OPTION_ZKHOST);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(CONF_NAME_OPTION)
+ .addOption(CommonCLIOptions.ZK_HOST_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- String collection = cli.getOptionValue("name");
- String confName = cli.getOptionValue("conf-name");
+ String collection = cli.getOptionValue(COLLECTION_NAME_OPTION);
+ String confName = cli.getOptionValue(CONF_NAME_OPTION);
String zkHost = CLIUtils.getZkHost(cli);
try (SolrZkClient zkClient =
diff --git a/solr/core/src/java/org/apache/solr/cli/PackageTool.java b/solr/core/src/java/org/apache/solr/cli/PackageTool.java
index 3a337b4015e..b71c86896cb 100644
--- a/solr/core/src/java/org/apache/solr/cli/PackageTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/PackageTool.java
@@ -24,10 +24,10 @@
import java.lang.invoke.MethodHandles;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.lucene.util.SuppressForbidden;
@@ -49,6 +49,49 @@ public class PackageTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option COLLECTIONS_OPTION =
+ Option.builder()
+ .longOpt("collections")
+ .hasArg()
+ .argName("COLLECTIONS")
+ .desc(
+ "Specifies that this action should affect plugins for the given collections only, excluding cluster level plugins.")
+ .build();
+
+ private static final Option CLUSTER_OPTION =
+ Option.builder()
+ .longOpt("cluster")
+ .desc("Specifies that this action should affect cluster-level plugins only.")
+ .build();
+
+ private static final Option PARAM_OPTION =
+ Option.builder()
+ .longOpt("param")
+ .hasArgs()
+ .argName("PARAMS")
+ .desc("List of parameters to be used with deploy command.")
+ .build();
+
+ private static final Option UPDATE_OPTION =
+ Option.builder()
+ .longOpt("update")
+ .desc("If a deployment is an update over a previous deployment.")
+ .build();
+
+ private static final Option COLLECTION_OPTION =
+ Option.builder("c")
+ .longOpt("collection")
+ .hasArg()
+ .argName("COLLECTION")
+ .desc("The collection to apply the package to, not required.")
+ .build();
+
+ private static final Option NO_PROMPT_OPTION =
+ Option.builder("y")
+ .longOpt("no-prompt")
+ .desc("Don't prompt for input; accept all default choices, defaults to false.")
+ .build();
+
@SuppressForbidden(
reason = "Need to turn off logging, and SLF4J doesn't seem to provide for a way.")
public PackageTool() {
@@ -115,8 +158,8 @@ public void runImpl(CommandLine cli) throws Exception {
}
break;
case "list-deployed":
- if (cli.hasOption("collection")) {
- String collection = cli.getOptionValue("collection");
+ if (cli.hasOption(COLLECTION_OPTION)) {
+ String collection = cli.getOptionValue(COLLECTION_OPTION);
Map packages =
packageManager.getPackagesDeployed(collection);
printGreen("Packages deployed on " + collection + ":");
@@ -162,23 +205,23 @@ public void runImpl(CommandLine cli) throws Exception {
}
case "deploy":
{
- if (cli.hasOption("cluster") || cli.hasOption("collections")) {
+ if (cli.hasOption(CLUSTER_OPTION) || cli.hasOption(COLLECTIONS_OPTION)) {
Pair parsedVersion = parsePackageVersion(cli.getArgList().get(1));
String packageName = parsedVersion.first();
String version = parsedVersion.second();
- boolean noPrompt = cli.hasOption("no-prompt");
- boolean isUpdate = cli.hasOption("update");
+ boolean noPrompt = cli.hasOption(NO_PROMPT_OPTION);
+ boolean isUpdate = cli.hasOption(UPDATE_OPTION);
String[] collections =
- cli.hasOption("collections")
+ cli.hasOption(COLLECTIONS_OPTION)
? PackageUtils.validateCollections(
- cli.getOptionValue("collections").split(","))
+ cli.getOptionValue(COLLECTIONS_OPTION).split(","))
: new String[] {};
- String[] parameters = cli.getOptionValues("param");
+ String[] parameters = cli.getOptionValues(PARAM_OPTION);
packageManager.deploy(
packageName,
version,
collections,
- cli.hasOption("cluster"),
+ cli.hasOption(CLUSTER_OPTION),
parameters,
isUpdate,
noPrompt);
@@ -190,7 +233,7 @@ public void runImpl(CommandLine cli) throws Exception {
}
case "undeploy":
{
- if (cli.hasOption("cluster") || cli.hasOption("collections")) {
+ if (cli.hasOption(CLUSTER_OPTION) || cli.hasOption(COLLECTIONS_OPTION)) {
Pair parsedVersion = parsePackageVersion(cli.getArgList().get(1));
if (parsedVersion.second() != null) {
throw new SolrException(
@@ -200,13 +243,12 @@ public void runImpl(CommandLine cli) throws Exception {
}
String packageName = parsedVersion.first();
String[] collections =
- cli.hasOption("collections")
+ cli.hasOption(COLLECTIONS_OPTION)
? PackageUtils.validateCollections(
- cli.getOptionValue("collections").split(","))
+ cli.getOptionValue(COLLECTIONS_OPTION).split(","))
: new String[] {};
- packageManager.undeploy(packageName, collections, cli.hasOption("cluster"));
+ packageManager.undeploy(packageName, collections, cli.hasOption(CLUSTER_OPTION));
} else {
-
printRed(
"Either specify --cluster to undeploy cluster level plugins or -collections to undeploy collection level plugins");
}
@@ -318,41 +360,15 @@ private Pair parsePackageVersion(String arg) {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder()
- .longOpt("collections")
- .hasArg()
- .argName("COLLECTIONS")
- .desc(
- "Specifies that this action should affect plugins for the given collections only, excluding cluster level plugins.")
- .build(),
- Option.builder()
- .longOpt("cluster")
- .desc("Specifies that this action should affect cluster-level plugins only.")
- .build(),
- Option.builder()
- .longOpt("param")
- .hasArgs()
- .argName("PARAMS")
- .desc("List of parameters to be used with deploy command.")
- .build(),
- Option.builder()
- .longOpt("update")
- .desc("If a deployment is an update over a previous deployment.")
- .build(),
- Option.builder("c")
- .longOpt("collection")
- .hasArg()
- .argName("COLLECTION")
- .desc("The collection to apply the package to, not required.")
- .build(),
- Option.builder("y")
- .longOpt("no-prompt")
- .desc("Don't prompt for input; accept all default choices, defaults to false.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTIONS_OPTION)
+ .addOption(CLUSTER_OPTION)
+ .addOption(PARAM_OPTION)
+ .addOption(UPDATE_OPTION)
+ .addOption(COLLECTION_OPTION)
+ .addOption(NO_PROMPT_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
}
diff --git a/solr/core/src/java/org/apache/solr/cli/PostLogsTool.java b/solr/core/src/java/org/apache/solr/cli/PostLogsTool.java
index 85f39ce78f6..c14d9c216db 100644
--- a/solr/core/src/java/org/apache/solr/cli/PostLogsTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/PostLogsTool.java
@@ -37,6 +37,7 @@
import java.util.stream.Stream;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.Http2SolrClient;
import org.apache.solr.client.solrj.request.UpdateRequest;
@@ -48,6 +49,24 @@
/** A command line tool for indexing Solr logs in the out-of-the-box log format. */
public class PostLogsTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the collection.")
+ .build();
+
+ private static final Option ROOT_DIR_OPTION =
+ Option.builder()
+ .longOpt("rootdir")
+ .hasArg()
+ .argName("DIRECTORY")
+ .required()
+ .desc("All files found at or below the root directory will be indexed.")
+ .build();
+
public PostLogsTool() {
this(CLIO.getOutStream());
}
@@ -62,38 +81,26 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .hasArg()
- .required(true)
- .argName("NAME")
- .desc("Name of the collection.")
- .build(),
- Option.builder("rootdir")
- .longOpt("rootdir")
- .hasArg()
- .argName("DIRECTORY")
- .required(true)
- .desc("All files found at or below the root directory will be indexed.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(ROOT_DIR_OPTION)
+ .addOption(CommonCLIOptions.SOLR_URL_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
String url = null;
- if (cli.hasOption("solr-url")) {
- url = CLIUtils.normalizeSolrUrl(cli) + "/solr/" + cli.getOptionValue("name");
+ if (cli.hasOption(CommonCLIOptions.SOLR_URL_OPTION)) {
+ url = CLIUtils.normalizeSolrUrl(cli) + "/solr/" + cli.getOptionValue(COLLECTION_NAME_OPTION);
} else {
// Could be required arg, but maybe we want to support --zk-host option too?
throw new IllegalArgumentException("Must specify --solr-url.");
}
- String rootDir = cli.getOptionValue("rootdir");
- String credentials = cli.getOptionValue("credentials");
+ String rootDir = cli.getOptionValue(ROOT_DIR_OPTION);
+ String credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
runCommand(url, rootDir, credentials);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/PostTool.java b/solr/core/src/java/org/apache/solr/cli/PostTool.java
index 6574dd214bf..f6bc4b811f0 100644
--- a/solr/core/src/java/org/apache/solr/cli/PostTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/PostTool.java
@@ -68,6 +68,7 @@
import javax.xml.xpath.XPathFactory;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.commons.io.output.NullOutputStream;
import org.apache.solr.client.api.util.SolrVersion;
import org.apache.solr.client.solrj.SolrClient;
@@ -94,6 +95,93 @@ public class PostTool extends ToolBase {
private static final int MAX_WEB_DEPTH = 10;
public static final String DEFAULT_CONTENT_TYPE = "application/json";
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the collection.")
+ .build();
+
+ private static final Option SKIP_COMMIT_OPTION =
+ Option.builder()
+ .longOpt("skip-commit")
+ .desc("Do not 'commit', and thus changes won't be visible till a commit occurs.")
+ .build();
+
+ private static final Option OPTIMIZE_OPTION =
+ Option.builder("o")
+ .longOpt("optimize")
+ .desc("Issue an optimize at end of posting documents.")
+ .build();
+
+ private static final Option MODE_OPTION =
+ Option.builder()
+ .longOpt("mode")
+ .hasArg()
+ .argName("mode")
+ .desc(
+ "Which mode the Post tool is running in, 'files' crawls local directory, 'web' crawls website, 'args' processes input args, and 'stdin' reads a command from standard in. default: files.")
+ .build();
+
+ private static final Option RECURSIVE_OPTION =
+ Option.builder("r")
+ .longOpt("recursive")
+ .hasArg()
+ .argName("recursive")
+ .type(Integer.class)
+ .desc("For web crawl, how deep to go. default: 1")
+ .build();
+
+ private static final Option DELAY_OPTION =
+ Option.builder("d")
+ .longOpt("delay")
+ .hasArg()
+ .argName("delay")
+ .type(Integer.class)
+ .desc(
+ "If recursive then delay will be the wait time between posts. default: 10 for web, 0 for files")
+ .build();
+
+ private static final Option TYPE_OPTION =
+ Option.builder("t")
+ .longOpt("type")
+ .hasArg()
+ .argName("content-type")
+ .desc("Specify a specific mimetype to use, such as application/json.")
+ .build();
+
+ private static final Option FILE_TYPES_OPTION =
+ Option.builder("ft")
+ .longOpt("filetypes")
+ .hasArg()
+          .argName("<type>[,<type>,...]")
+ .desc("default: " + DEFAULT_FILE_TYPES)
+ .build();
+
+ private static final Option PARAMS_OPTION =
+ Option.builder()
+ .longOpt("params")
+ .hasArg()
+          .argName("<key>=<value>[&<key>=<value>...]")
+ .desc("Values must be URL-encoded; these pass through to Solr update request.")
+ .build();
+
+ private static final Option FORMAT_OPTION =
+ Option.builder()
+ .longOpt("format")
+ .desc(
+ "sends application/json content as Solr commands to /update instead of /update/json/docs.")
+ .build();
+
+ private static final Option DRY_RUN_OPTION =
+ Option.builder()
+ .longOpt("dry-run")
+ .desc(
+ "Performs a dry run of the posting process without actually sending documents to Solr. Only works with files mode.")
+ .build();
+
// Input args
int recursive = 0;
int delay = 0;
@@ -169,128 +257,68 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("c")
- .longOpt("name")
- .hasArg()
- .required(true)
- .argName("NAME")
- .desc("Name of the collection.")
- .build(),
- Option.builder()
- .longOpt("skip-commit")
- .desc("Do not 'commit', and thus changes won't be visible till a commit occurs.")
- .build(),
- Option.builder("o")
- .longOpt("optimize")
- .desc("Issue an optimize at end of posting documents.")
- .build(),
- Option.builder()
- .longOpt("mode")
- .hasArg()
- .argName("mode")
- .desc(
- "Which mode the Post tool is running in, 'files' crawls local directory, 'web' crawls website, 'args' processes input args, and 'stdin' reads a command from standard in. default: files.")
- .build(),
- Option.builder("r")
- .longOpt("recursive")
- .hasArg()
- .argName("recursive")
- .required(false)
- .desc("For web crawl, how deep to go. default: 1")
- .build(),
- Option.builder()
- .longOpt("delay")
- .hasArg()
- .argName("delay")
- .required(false)
- .desc(
- "If recursive then delay will be the wait time between posts. default: 10 for web, 0 for files")
- .build(),
- Option.builder("t")
- .longOpt("type")
- .hasArg()
- .argName("content-type")
- .required(false)
- .desc("Specify a specific mimetype to use, such as application/json.")
- .build(),
- Option.builder("ft")
- .longOpt("filetypes")
- .hasArg()
- .argName("[,,...]")
- .required(false)
- .desc("default: " + DEFAULT_FILE_TYPES)
- .build(),
- Option.builder()
- .longOpt("params")
- .hasArg()
- .argName("=[&=...]")
- .required(false)
- .desc("Values must be URL-encoded; these pass through to Solr update request.")
- .build(),
- Option.builder()
- .longOpt("format")
- .required(false)
- .desc(
- "sends application/json content as Solr commands to /update instead of /update/json/docs.")
- .build(),
- Option.builder()
- .longOpt("dry-run")
- .required(false)
- .desc(
- "Performs a dry run of the posting process without actually sending documents to Solr. Only works with files mode.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SKIP_COMMIT_OPTION)
+ .addOption(OPTIMIZE_OPTION)
+ .addOption(MODE_OPTION)
+ .addOption(RECURSIVE_OPTION)
+ .addOption(DELAY_OPTION)
+ .addOption(TYPE_OPTION)
+ .addOption(FILE_TYPES_OPTION)
+ .addOption(PARAMS_OPTION)
+ .addOption(FORMAT_OPTION)
+ .addOption(DRY_RUN_OPTION)
+ .addOption(CommonCLIOptions.SOLR_URL_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
solrUpdateUrl = null;
- if (cli.hasOption("solr-url")) {
+ if (cli.hasOption(CommonCLIOptions.SOLR_URL_OPTION)) {
String url =
- CLIUtils.normalizeSolrUrl(cli) + "/solr/" + cli.getOptionValue("name") + "/update";
+ CLIUtils.normalizeSolrUrl(cli)
+ + "/solr/"
+ + cli.getOptionValue(COLLECTION_NAME_OPTION)
+ + "/update";
solrUpdateUrl = new URI(url);
} else {
- String url = CLIUtils.getDefaultSolrUrl() + "/solr/" + cli.getOptionValue("name") + "/update";
+ String url =
+ CLIUtils.getDefaultSolrUrl()
+ + "/solr/"
+ + cli.getOptionValue(COLLECTION_NAME_OPTION)
+ + "/update";
solrUpdateUrl = new URI(url);
}
- String mode = cli.getOptionValue("mode", DATA_MODE_FILES);
+ String mode = cli.getOptionValue(MODE_OPTION, DATA_MODE_FILES);
- dryRun = cli.hasOption("dry-run");
+ dryRun = cli.hasOption(DRY_RUN_OPTION);
- if (cli.hasOption("type")) {
- type = cli.getOptionValue("type");
+ if (cli.hasOption(TYPE_OPTION)) {
+ type = cli.getOptionValue(TYPE_OPTION);
// Turn off automatically looking up the mimetype in favour of what is passed in.
auto = false;
}
- format = cli.hasOption("format") ? FORMAT_SOLR : ""; // i.e not solr formatted json commands
-
- if (cli.hasOption("filetypes")) {
- fileTypes = cli.getOptionValue("filetypes");
- }
-
- delay = (mode.equals((DATA_MODE_WEB)) ? 10 : 0);
- if (cli.hasOption("delay")) {
- delay = Integer.parseInt(cli.getOptionValue("delay"));
- }
-
- recursive = Integer.parseInt(cli.getOptionValue("recursive", "1"));
+ format =
+        cli.hasOption(FORMAT_OPTION) ? FORMAT_SOLR : ""; // i.e. not Solr-formatted JSON commands
+ fileTypes = cli.getOptionValue(FILE_TYPES_OPTION, PostTool.DEFAULT_FILE_TYPES);
- out = cli.hasOption(SolrCLI.OPTION_VERBOSE.getLongOpt()) ? CLIO.getOutStream() : null;
- commit = !cli.hasOption("skip-commit");
- optimize = cli.hasOption("optimize");
+ int defaultDelay = (mode.equals((DATA_MODE_WEB)) ? 10 : 0);
+ delay = cli.getParsedOptionValue(DELAY_OPTION, defaultDelay);
+ recursive = cli.getParsedOptionValue(RECURSIVE_OPTION, 1);
- credentials = cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt());
+ out = isVerbose() ? CLIO.getOutStream() : null;
+ commit = !cli.hasOption(SKIP_COMMIT_OPTION);
+ optimize = cli.hasOption(OPTIMIZE_OPTION);
+ credentials = cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION);
args = cli.getArgs();
- params = cli.getOptionValue("params", "");
+ params = cli.getOptionValue(PARAMS_OPTION, "");
execute(mode);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
index 149dff43530..fff3c04f5c5 100644
--- a/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/RunExampleTool.java
@@ -28,7 +28,6 @@
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
-import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
@@ -37,6 +36,7 @@
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.commons.exec.DefaultExecuteResultHandler;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
@@ -67,6 +67,99 @@ public class RunExampleTool extends ToolBase {
private static final String PROMPT_NUMBER_TOO_LARGE =
"%d is too large! " + PROMPT_FOR_NUMBER_IN_RANGE;
+ private static final Option NO_PROMPT_OPTION =
+ Option.builder("y")
+ .longOpt("no-prompt")
+ .desc(
+ "Don't prompt for input; accept all defaults when running examples that accept user input.")
+ .build();
+
+ private static final Option EXAMPLE_OPTION =
+ Option.builder("e")
+ .longOpt("example")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the example to launch, one of: cloud, techproducts, schemaless, films.")
+ .build();
+
+ private static final Option SCRIPT_OPTION =
+ Option.builder()
+ .longOpt("script")
+ .hasArg()
+ .argName("PATH")
+ .desc("Path to the bin/solr script.")
+ .build();
+
+ private static final Option SERVER_DIR_OPTION =
+ Option.builder("d")
+ .longOpt("server-dir")
+ .hasArg()
+ .argName("DIR")
+ .required()
+ .desc("Path to the Solr server directory.")
+ .build();
+
+ private static final Option FORCE_OPTION =
+ Option.builder("f")
+ .longOpt("force")
+ .argName("FORCE")
+ .desc("Force option in case Solr is run as root.")
+ .build();
+
+ private static final Option EXAMPLE_DIR_OPTION =
+ Option.builder()
+ .longOpt("example-dir")
+ .hasArg()
+ .argName("DIR")
+ .desc(
+ "Path to the Solr example directory; if not provided, ${serverDir}/../example is expected to exist.")
+ .build();
+
+ private static final Option URL_SCHEME_OPTION =
+ Option.builder()
+ .longOpt("url-scheme")
+ .hasArg()
+ .argName("SCHEME")
+ .desc("Solr URL scheme: http or https, defaults to http if not specified.")
+ .build();
+
+ private static final Option PORT_OPTION =
+ Option.builder("p")
+ .longOpt("port")
+ .hasArg()
+ .argName("PORT")
+ .desc("Specify the port to start the Solr HTTP listener on; default is 8983.")
+ .build();
+
+ private static final Option HOST_OPTION =
+ Option.builder()
+ .longOpt("host")
+ .hasArg()
+ .argName("HOSTNAME")
+ .desc("Specify the hostname for this Solr instance.")
+ .build();
+
+ private static final Option USER_MANAGED_OPTION =
+ Option.builder().longOpt("user-managed").desc("Start Solr in User Managed mode.").build();
+
+ private static final Option MEMORY_OPTION =
+ Option.builder("m")
+ .longOpt("memory")
+ .hasArg()
+ .argName("MEM")
+ .desc(
+ "Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m.")
+ .build();
+
+ private static final Option JVM_OPTS_OPTION =
+ Option.builder()
+ .longOpt("jvm-opts")
+ .hasArg()
+ .argName("OPTS")
+ .desc("Additional options to be passed to the JVM when starting example Solr server(s).")
+ .build();
+
protected InputStream userInput;
protected Executor executor;
protected String script;
@@ -91,105 +184,35 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder("y")
- .longOpt("no-prompt")
- .required(false)
- .desc(
- "Don't prompt for input; accept all defaults when running examples that accept user input.")
- .build(),
- Option.builder("e")
- .longOpt("example")
- .hasArg()
- .argName("NAME")
- .required(true)
- .desc("Name of the example to launch, one of: cloud, techproducts, schemaless, films.")
- .build(),
- Option.builder()
- .longOpt("script")
- .hasArg()
- .argName("PATH")
- .required(false)
- .desc("Path to the bin/solr script.")
- .build(),
- Option.builder()
- .longOpt("server-dir")
- .hasArg()
- .argName("DIR")
- .required(true)
- .desc("Path to the Solr server directory.")
- .build(),
- Option.builder("f")
- .longOpt("force")
- .argName("FORCE")
- .desc("Force option in case Solr is run as root.")
- .build(),
- Option.builder()
- .longOpt("example-dir")
- .hasArg()
- .argName("DIR")
- .required(false)
- .desc(
- "Path to the Solr example directory; if not provided, ${serverDir}/../example is expected to exist.")
- .build(),
- Option.builder()
- .longOpt("url-scheme")
- .hasArg()
- .argName("SCHEME")
- .required(false)
- .desc("Solr URL scheme: http or https, defaults to http if not specified.")
- .build(),
- Option.builder("p")
- .longOpt("port")
- .hasArg()
- .argName("PORT")
- .required(false)
- .desc("Specify the port to start the Solr HTTP listener on; default is 8983.")
- .build(),
- Option.builder()
- .longOpt("host")
- .hasArg()
- .argName("HOSTNAME")
- .required(false)
- .desc("Specify the hostname for this Solr instance.")
- .build(),
- Option.builder()
- .longOpt("user-managed")
- .required(false)
- .desc("Start Solr in User Managed mode.")
- .build(),
- Option.builder("m")
- .longOpt("memory")
- .hasArg()
- .argName("MEM")
- .required(false)
- .desc(
- "Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m.")
- .build(),
- Option.builder()
- .longOpt("jvm-opts")
- .hasArg()
- .argName("OPTS")
- .required(false)
- .desc(
- "Additional options to be passed to the JVM when starting example Solr server(s).")
- .build(),
- SolrCLI.OPTION_ZKHOST);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(NO_PROMPT_OPTION)
+ .addOption(EXAMPLE_OPTION)
+ .addOption(SCRIPT_OPTION)
+ .addOption(SERVER_DIR_OPTION)
+ .addOption(FORCE_OPTION)
+ .addOption(EXAMPLE_DIR_OPTION)
+ .addOption(URL_SCHEME_OPTION)
+ .addOption(PORT_OPTION)
+ .addOption(HOST_OPTION)
+ .addOption(USER_MANAGED_OPTION)
+ .addOption(MEMORY_OPTION)
+ .addOption(JVM_OPTS_OPTION)
+ .addOption(CommonCLIOptions.ZK_HOST_OPTION);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- this.urlScheme = cli.getOptionValue("url-scheme", "http");
+ this.urlScheme = cli.getOptionValue(URL_SCHEME_OPTION, "http");
- serverDir = new File(cli.getOptionValue("server-dir"));
+ serverDir = new File(cli.getOptionValue(SERVER_DIR_OPTION));
if (!serverDir.isDirectory())
throw new IllegalArgumentException(
"Value of --server-dir option is invalid! "
+ serverDir.getAbsolutePath()
+ " is not a directory!");
- script = cli.getOptionValue("script");
+ script = cli.getOptionValue(SCRIPT_OPTION);
if (script != null) {
if (!(new File(script)).isFile())
throw new IllegalArgumentException(
@@ -210,8 +233,8 @@ public void runImpl(CommandLine cli) throws Exception {
}
exampleDir =
- (cli.hasOption("example-dir"))
- ? new File(cli.getOptionValue("example-dir"))
+ (cli.hasOption(EXAMPLE_DIR_OPTION))
+ ? new File(cli.getOptionValue(EXAMPLE_DIR_OPTION))
: new File(serverDir.getParent(), "example");
if (!exampleDir.isDirectory())
throw new IllegalArgumentException(
@@ -227,7 +250,7 @@ public void runImpl(CommandLine cli) throws Exception {
+ "\nscript="
+ script);
- String exampleType = cli.getOptionValue("example");
+ String exampleType = cli.getOptionValue(EXAMPLE_OPTION);
if ("cloud".equals(exampleType)) {
runCloudExample(cli);
} else if ("techproducts".equals(exampleType)
@@ -248,11 +271,11 @@ protected void runExample(CommandLine cli, String exampleName) throws Exception
String configSet =
"techproducts".equals(exampleName) ? "sample_techproducts_configs" : "_default";
- boolean isCloudMode = !cli.hasOption("user-managed");
- String zkHost = cli.getOptionValue('z');
+ boolean isCloudMode = !cli.hasOption(USER_MANAGED_OPTION);
+ String zkHost = cli.getOptionValue(CommonCLIOptions.ZK_HOST_OPTION);
int port =
Integer.parseInt(
- cli.getOptionValue('p', System.getenv().getOrDefault("SOLR_PORT", "8983")));
+ cli.getOptionValue(PORT_OPTION, System.getenv().getOrDefault("SOLR_PORT", "8983")));
Map nodeStatus =
startSolr(new File(exDir, "solr"), isCloudMode, cli, port, zkHost, 30);
@@ -264,7 +287,7 @@ protected void runExample(CommandLine cli, String exampleName) throws Exception
boolean cloudMode = nodeStatus.get("cloud") != null;
if (cloudMode) {
if (CLIUtils.safeCheckCollectionExists(
- solrUrl, collectionName, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ solrUrl, collectionName, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
alreadyExists = true;
echo(
"\nWARNING: Collection '"
@@ -274,7 +297,7 @@ protected void runExample(CommandLine cli, String exampleName) throws Exception
} else {
String coreName = collectionName;
if (CLIUtils.safeCheckCoreExists(
- solrUrl, coreName, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()))) {
+ solrUrl, coreName, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION))) {
alreadyExists = true;
echo(
"\nWARNING: Core '"
@@ -433,12 +456,12 @@ protected void runExample(CommandLine cli, String exampleName) throws Exception
protected void runCloudExample(CommandLine cli) throws Exception {
- boolean prompt = !cli.hasOption("no-prompt");
+ boolean prompt = !cli.hasOption(NO_PROMPT_OPTION);
int numNodes = 2;
int[] cloudPorts = new int[] {8983, 7574, 8984, 7575};
int defaultPort =
Integer.parseInt(
- cli.getOptionValue('p', System.getenv().getOrDefault("SOLR_PORT", "8983")));
+ cli.getOptionValue(PORT_OPTION, System.getenv().getOrDefault("SOLR_PORT", "8983")));
if (defaultPort != 8983) {
// Override the old default port numbers if user has started the example overriding SOLR_PORT
cloudPorts = new int[] {defaultPort, defaultPort + 1, defaultPort + 2, defaultPort + 3};
@@ -502,7 +525,7 @@ protected void runCloudExample(CommandLine cli) throws Exception {
}
// deal with extra args passed to the script to run the example
- String zkHost = cli.getOptionValue('z');
+ String zkHost = cli.getOptionValue(CommonCLIOptions.ZK_HOST_OPTION);
// start the first node (most likely with embedded ZK)
Map nodeStatus =
@@ -539,7 +562,7 @@ protected void runCloudExample(CommandLine cli) throws Exception {
readInput,
prompt,
solrUrl,
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()));
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION));
echo("\n\nSolrCloud example running, please visit: " + solrUrl + " \n");
}
@@ -593,18 +616,17 @@ protected Map startSolr(
String extraArgs = readExtraArgs(cli.getArgs());
- String host = cli.getOptionValue("host");
- String memory = cli.getOptionValue('m');
+ String host = cli.getOptionValue(HOST_OPTION);
+ String memory = cli.getOptionValue(MEMORY_OPTION);
String hostArg = (host != null && !"localhost".equals(host)) ? " --host " + host : "";
String zkHostArg = (zkHost != null) ? " -z " + zkHost : "";
String memArg = (memory != null) ? " -m " + memory : "";
String cloudModeArg = cloudMode ? "" : "--user-managed";
- String forceArg = cli.hasOption("force") ? " --force" : "";
- String verboseArg = verbose ? "--verbose" : "";
+ String forceArg = cli.hasOption(FORCE_OPTION) ? " --force" : "";
+ String verboseArg = isVerbose() ? "--verbose" : "";
- String jvmOpts =
- cli.hasOption("jvm-opts") ? cli.getOptionValue("jvm-opts") : cli.getOptionValue('a');
+ String jvmOpts = cli.getOptionValue(JVM_OPTS_OPTION);
String jvmOptsArg = (jvmOpts != null) ? " --jvm-opts \"" + jvmOpts + "\"" : "";
File cwd = new File(System.getProperty("user.dir"));
@@ -694,7 +716,7 @@ protected Map startSolr(
if (code != 0) throw new Exception("Failed to start Solr using command: " + startCmd);
return getNodeStatus(
- solrUrl, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()), maxWaitSecs);
+ solrUrl, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION), maxWaitSecs);
}
protected Map checkPortConflict(
@@ -985,7 +1007,7 @@ protected Integer promptForInt(
}
} catch (NumberFormatException nfe) {
- if (verbose) echo(value + " is not a number!");
+ if (isVerbose()) echo(value + " is not a number!");
if (min != null && max != null) {
value =
diff --git a/solr/core/src/java/org/apache/solr/cli/SnapshotCreateTool.java b/solr/core/src/java/org/apache/solr/cli/SnapshotCreateTool.java
index df232d4415a..49cce02a15c 100644
--- a/solr/core/src/java/org/apache/solr/cli/SnapshotCreateTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/SnapshotCreateTool.java
@@ -17,9 +17,9 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
@@ -27,6 +27,24 @@
/** Supports snapshot-create command in the bin/solr script. */
public class SnapshotCreateTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection to be snapshot.")
+ .build();
+
+ private static final Option SNAPSHOT_NAME_OPTION =
+ Option.builder()
+ .longOpt("snapshot-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the snapshot to produce")
+ .build();
+
public SnapshotCreateTool() {
this(CLIO.getOutStream());
}
@@ -41,33 +59,18 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_SOLRURL,
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of collection to be snapshot.")
- .build(),
- Option.builder()
- .longOpt("snapshot-name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the snapshot to produce")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SNAPSHOT_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- String snapshotName = cli.getOptionValue("snapshot-name");
- String collectionName = cli.getOptionValue("name");
+ String snapshotName = cli.getOptionValue(SNAPSHOT_NAME_OPTION);
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
try (var solrClient = CLIUtils.getSolrClient(cli)) {
createSnapshot(solrClient, collectionName, snapshotName);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/SnapshotDeleteTool.java b/solr/core/src/java/org/apache/solr/cli/SnapshotDeleteTool.java
index 040d251648b..c9dc90abab7 100644
--- a/solr/core/src/java/org/apache/solr/cli/SnapshotDeleteTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/SnapshotDeleteTool.java
@@ -17,9 +17,9 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
@@ -27,6 +27,24 @@
/** Supports snapshot-delete command in the bin/solr script. */
public class SnapshotDeleteTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection to manage.")
+ .build();
+
+ private static final Option SNAPSHOT_NAME_OPTION =
+ Option.builder()
+ .longOpt("snapshot-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the snapshot to delete")
+ .build();
+
public SnapshotDeleteTool() {
this(CLIO.getOutStream());
}
@@ -41,33 +59,18 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_SOLRURL,
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of collection to manage.")
- .build(),
- Option.builder()
- .longOpt("snapshot-name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the snapshot to delete")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SNAPSHOT_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- String snapshotName = cli.getOptionValue("snapshot-name");
- String collectionName = cli.getOptionValue("name");
+ String snapshotName = cli.getOptionValue(SNAPSHOT_NAME_OPTION);
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
try (var solrClient = CLIUtils.getSolrClient(cli)) {
deleteSnapshot(solrClient, collectionName, snapshotName);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/SnapshotDescribeTool.java b/solr/core/src/java/org/apache/solr/cli/SnapshotDescribeTool.java
index cd9d1da6f5a..dcae8619f3e 100644
--- a/solr/core/src/java/org/apache/solr/cli/SnapshotDescribeTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/SnapshotDescribeTool.java
@@ -22,10 +22,10 @@
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.List;
import java.util.Locale;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
@@ -37,6 +37,24 @@
/** Supports snapshot-describe command in the bin/solr script. */
public class SnapshotDescribeTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection to be snapshot.")
+ .build();
+
+ private static final Option SNAPSHOT_NAME_OPTION =
+ Option.builder()
+ .longOpt("snapshot-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the snapshot to describe")
+ .build();
+
public SnapshotDescribeTool() {
this(CLIO.getOutStream());
}
@@ -54,33 +72,18 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_SOLRURL,
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of collection to be snapshot.")
- .build(),
- Option.builder()
- .longOpt("snapshot-name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the snapshot to describe")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SNAPSHOT_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- String snapshotName = cli.getOptionValue("snapshot-name");
- String collectionName = cli.getOptionValue("name");
+ String snapshotName = cli.getOptionValue(SNAPSHOT_NAME_OPTION);
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
try (var solrClient = CLIUtils.getSolrClient(cli)) {
describeSnapshot(solrClient, collectionName, snapshotName);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/SnapshotExportTool.java b/solr/core/src/java/org/apache/solr/cli/SnapshotExportTool.java
index e6a689e9f8c..6b8cf3a45d0 100644
--- a/solr/core/src/java/org/apache/solr/cli/SnapshotExportTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/SnapshotExportTool.java
@@ -17,10 +17,10 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import java.util.Optional;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.params.CollectionAdminParams;
@@ -28,6 +28,51 @@
/** Supports snapshot-export command in the bin/solr script. */
public class SnapshotExportTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection to be snapshot.")
+ .build();
+
+ private static final Option SNAPSHOT_NAME_OPTION =
+ Option.builder()
+ .longOpt("snapshot-name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of the snapshot to be exported.")
+ .build();
+
+ private static final Option DEST_DIR_OPTION =
+ Option.builder()
+ .longOpt("dest-dir")
+ .hasArg()
+ .argName("DIR")
+ .required()
+ .desc("Path of a temporary directory on local filesystem during snapshot export command.")
+ .build();
+
+ private static final Option BACKUP_REPO_NAME_OPTION =
+ Option.builder()
+ .longOpt("backup-repo-name")
+ .hasArg()
+ .argName("DIR")
+ .desc(
+ "Specifies name of the backup repository to be used during snapshot export preparation.")
+ .build();
+
+ private static final Option ASYNC_ID_OPTION =
+ Option.builder()
+ .longOpt("async-id")
+ .hasArg()
+ .argName("ID")
+ .desc(
+ "Specifies the async request identifier to be used during snapshot export preparation.")
+ .build();
+
public SnapshotExportTool() {
this(CLIO.getOutStream());
}
@@ -42,60 +87,24 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_SOLRURL,
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of collection to be snapshot.")
- .build(),
- Option.builder()
- .longOpt("snapshot-name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of the snapshot to be exported.")
- .build(),
- Option.builder()
- .longOpt("dest-dir")
- .argName("DIR")
- .hasArg()
- .required(true)
- .desc(
- "Path of a temporary directory on local filesystem during snapshot export command.")
- .build(),
- Option.builder()
- .longOpt("backup-repo-name")
- .argName("DIR")
- .hasArg()
- .required(false)
- .desc(
- "Specifies name of the backup repository to be used during snapshot export preparation.")
- .build(),
- Option.builder()
- .longOpt("async-id")
- .argName("ID")
- .hasArg()
- .required(false)
- .desc(
- "Specifies the async request identifier to be used during snapshot export preparation.")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(SNAPSHOT_NAME_OPTION)
+ .addOption(DEST_DIR_OPTION)
+ .addOption(BACKUP_REPO_NAME_OPTION)
+ .addOption(ASYNC_ID_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- String snapshotName = cli.getOptionValue("snapshot-name");
- String collectionName = cli.getOptionValue("name");
- String destDir = cli.getOptionValue("dest-dir");
- Optional backupRepo = Optional.ofNullable(cli.getOptionValue("backup-repo-name"));
- Optional asyncReqId = Optional.ofNullable(cli.getOptionValue("async-id"));
+ String snapshotName = cli.getOptionValue(SNAPSHOT_NAME_OPTION);
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
+ String destDir = cli.getOptionValue(DEST_DIR_OPTION);
+ Optional backupRepo = Optional.ofNullable(cli.getOptionValue(BACKUP_REPO_NAME_OPTION));
+ Optional asyncReqId = Optional.ofNullable(cli.getOptionValue(ASYNC_ID_OPTION));
try (var solrClient = CLIUtils.getSolrClient(cli)) {
exportSnapshot(solrClient, collectionName, snapshotName, destDir, backupRepo, asyncReqId);
diff --git a/solr/core/src/java/org/apache/solr/cli/SnapshotListTool.java b/solr/core/src/java/org/apache/solr/cli/SnapshotListTool.java
index e49335e03f2..4b952199e1b 100644
--- a/solr/core/src/java/org/apache/solr/cli/SnapshotListTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/SnapshotListTool.java
@@ -17,9 +17,9 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.response.CollectionAdminResponse;
@@ -29,6 +29,15 @@
/** Supports snapshot-list command in the bin/solr script. */
public class SnapshotListTool extends ToolBase {
+ private static final Option COLLECTION_NAME_OPTION =
+ Option.builder("c")
+ .longOpt("name")
+ .hasArg()
+ .argName("NAME")
+ .required()
+ .desc("Name of collection to list snapshots for.")
+ .build();
+
public SnapshotListTool() {
this(CLIO.getOutStream());
}
@@ -43,25 +52,16 @@ public String getName() {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_SOLRURL,
- Option.builder("c")
- .longOpt("name")
- .argName("NAME")
- .hasArg()
- .required(true)
- .desc("Name of collection to list snapshots for.")
- .build(),
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(COLLECTION_NAME_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- String collectionName = cli.getOptionValue("name");
+ String collectionName = cli.getOptionValue(COLLECTION_NAME_OPTION);
try (var solrClient = CLIUtils.getSolrClient(cli)) {
listSnapshots(solrClient, collectionName);
}
diff --git a/solr/core/src/java/org/apache/solr/cli/SolrCLI.java b/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
index de6db46b80e..db12fbc03ba 100755
--- a/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
+++ b/solr/core/src/java/org/apache/solr/cli/SolrCLI.java
@@ -41,7 +41,6 @@
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.common.util.ContentStreamBase;
import org.apache.solr.common.util.NamedList;
-import org.apache.solr.util.StartupLoggingUtils;
import org.apache.solr.util.configuration.SSLConfigurationsFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,58 +50,6 @@ public class SolrCLI implements CLIO {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
- public static final String ZK_HOST = "localhost:9983";
-
- public static final Option OPTION_ZKHOST =
- Option.builder("z")
- .longOpt("zk-host")
- .argName("HOST")
- .hasArg()
- .required(false)
- .desc(
- "Zookeeper connection string; unnecessary if ZK_HOST is defined in solr.in.sh; otherwise, defaults to "
- + ZK_HOST
- + '.')
- .build();
- public static final Option OPTION_SOLRURL =
- Option.builder("s")
- .longOpt("solr-url")
- .argName("HOST")
- .hasArg()
- .required(false)
- .desc(
- "Base Solr URL, which can be used to determine the zk-host if that's not known; defaults to: "
- + CLIUtils.getDefaultSolrUrl()
- + '.')
- .build();
-
- public static final Option OPTION_VERBOSE =
- Option.builder()
- .longOpt("verbose")
- .required(false)
- .desc("Enable verbose command output.")
- .build();
-
- public static final Option OPTION_HELP =
- Option.builder("h").longOpt("help").required(false).desc("Print this message.").build();
-
- public static final Option OPTION_RECURSIVE =
- Option.builder("r")
- .longOpt("recursive")
- .required(false)
- .desc("Apply the command recursively.")
- .build();
-
- public static final Option OPTION_CREDENTIALS =
- Option.builder("u")
- .longOpt("credentials")
- .argName("credentials")
- .hasArg()
- .required(false)
- .desc(
- "Credentials in the format username:password. Example: --credentials solr:SolrRocks")
- .build();
-
public static void exit(int exitStatus) {
try {
System.exit(exitStatus);
@@ -230,12 +177,6 @@ protected static void checkSslStoreSysProp(String solrInstallDir, String key) {
}
}
- public static void raiseLogLevelUnlessVerbose(CommandLine cli) {
- if (!cli.hasOption(SolrCLI.OPTION_VERBOSE.getLongOpt())) {
- StartupLoggingUtils.changeLogLevel("WARN");
- }
- }
-
// Creates an instance of the requested tool, using classpath scanning if necessary
private static Tool newTool(String toolType) throws Exception {
if ("healthcheck".equals(toolType)) return new HealthcheckTool();
@@ -280,21 +221,6 @@ private static Tool newTool(String toolType) throws Exception {
throw new IllegalArgumentException(toolType + " is not a valid command!");
}
- /** Returns tool options for given tool, for usage display purposes. Hides deprecated options. */
- public static Options getToolOptions(Tool tool) {
- Options options = new Options();
- options.addOption(OPTION_HELP);
- options.addOption(OPTION_VERBOSE);
-
- List toolOpts = tool.getOptions();
- for (Option toolOpt : toolOpts) {
- if (!toolOpt.isDeprecated()) {
- options.addOption(toolOpt);
- }
- }
- return options;
- }
-
/**
* Returns the value of the option with the given name, or the value of the deprecated option. If
* both values are null, then it returns the default value.
@@ -329,17 +255,7 @@ private static void deprecatedHandlerStdErr(Option o) {
/** Parses the command-line arguments passed by the user. */
public static CommandLine processCommandLineArgs(Tool tool, String[] args) {
- List customOptions = tool.getOptions();
- Options options = new Options();
-
- options.addOption(OPTION_HELP);
- options.addOption(OPTION_VERBOSE);
-
- if (customOptions != null) {
- for (Option customOption : customOptions) {
- options.addOption(customOption);
- }
- }
+ Options options = tool.getOptions();
CommandLine cli = null;
try {
@@ -369,7 +285,7 @@ public static CommandLine processCommandLineArgs(Tool tool, String[] args) {
}
}
- if (cli.hasOption("help")) {
+ if (cli.hasOption(CommonCLIOptions.HELP_OPTION)) {
printToolHelp(tool);
exit(0);
}
@@ -380,16 +296,18 @@ public static CommandLine processCommandLineArgs(Tool tool, String[] args) {
/** Prints tool help for a given tool */
public static void printToolHelp(Tool tool) {
HelpFormatter formatter = getFormatter();
- Options optionsNoDeprecated = new Options();
- SolrCLI.getToolOptions(tool).getOptions().stream()
+ Options nonDeprecatedOptions = new Options();
+
+ tool.getOptions().getOptions().stream()
.filter(option -> !option.isDeprecated())
- .forEach(optionsNoDeprecated::addOption);
+ .forEach(nonDeprecatedOptions::addOption);
+
String usageString = tool.getUsage() == null ? "bin/solr " + tool.getName() : tool.getUsage();
boolean autoGenerateUsage = tool.getUsage() == null;
formatter.printHelp(
usageString,
"\n" + tool.getHeader(),
- optionsNoDeprecated,
+ nonDeprecatedOptions,
tool.getFooter(),
autoGenerateUsage);
}
@@ -458,8 +376,6 @@ public static NamedList postJsonToSolr(
return solrClient.request(req);
}
- public static final String DEFAULT_CONFIG_SET = "_default";
-
private static final long MS_IN_MIN = 60 * 1000L;
private static final long MS_IN_HOUR = MS_IN_MIN * 60L;
private static final long MS_IN_DAY = MS_IN_HOUR * 24L;
diff --git a/solr/core/src/java/org/apache/solr/cli/StatusTool.java b/solr/core/src/java/org/apache/solr/cli/StatusTool.java
index dba8e2fb5c2..45713fbc085 100644
--- a/solr/core/src/java/org/apache/solr/cli/StatusTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/StatusTool.java
@@ -17,8 +17,6 @@
package org.apache.solr.cli;
-import static org.apache.solr.cli.SolrCLI.OPTION_SOLRURL;
-
import java.io.PrintStream;
import java.util.Collection;
import java.util.LinkedHashMap;
@@ -30,6 +28,8 @@
import java.util.concurrent.TimeoutException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
import org.apache.solr.cli.SolrProcessManager.SolrProcess;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
@@ -47,65 +47,67 @@
* Get the status of a Solr server.
*/
public class StatusTool extends ToolBase {
- private final SolrProcessManager processMgr;
-
- public StatusTool() {
- this(CLIO.getOutStream());
- }
- public StatusTool(PrintStream stdout) {
- super(stdout);
- processMgr = new SolrProcessManager();
- }
-
- @Override
- public String getName() {
- return "status";
- }
-
- private static final Option OPTION_MAXWAITSECS =
+ private static final Option MAX_WAIT_SECS_OPTION =
Option.builder()
.longOpt("max-wait-secs")
- .argName("SECS")
.hasArg()
- .required(false)
+ .argName("SECS")
+ .type(Integer.class)
.deprecated() // Will make it a stealth option, not printed or complained about
.desc("Wait up to the specified number of seconds to see Solr running.")
.build();
- public static final Option OPTION_PORT =
+ public static final Option PORT_OPTION =
Option.builder("p")
.longOpt("port")
- .argName("PORT")
- .required(false)
.hasArg()
+ .argName("PORT")
+ .type(Integer.class)
.desc("Port on localhost to check status for")
.build();
- public static final Option OPTION_SHORT =
+ public static final Option SHORT_OPTION =
Option.builder()
.longOpt("short")
.argName("SHORT")
- .required(false)
.desc("Short format. Prints one URL per line for running instances")
.build();
+ private final SolrProcessManager processMgr;
+
+ public StatusTool() {
+ this(CLIO.getOutStream());
+ }
+
+ public StatusTool(PrintStream stdout) {
+ super(stdout);
+ processMgr = new SolrProcessManager();
+ }
+
@Override
- public List getOptions() {
- return List.of(OPTION_SOLRURL, OPTION_MAXWAITSECS, OPTION_PORT, OPTION_SHORT);
+ public String getName() {
+ return "status";
+ }
+
+ @Override
+ public Options getOptions() {
+ OptionGroup optionGroup = new OptionGroup();
+ optionGroup.addOption(PORT_OPTION);
+ optionGroup.addOption(CommonCLIOptions.SOLR_URL_OPTION);
+ return super.getOptions()
+ .addOption(MAX_WAIT_SECS_OPTION)
+ .addOption(SHORT_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(optionGroup);
}
@Override
public void runImpl(CommandLine cli) throws Exception {
- String solrUrl = cli.getOptionValue(OPTION_SOLRURL);
- Integer port =
- cli.hasOption(OPTION_PORT) ? Integer.parseInt(cli.getOptionValue(OPTION_PORT)) : null;
- boolean shortFormat = cli.hasOption(OPTION_SHORT);
- int maxWaitSecs = Integer.parseInt(cli.getOptionValue("max-wait-secs", "0"));
-
- if (port != null && solrUrl != null) {
- throw new IllegalArgumentException("Only one of port or url can be specified");
- }
+ String solrUrl = cli.getOptionValue(CommonCLIOptions.SOLR_URL_OPTION);
+ Integer port = cli.hasOption(PORT_OPTION) ? cli.getParsedOptionValue(PORT_OPTION) : null;
+ boolean shortFormat = cli.hasOption(SHORT_OPTION);
+ int maxWaitSecs = cli.getParsedOptionValue(MAX_WAIT_SECS_OPTION, 0);
if (solrUrl != null) {
if (!URLUtil.hasScheme(solrUrl)) {
@@ -163,8 +165,8 @@ public void runImpl(CommandLine cli) throws Exception {
}
private void printProcessStatus(SolrProcess process, CommandLine cli) throws Exception {
- int maxWaitSecs = Integer.parseInt(cli.getOptionValue("max-wait-secs", "0"));
- boolean shortFormat = cli.hasOption(OPTION_SHORT);
+ int maxWaitSecs = cli.getParsedOptionValue(MAX_WAIT_SECS_OPTION, 0);
+ boolean shortFormat = cli.hasOption(SHORT_OPTION);
String pidUrl = process.getLocalUrl();
if (shortFormat) {
CLIO.out(pidUrl);
@@ -215,7 +217,7 @@ public boolean waitForSolrUp(String solrUrl, CommandLine cli, int maxWaitSecs) t
try {
waitToSeeSolrUp(
solrUrl,
- cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt()),
+ cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION),
maxWaitSecs,
TimeUnit.SECONDS);
return true;
@@ -251,7 +253,7 @@ public String statusFromRunningSolr(String solrUrl, CommandLine cli) throws Exce
try {
CharArr arr = new CharArr();
new JSONWriter(arr, 2)
- .write(getStatus(solrUrl, cli.getOptionValue(SolrCLI.OPTION_CREDENTIALS.getLongOpt())));
+ .write(getStatus(solrUrl, cli.getOptionValue(CommonCLIOptions.CREDENTIALS_OPTION)));
return arr.toString();
} catch (Exception exc) {
if (CLIUtils.exceptionIsAuthRelated(exc)) {
diff --git a/solr/core/src/java/org/apache/solr/cli/Tool.java b/solr/core/src/java/org/apache/solr/cli/Tool.java
index 0edde7cdae2..a95af511642 100644
--- a/solr/core/src/java/org/apache/solr/cli/Tool.java
+++ b/solr/core/src/java/org/apache/solr/cli/Tool.java
@@ -17,9 +17,8 @@
package org.apache.solr.cli;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
public interface Tool {
/** Defines the interface to a Solr tool that can be run from this command-line app. */
@@ -50,7 +49,12 @@ default String getFooter() {
return "\nPlease see the Reference Guide for more tools documentation: https://solr.apache.org/guide/solr/latest/deployment-guide/solr-control-script-reference.html";
}
- List getOptions();
+ /**
+ * Retrieve the {@link Options} supported by this tool.
+ *
+ * @return The {@link Options} this tool supports.
+ */
+ Options getOptions();
int runTool(CommandLine cli) throws Exception;
}
diff --git a/solr/core/src/java/org/apache/solr/cli/ToolBase.java b/solr/core/src/java/org/apache/solr/cli/ToolBase.java
index d35b63e1528..b0a0e1063a7 100644
--- a/solr/core/src/java/org/apache/solr/cli/ToolBase.java
+++ b/solr/core/src/java/org/apache/solr/cli/ToolBase.java
@@ -19,11 +19,14 @@
import java.io.PrintStream;
import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.OptionGroup;
+import org.apache.commons.cli.Options;
+import org.apache.solr.util.StartupLoggingUtils;
public abstract class ToolBase implements Tool {
+ private boolean verbose = false;
protected PrintStream stdout;
- protected boolean verbose = false;
protected ToolBase() {
this(CLIO.getOutStream());
@@ -48,9 +51,29 @@ protected void echo(final String msg) {
stdout.println(msg);
}
+ @Override
+ public Options getOptions() {
+ return new Options()
+ .addOption(CommonCLIOptions.HELP_OPTION)
+ .addOption(CommonCLIOptions.VERBOSE_OPTION);
+ }
+
+  /**
+   * Provides the two standard ways of connecting to Solr for CLI tools.
+   *
+   * @return an {@link OptionGroup} ensuring that the caller supplies only one of the connection
+   *     options.
+   */
+ public OptionGroup getConnectionOptions() {
+ OptionGroup optionGroup = new OptionGroup();
+ optionGroup.addOption(CommonCLIOptions.SOLR_URL_OPTION);
+ optionGroup.addOption(CommonCLIOptions.ZK_HOST_OPTION);
+ return optionGroup;
+ }
+
@Override
public int runTool(CommandLine cli) throws Exception {
- verbose = cli.hasOption(SolrCLI.OPTION_VERBOSE.getLongOpt());
+ verbose = cli.hasOption(CommonCLIOptions.VERBOSE_OPTION);
+ raiseLogLevelUnlessVerbose();
int toolExitStatus = 0;
try {
@@ -71,5 +94,11 @@ public int runTool(CommandLine cli) throws Exception {
return toolExitStatus;
}
+ private void raiseLogLevelUnlessVerbose() {
+ if (!verbose) {
+ StartupLoggingUtils.changeLogLevel("WARN");
+ }
+ }
+
public abstract void runImpl(CommandLine cli) throws Exception;
}
diff --git a/solr/core/src/java/org/apache/solr/cli/UpdateACLTool.java b/solr/core/src/java/org/apache/solr/cli/UpdateACLTool.java
index c7ab6d8e632..bf54152d2ac 100644
--- a/solr/core/src/java/org/apache/solr/cli/UpdateACLTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/UpdateACLTool.java
@@ -18,10 +18,9 @@
package org.apache.solr.cli;
import java.io.PrintStream;
-import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.impl.SolrZkClientTimeout;
import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.cloud.SolrZkClient;
@@ -53,8 +52,8 @@ public String getUsage() {
}
@Override
- public List getOptions() {
- return List.of(SolrCLI.OPTION_ZKHOST);
+ public Options getOptions() {
+ return super.getOptions().addOption(CommonCLIOptions.ZK_HOST_OPTION);
}
@Override
diff --git a/solr/core/src/java/org/apache/solr/cli/VersionTool.java b/solr/core/src/java/org/apache/solr/cli/VersionTool.java
index f5e84e6c1f8..148bdf36a40 100644
--- a/solr/core/src/java/org/apache/solr/cli/VersionTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/VersionTool.java
@@ -17,10 +17,7 @@
package org.apache.solr.cli;
-import java.util.Collections;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
import org.apache.solr.client.api.util.SolrVersion;
public class VersionTool extends ToolBase {
@@ -30,11 +27,6 @@ public String getName() {
return "version";
}
- @Override
- public List getOptions() {
- return Collections.emptyList();
- }
-
@Override
public void runImpl(CommandLine cli) throws Exception {
CLIO.out("Solr version is: " + SolrVersion.LATEST);
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkCpTool.java b/solr/core/src/java/org/apache/solr/cli/ZkCpTool.java
index a71d8a045ae..7f901e67cf2 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkCpTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkCpTool.java
@@ -23,12 +23,12 @@
import java.lang.reflect.InvocationTargetException;
import java.nio.file.Path;
import java.nio.file.Paths;
-import java.util.List;
import java.util.Locale;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.impl.SolrZkClientTimeout;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.SolrZkClient;
@@ -44,6 +44,14 @@
public class ZkCpTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option SOLR_HOME_OPTION =
+ Option.builder()
+ .longOpt("solr-home")
+ .hasArg()
+ .argName("DIR")
+ .desc("Required to look up configuration for compressing state.json.")
+ .build();
+
public ZkCpTool() {
this(CLIO.getOutStream());
}
@@ -53,19 +61,12 @@ public ZkCpTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder()
- .longOpt("solr-home")
- .argName("DIR")
- .hasArg()
- .required(false)
- .desc("Required to look up configuration for compressing state.json.")
- .build(),
- SolrCLI.OPTION_RECURSIVE,
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(SOLR_HOME_OPTION)
+ .addOption(CommonCLIOptions.RECURSIVE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -123,13 +124,12 @@ public String getHeader() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
echoIfVerbose("\nConnecting to ZooKeeper at " + zkHost + " ...");
String src = cli.getArgs()[0];
String dst = cli.getArgs()[1];
- boolean recursive = cli.hasOption("recursive");
+ boolean recursive = cli.hasOption(CommonCLIOptions.RECURSIVE_OPTION);
echo("Copying from '" + src + "' to '" + dst + "'. ZooKeeper at " + zkHost);
boolean srcIsZk = src.toLowerCase(Locale.ROOT).startsWith("zk:");
@@ -158,7 +158,7 @@ public void runImpl(CommandLine cli) throws Exception {
Compressor compressor = new ZLibCompressor();
if (dstIsZk) {
- String solrHome = cli.getOptionValue("solr-home");
+ String solrHome = cli.getOptionValue(SOLR_HOME_OPTION);
if (StrUtils.isNullOrEmpty(solrHome)) {
solrHome = System.getProperty("solr.home");
}
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkLsTool.java b/solr/core/src/java/org/apache/solr/cli/ZkLsTool.java
index 57f7fac3667..57bb45d4e40 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkLsTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkLsTool.java
@@ -18,9 +18,8 @@
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.common.cloud.SolrZkClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -38,12 +37,11 @@ public ZkLsTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_RECURSIVE,
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(CommonCLIOptions.RECURSIVE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -59,14 +57,13 @@ public String getUsage() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
String znode = cli.getArgs()[0];
try (SolrZkClient zkClient = CLIUtils.getSolrZkClient(cli, zkHost)) {
echoIfVerbose("\nConnecting to ZooKeeper at " + zkHost + " ...");
- boolean recursive = cli.hasOption("recursive");
+ boolean recursive = cli.hasOption(CommonCLIOptions.RECURSIVE_OPTION);
echoIfVerbose(
"Getting listing for ZooKeeper node "
+ znode
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkMkrootTool.java b/solr/core/src/java/org/apache/solr/cli/ZkMkrootTool.java
index 2cec28ecf84..ace32ad5ce7 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkMkrootTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkMkrootTool.java
@@ -20,9 +20,9 @@
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.common.cloud.SolrZkClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -31,6 +31,13 @@
public class ZkMkrootTool extends ToolBase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+ private static final Option FAIL_ON_EXISTS_OPTION =
+ Option.builder()
+ .longOpt("fail-on-exists")
+ .hasArg()
+ .desc("Raise an error if the root exists. Defaults to false.")
+ .build();
+
public ZkMkrootTool() {
this(CLIO.getOutStream());
}
@@ -40,17 +47,11 @@ public ZkMkrootTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder()
- .longOpt("fail-on-exists")
- .hasArg()
- .required(false)
- .desc("Raise an error if the root exists. Defaults to false.")
- .build(),
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(FAIL_ON_EXISTS_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -75,10 +76,9 @@ public String getHeader() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
String znode = cli.getArgs()[0];
- boolean failOnExists = cli.hasOption("fail-on-exists");
+ boolean failOnExists = cli.hasOption(FAIL_ON_EXISTS_OPTION);
try (SolrZkClient zkClient = CLIUtils.getSolrZkClient(cli, zkHost)) {
echoIfVerbose("\nConnecting to ZooKeeper at " + zkHost + " ...");
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkMvTool.java b/solr/core/src/java/org/apache/solr/cli/ZkMvTool.java
index f5baefc059a..dba47d5b824 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkMvTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkMvTool.java
@@ -20,10 +20,9 @@
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
-import java.util.List;
import java.util.Locale;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.cloud.SolrZkClient;
import org.slf4j.Logger;
@@ -42,8 +41,10 @@ public ZkMvTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(SolrCLI.OPTION_SOLRURL, SolrCLI.OPTION_ZKHOST, SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -73,7 +74,6 @@ public String getHeader() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
try (SolrZkClient zkClient = CLIUtils.getSolrZkClient(cli, zkHost)) {
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkRmTool.java b/solr/core/src/java/org/apache/solr/cli/ZkRmTool.java
index f2b7c491456..777ad6035a5 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkRmTool.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkRmTool.java
@@ -18,10 +18,9 @@
import java.io.PrintStream;
import java.lang.invoke.MethodHandles;
-import java.util.List;
import java.util.Locale;
import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.common.cloud.SolrZkClient;
import org.slf4j.Logger;
@@ -40,12 +39,11 @@ public ZkRmTool(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- SolrCLI.OPTION_RECURSIVE,
- SolrCLI.OPTION_SOLRURL,
- SolrCLI.OPTION_ZKHOST,
- SolrCLI.OPTION_CREDENTIALS);
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(CommonCLIOptions.RECURSIVE_OPTION)
+ .addOption(CommonCLIOptions.CREDENTIALS_OPTION)
+ .addOptionGroup(getConnectionOptions());
}
@Override
@@ -60,11 +58,10 @@ public String getUsage() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
String zkHost = CLIUtils.getZkHost(cli);
String target = cli.getArgs()[0];
- boolean recursive = cli.hasOption("recursive");
+ boolean recursive = cli.hasOption(CommonCLIOptions.RECURSIVE_OPTION);
String znode = target;
if (target.toLowerCase(Locale.ROOT).startsWith("zk:")) {
diff --git a/solr/core/src/java/org/apache/solr/cli/ZkToolHelp.java b/solr/core/src/java/org/apache/solr/cli/ZkToolHelp.java
index 14b541d3b33..525030dab11 100644
--- a/solr/core/src/java/org/apache/solr/cli/ZkToolHelp.java
+++ b/solr/core/src/java/org/apache/solr/cli/ZkToolHelp.java
@@ -19,13 +19,25 @@
import static org.apache.solr.cli.SolrCLI.print;
import java.io.PrintStream;
-import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
/** Supports zk help information in the bin/solr script. */
public class ZkToolHelp extends ToolBase {
+ private static final Option PRINT_ZK_SUBCOMMAND_OPTION =
+ Option.builder()
+ .longOpt("print-zk-subcommand-usage")
+ .desc("Reminds user to prepend zk to invoke the command.")
+ .build();
+
+ private static final Option PRINT_LONG_ZK_USAGE_OPTION =
+ Option.builder()
+ .longOpt("print-long-zk-usage")
+ .desc("Invokes the detailed help for zk commands.")
+ .build();
+
public ZkToolHelp() {
this(CLIO.getOutStream());
}
@@ -35,18 +47,10 @@ public ZkToolHelp(PrintStream stdout) {
}
@Override
- public List getOptions() {
- return List.of(
- Option.builder()
- .longOpt("print-zk-subcommand-usage")
- .desc("Reminds user to prepend zk to invoke the command.")
- .required(false)
- .build(),
- Option.builder()
- .longOpt("print-long-zk-usage")
- .required(false)
- .desc("Invokes the detailed help for zk commands.")
- .build());
+ public Options getOptions() {
+ return super.getOptions()
+ .addOption(PRINT_ZK_SUBCOMMAND_OPTION)
+ .addOption(PRINT_LONG_ZK_USAGE_OPTION);
}
@Override
@@ -56,16 +60,14 @@ public String getName() {
@Override
public void runImpl(CommandLine cli) throws Exception {
- SolrCLI.raiseLogLevelUnlessVerbose(cli);
-
- if (cli.hasOption("print-zk-subcommand-usage")) {
+ if (cli.hasOption(PRINT_ZK_SUBCOMMAND_OPTION)) {
String scriptCommand = cli.getArgs()[0];
print(
"You must invoke this subcommand using the zk command. bin/solr zk "
+ scriptCommand
+ ".");
}
- if (cli.hasOption("print-long-zk-usage")) {
+ if (cli.hasOption(PRINT_LONG_ZK_USAGE_OPTION)) {
print("usage:");
print(new ZkLsTool().getUsage());
print(new ZkCpTool().getUsage());
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
index ccfd4a1e5e2..5592303580a 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/CreateCollectionCmd.java
@@ -36,9 +36,11 @@
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
+import java.util.Objects;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.apache.solr.client.solrj.cloud.AlreadyExistsException;
import org.apache.solr.client.solrj.cloud.BadVersionException;
import org.apache.solr.client.solrj.cloud.DelegatingCloudManager;
@@ -221,24 +223,19 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList replicaPositions;
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
index cdcb3c27449..95d1eb2ac11 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MigrateCmd.java
@@ -31,6 +31,7 @@
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.cloud.DistributedClusterStateUpdater;
import org.apache.solr.cloud.Overseer;
@@ -52,7 +53,6 @@
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
import org.apache.solr.handler.component.ShardHandler;
-import org.apache.solr.util.TimeOut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -272,27 +272,26 @@ private void migrateKey(
// wait for a while until we see the new rule
log.info("Waiting to see routing rule updated in clusterstate");
- TimeOut waitUntil =
- new TimeOut(60, TimeUnit.SECONDS, ccc.getSolrCloudManager().getTimeSource());
- boolean added = false;
- while (!waitUntil.hasTimedOut()) {
- waitUntil.sleep(100);
- sourceCollection = zkStateReader.getClusterState().getCollection(sourceCollection.getName());
- sourceSlice = sourceCollection.getSlice(sourceSlice.getName());
- Map rules = sourceSlice.getRoutingRules();
- if (rules != null) {
- RoutingRule rule = rules.get(sourceRouter.getRouteKeyNoSuffix(splitKey) + "!");
- if (rule != null && rule.getRouteRanges().contains(splitRange)) {
- added = true;
- break;
- }
- }
- }
- if (!added) {
+
+ try {
+ sourceCollection =
+ zkStateReader.waitForState(
+ sourceCollection.getName(),
+ 60,
+ TimeUnit.SECONDS,
+ c -> {
+ Slice s = c.getSlice(sourceSlice.getName());
+ Map rules = s.getRoutingRules();
+ if (rules != null) {
+ RoutingRule rule = rules.get(sourceRouter.getRouteKeyNoSuffix(splitKey) + "!");
+ return rule != null && rule.getRouteRanges().contains(splitRange);
+ }
+ return false;
+ });
+ } catch (TimeoutException e) {
throw new SolrException(
- SolrException.ErrorCode.SERVER_ERROR, "Could not add routing rule: " + m);
+ SolrException.ErrorCode.SERVER_ERROR, "Could not add routing rule: " + m, e);
}
-
log.info("Routing rule added successfully");
// Create temp core on source shard
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
index cd5097993b4..48f065f537e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/MoveReplicaCmd.java
@@ -33,6 +33,7 @@
import java.util.List;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.apache.solr.cloud.ActiveReplicaWatcher;
import org.apache.solr.common.SolrCloseableLatch;
import org.apache.solr.common.SolrException;
@@ -46,7 +47,6 @@
import org.apache.solr.common.params.CoreAdminParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.Utils;
-import org.apache.solr.util.TimeOut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -161,9 +161,7 @@ private void moveReplica(
dataDir.toString(),
targetNode,
async,
- coll,
replica,
- slice,
timeout,
waitForFinalState);
} else {
@@ -187,9 +185,7 @@ private void moveHdfsReplica(
String dataDir,
String targetNode,
String async,
- DocCollection coll,
Replica replica,
- Slice slice,
int timeout,
boolean waitForFinalState)
throws Exception {
@@ -198,8 +194,8 @@ private void moveHdfsReplica(
skipCreateReplicaInClusterState = "false";
ZkNodeProps removeReplicasProps =
new ZkNodeProps(
- COLLECTION_PROP, coll.getName(),
- SHARD_ID_PROP, slice.getName(),
+ COLLECTION_PROP, replica.getCollection(),
+ SHARD_ID_PROP, replica.getShard(),
REPLICA_PROP, replica.getName());
removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_DATA_DIR, false);
removeReplicasProps.getProperties().put(CoreAdminParams.DELETE_INDEX, false);
@@ -217,8 +213,8 @@ private void moveHdfsReplica(
String.format(
Locale.ROOT,
"Failed to cleanup replica collection=%s shard=%s name=%s, failure=%s",
- coll.getName(),
- slice.getName(),
+ replica.getCollection(),
+ replica.getShard(),
replica.getName(),
deleteResult.get("failure"));
log.warn(errorString);
@@ -226,17 +222,14 @@ private void moveHdfsReplica(
return;
}
- TimeOut timeOut =
- new TimeOut(20L, TimeUnit.SECONDS, ccc.getSolrCloudManager().getTimeSource());
- while (!timeOut.hasTimedOut()) {
- coll = ccc.getZkStateReader().getClusterState().getCollection(coll.getName());
- if (coll.getReplica(replica.getName()) != null) {
- timeOut.sleep(100);
- } else {
- break;
- }
- }
- if (timeOut.hasTimedOut()) {
+ try {
+ ccc.getZkStateReader()
+ .waitForState(
+ replica.getCollection(),
+ 20L,
+ TimeUnit.SECONDS,
+ c -> c.getReplica(replica.getName()) != null);
+ } catch (TimeoutException e) {
results.add("failure", "Still see deleted replica in clusterstate!");
return;
}
@@ -246,9 +239,9 @@ private void moveHdfsReplica(
ZkNodeProps addReplicasProps =
new ZkNodeProps(
COLLECTION_PROP,
- coll.getName(),
+ replica.getCollection(),
SHARD_ID_PROP,
- slice.getName(),
+ replica.getShard(),
CoreAdminParams.NODE,
targetNode,
CoreAdminParams.CORE_NODE_NAME,
@@ -277,8 +270,8 @@ private void moveHdfsReplica(
String.format(
Locale.ROOT,
"Failed to create replica for collection=%s shard=%s" + " on node=%s, failure=%s",
- coll.getName(),
- slice.getName(),
+ replica.getCollection(),
+ replica.getShard(),
targetNode,
addResult.get("failure"));
results.add("failure", errorString);
@@ -302,8 +295,8 @@ private void moveHdfsReplica(
String.format(
Locale.ROOT,
"Failed to create replica for collection=%s shard=%s" + " on node=%s, failure=%s",
- coll.getName(),
- slice.getName(),
+ replica.getCollection(),
+ replica.getShard(),
targetNode,
addResult.get("failure"));
log.warn(errorString);
diff --git a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
index 91299b3c259..15324ec61aa 100644
--- a/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
+++ b/solr/core/src/java/org/apache/solr/cloud/api/collections/ReindexCollectionCmd.java
@@ -25,8 +25,10 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
@@ -360,22 +362,17 @@ public void call(ClusterState clusterState, ZkNodeProps message, NamedList shardInfo = new SimpleOrderedMap<>();
+ SimpleOrderedMap nl = new SimpleOrderedMap<>();
+ if (rsp.getException() != null) {
+ Throwable cause = rsp.getException();
+ if (cause instanceof SolrServerException) {
+ cause = ((SolrServerException) cause).getRootCause();
+ } else {
+ if (cause.getCause() != null) {
+ cause = cause.getCause();
+ }
+ }
+ nl.add("error", cause.toString());
+ if (!core.getCoreContainer().hideStackTrace()) {
+ StringWriter trace = new StringWriter();
+ cause.printStackTrace(new PrintWriter(trace));
+ nl.add("trace", trace.toString());
+ }
+ } else if (rb.getResults() != null) {
+ nl.add("numFound", rb.getResults().docList.matches());
+ nl.add(
+ "numFoundExact",
+ rb.getResults().docList.hitCountRelation() == TotalHits.Relation.EQUAL_TO);
+ nl.add("maxScore", rb.getResults().docList.maxScore());
}
- subt.stop();
+ nl.add("shardAddress", rb.shortCircuitedURL);
+ nl.add("time", req.getRequestTimer().getTime()); // elapsed time of this request so far
+
+ int pos = rb.shortCircuitedURL.indexOf("://");
+ String shardInfoName =
+ pos != -1 ? rb.shortCircuitedURL.substring(pos + 3) : rb.shortCircuitedURL;
+ shardInfo.add(shardInfoName, nl);
+ rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo);
}
+ }
+
+ private void processComponents(
+ SolrQueryRequest req,
+ SolrQueryResponse rsp,
+ ResponseBuilder rb,
+ RTimerTree timer,
+ List components)
+ throws IOException {
+ // creates a ShardHandler object only if it's needed
+ final ShardHandler shardHandler1 = getAndPrepShardHandler(req, rb);
+
+ if (!prepareComponents(req, rb, timer, components)) return;
{ // Once all of our components have been prepared, check if this request involves a SortSpec.
// If it does, and if our request includes a cursorMark param, then parse & init the
@@ -445,19 +490,26 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
if (!rb.isDistrib) {
// a normal non-distributed request
-
try {
// The semantics of debugging vs not debugging are different enough that
// it makes sense to have two control loops
if (!rb.isDebug()) {
// Process
for (SearchComponent c : components) {
+ if (checkLimitsBefore(c, "process", rb.req, rb.rsp, components)) {
+ shortCircuitedResults(req, rb);
+ return;
+ }
c.process(rb);
}
} else {
// Process
RTimerTree subt = timer.sub("process");
for (SearchComponent c : components) {
+ if (checkLimitsBefore(c, "process debug", rb.req, rb.rsp, components)) {
+ shortCircuitedResults(req, rb);
+ return;
+ }
rb.setTimer(subt.sub(c.getName()));
c.process(rb);
rb.getTimer().stop();
@@ -471,22 +523,7 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
}
} catch (ExitableDirectoryReader.ExitingReaderException ex) {
log.warn("Query: {}; ", req.getParamString(), ex);
- if (rb.rsp.getResponse() == null) {
- rb.rsp.addResponse(new SolrDocumentList());
-
- // If a cursorMark was passed, and we didn't progress, set
- // the nextCursorMark to the same position
- String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM);
- if (null != cursorStr) {
- rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT, cursorStr);
- }
- }
- if (rb.isDebug()) {
- NamedList debug = new NamedList<>();
- debug.add("explain", new NamedList<>());
- rb.rsp.add("debug", debug);
- }
- rb.rsp.setPartialResults(rb.req);
+ shortCircuitedResults(req, rb);
}
} else {
// a distributed request
@@ -504,7 +541,10 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
// call all components
for (SearchComponent c : components) {
- // the next stage is the minimum of what all components report
+ if (checkLimitsBefore(c, "distrib", rb.req, rb.rsp, components)) {
+ shortCircuitedResults(req, rb);
+ return;
+ } // the next stage is the minimum of what all components report
nextStage = Math.min(nextStage, c.distributedProcess(rb));
}
@@ -558,6 +598,8 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
? shardHandler1.takeCompletedIncludingErrors()
: shardHandler1.takeCompletedOrError();
if (srsp == null) break; // no more requests to wait for
+ AtomicReference detailMesg =
+ new AtomicReference<>(); // or perhaps new Object[1] ?
boolean anyResponsesPartial =
srsp.getShardRequest().responses.stream()
@@ -568,9 +610,20 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
return false;
}
Object recursive = resp.findRecursive("responseHeader", "partialResults");
+ if (recursive != null) {
+ Object message =
+ "[Shard:"
+ + response.getShardAddress()
+ + "]"
+ + resp.findRecursive(
+ "responseHeader",
+ RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY);
+ detailMesg.compareAndSet(null, message); // first one, ignore rest
+ }
return recursive != null;
});
if (anyResponsesPartial) {
+ rb.rsp.addPartialResponseDetail(detailMesg.get());
rsp.setPartialResults(rb.req);
}
// Was there an exception?
@@ -594,6 +647,11 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
throwSolrException(srsp.getException());
} else {
rsp.setPartialResults(rb.req);
+ if (publishCpuTime) {
+ totalShardCpuTime += computeShardCpuTime(srsp.getShardRequest().responses);
+ rsp.getResponseHeader().add(ThreadCpuTimer.CPU_TIME, totalShardCpuTime);
+ rsp.addToLog(ThreadCpuTimer.CPU_TIME, totalShardCpuTime);
+ }
}
}
}
@@ -602,6 +660,15 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
// let the components see the responses to the request
for (SearchComponent c : components) {
+ if (checkLimitsBefore(
+ c,
+ "handleResponses next stage:" + stageInEnglish(nextStage),
+ rb.req,
+ rb.rsp,
+ components)) {
+ shortCircuitedResults(req, rb);
+ return;
+ }
c.handleResponses(rb, srsp.getShardRequest());
}
@@ -613,6 +680,10 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
}
for (SearchComponent c : components) {
+ if (checkLimitsBefore(
+ c, "finishStage stage:" + stageInEnglish(nextStage), rb.req, rb.rsp, components)) {
+ return;
+ }
c.finishStage(rb);
}
@@ -624,44 +695,97 @@ public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throw
rsp.addToLog(ThreadCpuTimer.CPU_TIME, totalShardCpuTime);
}
}
+ }
- // SOLR-5550: still provide shards.info if requested even for a short circuited distrib request
- if (!rb.isDistrib
- && req.getParams().getBool(ShardParams.SHARDS_INFO, false)
- && rb.shortCircuitedURL != null) {
- NamedList shardInfo = new SimpleOrderedMap<>();
- SimpleOrderedMap nl = new SimpleOrderedMap<>();
- if (rsp.getException() != null) {
- Throwable cause = rsp.getException();
- if (cause instanceof SolrServerException) {
- cause = ((SolrServerException) cause).getRootCause();
- } else {
- if (cause.getCause() != null) {
- cause = cause.getCause();
- }
+ private static boolean prepareComponents(
+ SolrQueryRequest req, ResponseBuilder rb, RTimerTree timer, List components)
+ throws IOException {
+ if (timer == null) {
+ // non-debugging prepare phase
+ for (SearchComponent component : components) {
+ if (checkLimitsBefore(component, "prepare", rb.req, rb.rsp, components)) {
+ shortCircuitedResults(req, rb);
+ return false;
}
- nl.add("error", cause.toString());
- if (!core.getCoreContainer().hideStackTrace()) {
- StringWriter trace = new StringWriter();
- cause.printStackTrace(new PrintWriter(trace));
- nl.add("trace", trace.toString());
+ component.prepare(rb);
+ }
+ } else {
+ // debugging prepare phase
+ RTimerTree subt = timer.sub("prepare");
+ for (SearchComponent c : components) {
+ if (checkLimitsBefore(c, "prepare debug", rb.req, rb.rsp, components)) {
+ shortCircuitedResults(req, rb);
+ return false;
}
- } else if (rb.getResults() != null) {
- nl.add("numFound", rb.getResults().docList.matches());
- nl.add(
- "numFoundExact",
- rb.getResults().docList.hitCountRelation() == TotalHits.Relation.EQUAL_TO);
- nl.add("maxScore", rb.getResults().docList.maxScore());
+ rb.setTimer(subt.sub(c.getName()));
+ c.prepare(rb);
+ rb.getTimer().stop();
}
- nl.add("shardAddress", rb.shortCircuitedURL);
- nl.add("time", req.getRequestTimer().getTime()); // elapsed time of this request so far
+ subt.stop();
+ }
+ return true;
+ }
- int pos = rb.shortCircuitedURL.indexOf("://");
- String shardInfoName =
- pos != -1 ? rb.shortCircuitedURL.substring(pos + 3) : rb.shortCircuitedURL;
- shardInfo.add(shardInfoName, nl);
- rsp.getValues().add(ShardParams.SHARDS_INFO, shardInfo);
+ private static String stageInEnglish(int nextStage) {
+ // This should probably be an enum, but that change should be its own ticket.
+ switch (nextStage) {
+ case STAGE_START:
+ return "START";
+ case STAGE_PARSE_QUERY:
+ return "PARSE_QUERY";
+ case STAGE_TOP_GROUPS:
+ return "TOP_GROUPS";
+ case STAGE_EXECUTE_QUERY:
+ return "EXECUTE_QUERY";
+ case STAGE_GET_FIELDS:
+ return "GET_FIELDS";
+ // nobody wants to think it was DONE and canceled after it completed...
+ case STAGE_DONE:
+ return "FINISHING";
+ default:
+ throw new SolrException(
+ SolrException.ErrorCode.SERVER_ERROR, "Unrecognized stage:" + nextStage);
+ }
+ }
+
+ private static void shortCircuitedResults(SolrQueryRequest req, ResponseBuilder rb) {
+
+ if (rb.rsp.getResponse() == null) {
+ rb.rsp.addResponse(new SolrDocumentList());
+
+ // If a cursorMark was passed, and we didn't progress, set
+ // the nextCursorMark to the same position
+ String cursorStr = rb.req.getParams().get(CursorMarkParams.CURSOR_MARK_PARAM);
+ if (null != cursorStr) {
+ rb.rsp.add(CursorMarkParams.CURSOR_MARK_NEXT, cursorStr);
+ }
}
+ if (rb.isDebug()) {
+ NamedList debug = new NamedList<>();
+ debug.add("explain", new NamedList<>());
+ rb.rsp.add("debug", debug);
+ }
+ rb.rsp.setPartialResults(rb.req);
+ }
+
+ private static boolean checkLimitsBefore(
+ SearchComponent c,
+ String when,
+ SolrQueryRequest req,
+ SolrQueryResponse resp,
+ List components) {
+
+ return getQueryLimits(req, resp)
+ .maybeExitWithPartialResults(
+ () ->
+ "["
+ + when
+ + "] Limit(s) exceeded prior to "
+ + c.getName()
+ + " in "
+ + components.stream()
+ .map(SearchComponent::getName)
+ .collect(Collectors.toList()));
}
private long computeShardCpuTime(List responses) {
diff --git a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
index 90eb3eb0c1c..9b5e7a82bf8 100644
--- a/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
+++ b/solr/core/src/java/org/apache/solr/handler/designer/SchemaDesignerConfigSetHelper.java
@@ -78,7 +78,6 @@
import org.apache.solr.cloud.ZkSolrResourceLoader;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.SolrZkClient;
@@ -168,24 +167,12 @@ Map analyzeField(String configSet, String fieldName, String fiel
}
List listCollectionsForConfig(String configSet) {
- final List collections = new ArrayList<>();
- Map states =
- zkStateReader().getClusterState().getCollectionStates();
- for (Map.Entry e : states.entrySet()) {
- final String coll = e.getKey();
- if (coll.startsWith(DESIGNER_PREFIX)) {
- continue; // ignore temp
- }
-
- try {
- if (configSet.equals(e.getValue().get().getConfigName()) && e.getValue().get() != null) {
- collections.add(coll);
- }
- } catch (Exception exc) {
- log.warn("Failed to get config name for {}", coll, exc);
- }
- }
- return collections;
+ return zkStateReader()
+ .getClusterState()
+ .collectionStream()
+ .filter(c -> configSet.equals(c.getConfigName()))
+ .map(DocCollection::getName)
+ .toList();
}
@SuppressWarnings("unchecked")
diff --git a/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java b/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java
index 61b51a6af20..444867b430b 100644
--- a/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java
+++ b/solr/core/src/java/org/apache/solr/request/SolrRequestInfo.java
@@ -253,10 +253,12 @@ private void initQueryLimits() {
*/
public QueryLimits getLimits() {
// make sure the ThreadCpuTime is always initialized
- return req == null || rsp == null
- ? QueryLimits.NONE
- : (QueryLimits)
- req.getContext().computeIfAbsent(LIMITS_KEY, (k) -> new QueryLimits(req, rsp));
+ return req == null || rsp == null ? QueryLimits.NONE : getQueryLimits(req, rsp);
+ }
+
+ public static QueryLimits getQueryLimits(SolrQueryRequest request, SolrQueryResponse response) {
+ return (QueryLimits)
+ request.getContext().computeIfAbsent(LIMITS_KEY, (k) -> new QueryLimits(request, response));
}
public SolrDispatchFilter.Action getAction() {
diff --git a/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java b/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java
index b399ce57b79..5676a63fe10 100644
--- a/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java
+++ b/solr/core/src/java/org/apache/solr/response/SolrQueryResponse.java
@@ -207,7 +207,10 @@ public static boolean haveCompleteResults(NamedList> header) {
*/
public void addPartialResponseDetail(Object detail) {
NamedList header = getResponseHeader();
- if (header != null && detail != null) {
+ // never overwrite the original detail message. The first limit violation is the important one.
+ if (header != null
+ && detail != null
+ && header.get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY) == null) {
header.add(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY, detail);
}
}
diff --git a/solr/core/src/java/org/apache/solr/search/QueryLimits.java b/solr/core/src/java/org/apache/solr/search/QueryLimits.java
index 7aee5657678..e6e0db5eed9 100644
--- a/solr/core/src/java/org/apache/solr/search/QueryLimits.java
+++ b/solr/core/src/java/org/apache/solr/search/QueryLimits.java
@@ -16,12 +16,14 @@
*/
package org.apache.solr.search;
+import static org.apache.solr.response.SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY;
import static org.apache.solr.search.CpuAllowedLimit.hasCpuLimit;
import static org.apache.solr.search.TimeAllowedLimit.hasTimeLimit;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
+import java.util.function.Supplier;
import org.apache.lucene.index.QueryTimeout;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
@@ -113,7 +115,8 @@ public String formatExceptionMessage(String label) {
* @throws QueryLimitsExceededException if {@link #allowPartialResults} is false and limits have
* been reached.
*/
- public boolean maybeExitWithPartialResults(String label) throws QueryLimitsExceededException {
+ public boolean maybeExitWithPartialResults(Supplier label)
+ throws QueryLimitsExceededException {
if (isLimitsEnabled() && shouldExit()) {
if (allowPartialResults) {
if (rsp != null) {
@@ -124,17 +127,25 @@ public boolean maybeExitWithPartialResults(String label) throws QueryLimitsExcee
"No request active, but attempting to exit with partial results?");
}
rsp.setPartialResults(requestInfo.getReq());
- rsp.addPartialResponseDetail(formatExceptionMessage(label));
+ if (rsp.getResponseHeader().get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY) == null) {
+ // don't want to add duplicate keys. Although technically legal, there's a strong risk
+ // that clients won't anticipate it and break.
+ rsp.addPartialResponseDetail(formatExceptionMessage(label.get()));
+ }
}
return true;
} else {
- throw new QueryLimitsExceededException(formatExceptionMessage(label));
+ throw new QueryLimitsExceededException(formatExceptionMessage(label.get()));
}
} else {
return false;
}
}
+ public boolean maybeExitWithPartialResults(String label) throws QueryLimitsExceededException {
+ return maybeExitWithPartialResults(() -> label);
+ }
+
/**
* Method to diagnose limit exceeded. Note that while this should always list the exceeded limit,
* it may also nominate additional limits that have been exceeded since the actual check that
diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
index cc645476aef..99291fc6ad0 100644
--- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
+++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
@@ -53,6 +53,7 @@
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import java.util.function.Supplier;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@@ -96,7 +97,6 @@
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.common.util.SuppressForbidden;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.common.util.Utils;
import org.apache.solr.common.util.ValidatingJsonMap;
import org.apache.solr.core.CoreContainer;
@@ -125,7 +125,6 @@
import org.apache.solr.servlet.cache.Method;
import org.apache.solr.update.processor.DistributingUpdateProcessorFactory;
import org.apache.solr.util.RTimerTree;
-import org.apache.solr.util.TimeOut;
import org.apache.solr.util.tracing.TraceUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
@@ -388,18 +387,16 @@ protected void autoCreateSystemColl(String corename) throws Exception {
+ " collection: "
+ Utils.toJSONString(rsp.getValues()));
}
- TimeOut timeOut = new TimeOut(3, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- for (; ; ) {
- if (cores.getZkController().getClusterState().getCollectionOrNull(SYSTEM_COLL) != null) {
- break;
- } else {
- if (timeOut.hasTimedOut()) {
- throw new SolrException(
- ErrorCode.SERVER_ERROR,
- "Could not find " + SYSTEM_COLL + " collection even after 3 seconds");
- }
- timeOut.sleep(50);
- }
+
+ try {
+ cores
+ .getZkController()
+ .getZkStateReader()
+ .waitForState(SYSTEM_COLL, 3, TimeUnit.SECONDS, Objects::nonNull);
+ } catch (TimeoutException e) {
+ throw new SolrException(
+ ErrorCode.SERVER_ERROR,
+ "Could not find " + SYSTEM_COLL + " collection even after 3 seconds");
}
action = RETRY;
diff --git a/solr/core/src/test/org/apache/solr/cli/DeleteToolTest.java b/solr/core/src/test/org/apache/solr/cli/DeleteToolTest.java
index 8f66d104d4e..45c48697663 100644
--- a/solr/core/src/test/org/apache/solr/cli/DeleteToolTest.java
+++ b/solr/core/src/test/org/apache/solr/cli/DeleteToolTest.java
@@ -60,8 +60,6 @@ public void testDeleteCollectionWithBasicAuth() throws Exception {
"delete",
"-c",
"testDeleteCollectionWithBasicAuth",
- "--delete-config",
- "false",
"-z",
cluster.getZkClient().getZkServerAddress(),
"--credentials",
diff --git a/solr/core/src/test/org/apache/solr/cli/SolrCLITest.java b/solr/core/src/test/org/apache/solr/cli/SolrCLITest.java
index d6eb3d6b3ad..4a11cfb296a 100644
--- a/solr/core/src/test/org/apache/solr/cli/SolrCLITest.java
+++ b/solr/core/src/test/org/apache/solr/cli/SolrCLITest.java
@@ -45,7 +45,4 @@ public void testUptime() {
assertEquals(
"106751991167 days, 7 hours, 12 minutes, 56 seconds", SolrCLI.uptime(Long.MAX_VALUE));
}
-
- @Test
- public void testGetCredentials() {}
}
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
index ddb72aad118..6652009774f 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyNothingIsSafeWithPullReplicasTest.java
@@ -343,7 +343,7 @@ public void test() throws Exception {
List numShardsNumReplicas = new ArrayList<>(2);
numShardsNumReplicas.add(1);
numShardsNumReplicas.add(1 + getPullReplicaCount());
- checkForCollection("testcollection", numShardsNumReplicas, null);
+ checkForCollection("testcollection", numShardsNumReplicas);
testSuccessful = true;
} finally {
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
index 764035dc043..b097ebea968 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
@@ -255,7 +255,7 @@ public void test() throws Exception {
List numShardsNumReplicas = new ArrayList<>(2);
numShardsNumReplicas.add(1);
numShardsNumReplicas.add(1 + getPullReplicaCount());
- checkForCollection("testcollection", numShardsNumReplicas, null);
+ checkForCollection("testcollection", numShardsNumReplicas);
}
private void tryDelete() throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
index 686a0a2554a..16242bcc5eb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
@@ -488,11 +488,13 @@ private JettySolrRunner getJettyForReplica(Replica replica) {
}
private void waitForNodeLeave(String lostNodeName) throws InterruptedException {
- ZkStateReader reader = cluster.getZkStateReader();
- TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (reader.getClusterState().getLiveNodes().contains(lostNodeName)) {
- Thread.sleep(100);
- if (timeOut.hasTimedOut()) fail("Wait for " + lostNodeName + " to leave failed!");
+
+ try {
+ cluster
+ .getZkStateReader()
+ .waitForLiveNodes(20, TimeUnit.SECONDS, (o, n) -> !n.contains(lostNodeName));
+ } catch (TimeoutException e) {
+ fail("Wait for " + lostNodeName + " to leave failed!");
}
}
diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
index 2f3f62ad763..0d4f1c2b722 100644
--- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java
@@ -30,6 +30,7 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.apache.solr.JSONTestUtil;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
@@ -48,13 +49,11 @@
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.embedded.JettySolrRunner;
import org.apache.solr.util.RTimer;
import org.apache.solr.util.TestInjection;
-import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -291,27 +290,23 @@ protected void testRf2() throws Exception {
protected void waitForState(String collection, String replicaName, Replica.State state, long ms)
throws KeeperException, InterruptedException {
- TimeOut timeOut = new TimeOut(ms, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
- Replica.State replicaState = Replica.State.ACTIVE;
- while (!timeOut.hasTimedOut()) {
- ZkStateReader zkr = ZkStateReader.from(cloudClient);
- zkr.forceUpdateCollection(collection); // force the state to be fresh
- ClusterState cs = zkr.getClusterState();
- Collection slices = cs.getCollection(collection).getActiveSlices();
- Slice slice = slices.iterator().next();
- Replica partitionedReplica = slice.getReplica(replicaName);
- replicaState = partitionedReplica.getState();
- if (replicaState == state) return;
+ ZkStateReader zkr = ZkStateReader.from(cloudClient);
+
+ try {
+ zkr.waitForState(
+ collection,
+ ms,
+ TimeUnit.MILLISECONDS,
+ c -> {
+ Collection slices = c.getActiveSlices();
+ Slice slice = slices.iterator().next();
+ Replica partitionedReplica = slice.getReplica(replicaName);
+ Replica.State replicaState = partitionedReplica.getState();
+ return replicaState == state;
+ });
+ } catch (TimeoutException e) {
+ fail("Timeout waiting for state " + state + " of replica " + replicaName);
}
- assertEquals(
- "Timeout waiting for state "
- + state
- + " of replica "
- + replicaName
- + ", current state "
- + replicaState,
- state,
- replicaState);
}
protected void testRf3() throws Exception {
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
index 5b7ff3f0428..1bc87e4f4e1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderFailureAfterFreshStartTest.java
@@ -39,8 +39,6 @@
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.TimeSource;
-import org.apache.solr.util.TimeOut;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -154,11 +152,7 @@ public void test() throws Exception {
// shutdown the original leader
log.info("Now shutting down initial leader");
forceNodeFailures(singletonList(initialLeaderJetty));
- waitForNewLeader(
- cloudClient,
- "shard1",
- (Replica) initialLeaderJetty.client.info,
- new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME));
+ waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info);
waitTillNodesActive();
log.info("Updating mappings from zk");
updateMappingsFromZk(jettys, clients, true);
diff --git a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
index 7042c3018e2..d9e501e69b6 100644
--- a/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/PeerSyncReplicationTest.java
@@ -44,10 +44,8 @@
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.util.TimeOut;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -154,11 +152,7 @@ public void test() throws Exception {
log.info("Now shutting down initial leader");
forceNodeFailures(singletonList(initialLeaderJetty));
log.info("Updating mappings from zk");
- waitForNewLeader(
- cloudClient,
- "shard1",
- (Replica) initialLeaderJetty.client.info,
- new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME));
+ waitForNewLeader(cloudClient, "shard1", (Replica) initialLeaderJetty.client.info);
updateMappingsFromZk(jettys, clients, true);
assertEquals(
"PeerSynced node did not become leader",
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
index f5cf82c701e..ec0564c249d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
@@ -749,20 +749,13 @@ static void waitForNumDocsInAllReplicas(
}
}
- static void waitForDeletion(String collection) throws InterruptedException, KeeperException {
- TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (cluster.getSolrClient().getClusterState().hasCollection(collection)) {
- log.info("Collection not yet deleted");
- try {
- Thread.sleep(100);
- if (t.hasTimedOut()) {
- fail("Timed out waiting for collection " + collection + " to be deleted.");
- }
- cluster.getZkStateReader().forceUpdateCollection(collection);
- } catch (SolrException e) {
- return;
- }
- }
+ static void waitForDeletion(String collection) {
+ waitForState(
+ "Waiting for collection " + collection + " to be deleted",
+ collection,
+ (n, c) -> c == null,
+ 10,
+ TimeUnit.SECONDS);
}
private DocCollection assertNumberOfReplicas(
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
index f4af183434c..0d68f2f1993 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
@@ -120,7 +120,7 @@ public void tearDown() throws Exception {
log.info("tearDown deleting collection");
CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
log.info("Collection deleted");
- waitForDeletion(collectionName);
+ TestPullReplica.waitForDeletion(collectionName);
}
collectionName = null;
super.tearDown();
@@ -347,22 +347,6 @@ protected SocketProxy getProxyForReplica(Replica replica) throws Exception {
return proxy;
}
- private void waitForDeletion(String collection) throws InterruptedException, KeeperException {
- TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (cluster.getSolrClient().getClusterState().hasCollection(collection)) {
- log.info("Collection not yet deleted");
- try {
- Thread.sleep(100);
- if (t.hasTimedOut()) {
- fail("Timed out waiting for collection " + collection + " to be deleted.");
- }
- cluster.getZkStateReader().forceUpdateCollection(collection);
- } catch (SolrException e) {
- return;
- }
- }
- }
-
private CollectionStatePredicate activeReplicaCount(
int numWriter, int numActive, int numPassive) {
return (liveNodes, collectionState) -> {
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
index 0b59d5d74ab..20343487523 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestRebalanceLeaders.java
@@ -111,8 +111,7 @@ public void removeAllProperties() {
// shardUnique=true for the special property preferredLeader. That was removed at one point, so
// we're explicitly testing that as well.
@Test
- public void testSetArbitraryPropertySliceUnique()
- throws IOException, SolrServerException, InterruptedException {
+ public void testSetArbitraryPropertySliceUnique() throws IOException, SolrServerException {
// Check both special (preferredLeader) and something arbitrary.
doTestSetArbitraryPropertySliceUnique("foo" + random().nextInt(1_000_000));
removeAllProperties();
@@ -124,8 +123,7 @@ public void testSetArbitraryPropertySliceUnique()
// individual properties on individual nodes. This one relies on Solr to pick which replicas to
// set the property on
@Test
- public void testBalancePropertySliceUnique()
- throws InterruptedException, IOException, SolrServerException {
+ public void testBalancePropertySliceUnique() throws IOException, SolrServerException {
// Check both cases of "special" property preferred(Ll)eader
doTestBalancePropertySliceUnique("foo" + random().nextInt(1_000_000));
removeAllProperties();
@@ -160,7 +158,7 @@ public void testRebalanceLeaders() throws Exception {
// on an individual
// replica.
private void doTestSetArbitraryPropertySliceUnique(String propIn)
- throws InterruptedException, IOException, SolrServerException {
+ throws IOException, SolrServerException {
final String prop = (random().nextBoolean()) ? propIn : propIn.toUpperCase(Locale.ROOT);
// First set the property in some replica in some slice
forceUpdateCollectionStatus();
@@ -176,43 +174,29 @@ private void doTestSetArbitraryPropertySliceUnique(String propIn)
Replica rep = reps[random().nextInt(reps.length)];
// Set the property on a particular replica
setProp(slice, rep, prop);
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
-
- long count = 0;
- boolean rightRep = false;
- Slice modSlice;
- DocCollection modColl = null; // keeps IDE happy
// ensure that no other replica in that slice has the property when we return.
- while (timeout.hasTimedOut() == false) {
- forceUpdateCollectionStatus();
- modColl = cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
- modSlice = modColl.getSlice(slice.getName());
- rightRep =
- modSlice
- .getReplica(rep.getName())
- .getBool("property." + prop.toLowerCase(Locale.ROOT), false);
- count =
- modSlice.getReplicas().stream()
- .filter(
- thisRep -> thisRep.getBool("property." + prop.toLowerCase(Locale.ROOT), false))
- .count();
-
- if (count == 1 && rightRep) {
- break;
- }
-
- TimeUnit.MILLISECONDS.sleep(50);
- }
- if (count != 1 || rightRep == false) {
- fail(
- "The property "
- + prop
- + " was not uniquely distributed in slice "
- + slice.getName()
- + " "
- + modColl.toString());
- }
+ waitForState(
+ "Check property is uniquely distributed in slice: " + prop,
+ COLLECTION_NAME,
+ (n, c) -> {
+ forceUpdateCollectionStatus();
+ Slice modSlice = c.getSlice(slice.getName());
+ boolean rightRep =
+ modSlice
+ .getReplica(rep.getName())
+ .getBool("property." + prop.toLowerCase(Locale.ROOT), false);
+ long count =
+ modSlice.getReplicas().stream()
+ .filter(
+ thisRep ->
+ thisRep.getBool("property." + prop.toLowerCase(Locale.ROOT), false))
+ .count();
+
+ return count == 1 && rightRep;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
}
@@ -332,7 +316,7 @@ private void sendRebalanceCommand() throws SolrServerException, IOException {
// re-assigned with the
// BALANCESHARDUNIQUE command.
private void doTestBalancePropertySliceUnique(String propIn)
- throws InterruptedException, IOException, SolrServerException {
+ throws IOException, SolrServerException {
final String prop = (random().nextBoolean()) ? propIn : propIn.toUpperCase(Locale.ROOT);
// Concentrate the properties on as few replicas a possible
@@ -350,63 +334,55 @@ private void doTestBalancePropertySliceUnique(String propIn)
private void verifyPropCorrectlyDistributed(String prop) {
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
-
String propLC = prop.toLowerCase(Locale.ROOT);
- DocCollection docCollection = null;
- while (timeout.hasTimedOut() == false) {
- forceUpdateCollectionStatus();
- docCollection = cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
- int maxPropCount = Integer.MAX_VALUE;
- int minPropCount = Integer.MIN_VALUE;
- for (Slice slice : docCollection.getSlices()) {
- int repCount = 0;
- for (Replica rep : slice.getReplicas()) {
- if (rep.getBool("property." + propLC, false)) {
- repCount++;
+ waitForState(
+ "Check property is distributed evenly: " + prop,
+ COLLECTION_NAME,
+ (liveNodes, docCollection) -> {
+ int maxPropCount = 0;
+ int minPropCount = Integer.MAX_VALUE;
+ for (Slice slice : docCollection.getSlices()) {
+ int repCount = 0;
+ for (Replica rep : slice.getReplicas()) {
+ if (rep.getBool("property." + propLC, false)) {
+ repCount++;
+ }
+ }
+ maxPropCount = Math.max(maxPropCount, repCount);
+ minPropCount = Math.min(minPropCount, repCount);
}
- }
- maxPropCount = Math.max(maxPropCount, repCount);
- minPropCount = Math.min(minPropCount, repCount);
- }
- if (Math.abs(maxPropCount - minPropCount) < 2) return;
- }
- log.error("Property {} is not distributed evenly. {}", prop, docCollection);
- fail("Property is not distributed evenly " + prop);
+ return Math.abs(maxPropCount - minPropCount) < 2;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
// Used when we concentrate the leader on a few nodes.
private void verifyPropDistributedAsExpected(
- Map expectedShardReplicaMap, String prop) throws InterruptedException {
+ Map expectedShardReplicaMap, String prop) {
// Make sure that the shard unique are where you expect.
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
-
String propLC = prop.toLowerCase(Locale.ROOT);
- boolean failure = false;
- DocCollection docCollection = null;
- while (timeout.hasTimedOut() == false) {
- forceUpdateCollectionStatus();
- docCollection = cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
- failure = false;
- for (Map.Entry ent : expectedShardReplicaMap.entrySet()) {
- Replica rep = docCollection.getSlice(ent.getKey()).getReplica(ent.getValue());
- if (rep.getBool("property." + propLC, false) == false) {
- failure = true;
- }
- }
- if (failure == false) {
- return;
- }
- TimeUnit.MILLISECONDS.sleep(100);
- }
+ String message =
+ String.format(
+ Locale.ROOT,
+ "Checking properties are on the expected replicas. Props:%s Expected:%s",
+ prop,
+ expectedShardReplicaMap.toString());
- fail(
- prop
- + " properties are not on the expected replicas: "
- + docCollection.toString()
- + System.lineSeparator()
- + "Expected "
- + expectedShardReplicaMap.toString());
+ waitForState(
+ message,
+ COLLECTION_NAME,
+ (liveNodes, docCollection) -> {
+ for (Map.Entry ent : expectedShardReplicaMap.entrySet()) {
+ Replica rep = docCollection.getSlice(ent.getKey()).getReplica(ent.getValue());
+ if (rep.getBool("property." + propLC, false) == false) {
+ return false;
+ }
+ }
+ return true;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
// Just check that the property is distributed as expectecd. This does _not_ rebalance the leaders
@@ -558,14 +534,6 @@ private void concentratePropByRestartingJettys() throws Exception {
// then start them again to concentrate the leaders. It's not necessary that all shards have a
// leader.
- ExecutorService executorService = ExecutorUtil.newMDCAwareCachedThreadPool("Start Jetty");
-
- for (JettySolrRunner jetty : jettys) {
- cluster.stopJettySolrRunner(jetty);
- }
-
- ExecutorUtil.shutdownAndAwaitTermination(executorService);
-
for (JettySolrRunner jetty : jettys) {
cluster.stopJettySolrRunner(jetty);
}
@@ -575,7 +543,7 @@ private void concentratePropByRestartingJettys() throws Exception {
}
checkReplicasInactive(jettys);
- executorService = ExecutorUtil.newMDCAwareCachedThreadPool("Start Jetty");
+ ExecutorService executorService = ExecutorUtil.newMDCAwareCachedThreadPool("Start Jetty");
for (int idx = 0; idx < jettys.size(); ++idx) {
int finalIdx = idx;
@@ -604,74 +572,61 @@ private void forceUpdateCollectionStatus() {
// Since we have to restart jettys, we don't want to try re-balancing etc. until we're sure all
// jettys that should be up are and all replicas are active.
- private void checkReplicasInactive(List downJettys) throws InterruptedException {
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
- DocCollection docCollection = null;
- Set liveNodes = null;
+ private void checkReplicasInactive(List downJettys) {
Set downJettyNodes = new TreeSet<>();
for (JettySolrRunner jetty : downJettys) {
downJettyNodes.add(
jetty.getBaseUrl().getHost() + ":" + jetty.getBaseUrl().getPort() + "_solr");
}
- while (timeout.hasTimedOut() == false) {
- forceUpdateCollectionStatus();
- docCollection = cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
- liveNodes = cluster.getSolrClient().getClusterState().getLiveNodes();
- boolean expectedInactive = true;
-
- for (Slice slice : docCollection.getSlices()) {
- for (Replica rep : slice.getReplicas()) {
- if (downJettyNodes.contains(rep.getNodeName()) == false) {
- continue; // We are on a live node
- }
- // A replica on an allegedly down node is reported as active.
- if (rep.isActive(liveNodes)) {
- expectedInactive = false;
+
+ waitForState(
+ "Waiting for all replicas to become inactive",
+ COLLECTION_NAME,
+ (liveNodes, docCollection) -> {
+ boolean expectedInactive = true;
+
+ for (Slice slice : docCollection.getSlices()) {
+ for (Replica rep : slice.getReplicas()) {
+ if (downJettyNodes.contains(rep.getNodeName()) == false) {
+ continue; // We are on a live node
+ }
+ // A replica on an allegedly down node is reported as active.
+ if (rep.isActive(liveNodes)) {
+ expectedInactive = false;
+ }
+ }
}
- }
- }
- if (expectedInactive) {
- return;
- }
- TimeUnit.MILLISECONDS.sleep(100);
- }
- fail(
- "timed out waiting for all replicas to become inactive: livenodes: "
- + liveNodes
- + " Collection state: "
- + docCollection.toString());
+ return expectedInactive;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
// We need to wait around until all replicas are active before expecting rebalancing or
// distributing shard-unique properties to work.
- private void checkAllReplicasActive() throws InterruptedException {
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
- while (timeout.hasTimedOut() == false) {
- forceUpdateCollectionStatus();
- DocCollection docCollection =
- cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
- Set liveNodes = cluster.getSolrClient().getClusterState().getLiveNodes();
- boolean allActive = true;
- for (Slice slice : docCollection.getSlices()) {
- for (Replica rep : slice.getReplicas()) {
- if (rep.isActive(liveNodes) == false) {
- allActive = false;
+ private void checkAllReplicasActive() {
+ waitForState(
+ "Waiting for all replicas to become active",
+ COLLECTION_NAME,
+ (liveNodes, docCollection) -> {
+ boolean allActive = true;
+ for (Slice slice : docCollection.getSlices()) {
+ for (Replica rep : slice.getReplicas()) {
+ if (rep.isActive(liveNodes) == false) {
+ allActive = false;
+ }
+ }
}
- }
- }
- if (allActive) {
- return;
- }
- TimeUnit.MILLISECONDS.sleep(100);
- }
- fail("timed out waiting for all replicas to become active");
+ return allActive;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
// use a simple heuristic to put as many replicas with the property on as few nodes as possible.
// The point is that then we can execute BALANCESHARDUNIQUE and be sure it worked correctly
- private void concentrateProp(String prop)
- throws InterruptedException, IOException, SolrServerException {
+ private void concentrateProp(String prop) throws IOException, SolrServerException {
// find all the live nodes for each slice, assign the leader to the first replica that is in the
// lowest position on live_nodes
List liveNodes =
@@ -704,43 +659,26 @@ private void concentrateProp(String prop)
}
// make sure that the property in question is unique per shard.
- private Map verifyPropUniquePerShard(String prop) throws InterruptedException {
- Map uniquePropMaps = new TreeMap<>();
-
- TimeOut timeout = new TimeOut(timeoutMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
- while (timeout.hasTimedOut() == false) {
- uniquePropMaps.clear();
- if (checkUniquePropPerShard(uniquePropMaps, prop)) {
- return uniquePropMaps;
- }
- TimeUnit.MILLISECONDS.sleep(10);
- }
- fail(
- "There should be exactly one replica with value "
- + prop
- + " set to true per shard: "
- + cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME).toString());
- return null; // keeps IDE happy.
- }
-
- // return true if every shard has exactly one replica with the unique property set to "true"
- private boolean checkUniquePropPerShard(Map uniques, String prop) {
- forceUpdateCollectionStatus();
- DocCollection docCollection =
- cluster.getSolrClient().getClusterState().getCollection(COLLECTION_NAME);
+ private void verifyPropUniquePerShard(String prop) {
- for (Slice slice : docCollection.getSlices()) {
- int propCount = 0;
- for (Replica rep : slice.getReplicas()) {
- if (rep.getBool("property." + prop.toLowerCase(Locale.ROOT), false)) {
- propCount++;
- uniques.put(slice.getName(), rep.getName());
- }
- }
- if (1 != propCount) {
- return false;
- }
- }
- return true;
+ waitForState(
+ "Waiting to have exactly one replica with " + prop + " set per shard",
+ COLLECTION_NAME,
+ (liveNodes, docCollection) -> {
+ for (Slice slice : docCollection.getSlices()) {
+ int propCount = 0;
+ for (Replica rep : slice.getReplicas()) {
+ if (rep.getBool("property." + prop.toLowerCase(Locale.ROOT), false)) {
+ propCount++;
+ }
+ }
+ if (1 != propCount) {
+ return false;
+ }
+ }
+ return true;
+ },
+ timeoutMs,
+ TimeUnit.MILLISECONDS);
}
}
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
index 563ed686298..d559de6d333 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
@@ -815,17 +815,17 @@ public void testRebalanceLeaders() throws Exception {
cloudClient.request(request);
// Wait until a preferredleader flag is set to the new leader candidate
- TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (!timeout.hasTimedOut()) {
- Map slices =
- cloudClient.getClusterState().getCollection(collectionName).getSlicesMap();
- Replica me = slices.get(slice.getName()).getReplica(newLeader.getName());
- if (me.getBool("property.preferredleader", false)) {
- break;
- }
- Thread.sleep(100);
- }
- assertFalse("Timeout waiting for setting preferredleader flag", timeout.hasTimedOut());
+ String newLeaderName = newLeader.getName();
+ waitForState(
+ "Waiting for setting preferredleader flag",
+ collectionName,
+ (n, c) -> {
+ Map slices = c.getSlicesMap();
+ Replica me = slices.get(slice.getName()).getReplica(newLeaderName);
+ return me.getBool("property.preferredleader", false);
+ },
+ 10,
+ TimeUnit.SECONDS);
// Rebalance leaders
params = new ModifiableSolrParams();
@@ -837,18 +837,17 @@ public void testRebalanceLeaders() throws Exception {
cloudClient.request(request);
// Wait until a new leader is elected
- timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (!timeout.hasTimedOut()) {
- docCollection = getCollectionState(collectionName);
- Replica leader = docCollection.getSlice(slice.getName()).getLeader();
- if (leader != null
- && leader.getName().equals(newLeader.getName())
- && leader.isActive(cloudClient.getClusterState().getLiveNodes())) {
- break;
- }
- Thread.sleep(100);
- }
- assertFalse("Timeout waiting for a new leader to be elected", timeout.hasTimedOut());
+ waitForState(
+ "Waiting for a new leader to be elected",
+ collectionName,
+ (n, c) -> {
+ Replica leader = c.getSlice(slice.getName()).getLeader();
+ return leader != null
+ && leader.getName().equals(newLeaderName)
+ && leader.isActive(cloudClient.getClusterState().getLiveNodes());
+ },
+ 30,
+ TimeUnit.SECONDS);
new UpdateRequest()
.add(sdoc("id", "1"))
@@ -1027,19 +1026,13 @@ private void waitForNumDocsInAllReplicas(
}
}
- private void waitForDeletion(String collection) throws InterruptedException, KeeperException {
- TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (cluster.getSolrClient().getClusterState().hasCollection(collection)) {
- try {
- Thread.sleep(100);
- if (t.hasTimedOut()) {
- fail("Timed out waiting for collection " + collection + " to be deleted.");
- }
- cluster.getZkStateReader().forceUpdateCollection(collection);
- } catch (SolrException e) {
- return;
- }
- }
+ private void waitForDeletion(String collection) {
+ waitForState(
+ "Waiting for collection " + collection + " to be deleted",
+ collection,
+ (n, c) -> c == null,
+ 10,
+ TimeUnit.SECONDS);
}
private DocCollection assertNumberOfReplicas(
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
index 43f3367f489..28335c920eb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/ShardSplitTest.java
@@ -960,7 +960,7 @@ public void splitByRouteFieldTest() throws Exception {
}
List list = collectionInfos.get(collectionName);
- checkForCollection(collectionName, list, null);
+ checkForCollection(collectionName, list);
waitForRecoveriesToFinish(false);
@@ -1031,7 +1031,7 @@ private void splitByRouteKeyTest() throws Exception {
}
List list = collectionInfos.get(collectionName);
- checkForCollection(collectionName, list, null);
+ checkForCollection(collectionName, list);
waitForRecoveriesToFinish(false);
diff --git a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
index 1ceb9636b20..f2e4aaa3c8f 100644
--- a/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/ExitableDirectoryReaderTest.java
@@ -32,7 +32,7 @@
*/
public class ExitableDirectoryReaderTest extends SolrTestCaseJ4 {
- static int NUM_DOCS = 100;
+ static final int NUM_DOCS = 100;
static final String assertionString = "/response/numFound==" + NUM_DOCS;
static final String failureAssertionString = "/responseHeader/partialResults==true]";
static final String longTimeout = "10000";
@@ -70,9 +70,26 @@ public void testPrefixQuery() throws Exception {
// this time we should get a query cache hit and hopefully no exception? this may change in the
// future if time checks are put into other places.
- assertJQ(req("q", q, "timeAllowed", "1", "sleep", sleep), assertionString);
+
+ // 2024-4-15: it did change..., and now this fails with 1 or 2 ms and passes with 3ms... I see
+ // no way this won't be terribly brittle. Maybe TestInjection of some sort to bring this back?
+
+ // assertJQ(req("q", q, "timeAllowed", "2", "sleep", sleep), assertionString);
+
+ // The idea that the request won't time out due to caching is a flawed test methodology,
+ // It relies on the test running quickly and not stalling. The above test should possibly
+ // be doing something along the lines of this (but we lack api for it)
+ //
+ // SolrCores solrCores = ExitableDirectoryReaderTest.h.getCoreContainer().solrCores;
+ // List cores = solrCores.getCores();
+ // for (SolrCore core : cores) {
+ // if (<<< find the right core >>> ) {
+ // ((SolrCache)core.getSearcher().get().<<>>
+ // }
+ // }
// now do the same for the filter cache
+ // 2024-4-15: this still passes probably because *:* is so fast, but it still worries me
assertJQ(req("q", "*:*", "fq", q, "timeAllowed", "1", "sleep", sleep), failureAssertionString);
// make sure that the result succeeds this time, and that a bad filter wasn't cached
diff --git a/solr/core/src/test/org/apache/solr/search/ComponentStageLimitsTest.java b/solr/core/src/test/org/apache/solr/search/ComponentStageLimitsTest.java
new file mode 100644
index 00000000000..c3c5d72548e
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/ComponentStageLimitsTest.java
@@ -0,0 +1,220 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import static org.apache.solr.response.SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY;
+
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.index.NoMergePolicyFactory;
+import org.apache.solr.util.TestInjection;
+import org.apache.solr.util.ThreadCpuTimer;
+import org.junit.AfterClass;
+import org.junit.Assume;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ComponentStageLimitsTest extends SolrCloudTestCase {
+ private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+ private static final String COLLECTION = "test";
+
+ private static Path createConfigSet() throws Exception {
+ Path configSet = createTempDir();
+ copyMinConf(configSet.toFile());
+ // insert an expensive search component
+ Path solrConfig = configSet.resolve("conf/solrconfig.xml");
+ Files.writeString(
+ solrConfig,
+ Files.readString(solrConfig)
+ .replace(
+ "\n"
+ + "\n"
+ + " ",
+ "class=\"solr.SearchHandler\">\n"
+ + " \n"
+ + " expensiveSearchComponent \n"
+ + " \n"));
+ return configSet.resolve("conf");
+ }
+
+ @BeforeClass
+ public static void setupClass() throws Exception {
+ // Note: copied from TestCpuAllowedLimit.java, Segments not likely important
+ // at the moment, but setup retained for now.
+
+ // Using NoMergePolicy and 100 commits we should get 100 segments (across all shards).
+ // At this point of writing MAX_SEGMENTS_PER_SLICE in lucene is 5, so we should be
+ // ensured that any multithreaded testing will create 20 executable tasks for the
+ // executor that was provided to index-searcher.
+ systemSetPropertySolrTestsMergePolicyFactory(NoMergePolicyFactory.class.getName());
+ System.setProperty(ThreadCpuTimer.ENABLE_CPU_TIME, "true");
+ Path configset = createConfigSet();
+ configureCluster(1).addConfig("conf", configset).configure();
+ SolrClient solrClient = cluster.getSolrClient();
+ CollectionAdminRequest.Create create =
+ CollectionAdminRequest.createCollection(COLLECTION, "conf", 3, 2);
+ create.process(solrClient);
+ waitForState("active", COLLECTION, clusterShape(3, 6));
+ for (int j = 0; j < 100; j++) {
+ solrClient.add(COLLECTION, sdoc("id", "id-" + j, "val_i", j % 5));
+ solrClient.commit(COLLECTION); // need to commit every doc to create many segments.
+ }
+ }
+
+ @AfterClass
+ public static void tearDownClass() {
+ TestInjection.cpuTimerDelayInjectedNS = null;
+ systemClearPropertySolrTestsMergePolicyFactory();
+ }
+
+ @Test
+ public void testLimitPrepare() throws Exception {
+ Assume.assumeTrue("Thread CPU time monitoring is not available", ThreadCpuTimer.isSupported());
+ SolrClient solrClient = cluster.getSolrClient();
+ long sleepMs = 1000;
+ log.info("--- 500ms limit, 1000ms prepare phase - partial results ---");
+ QueryResponse rsp =
+ solrClient.query(
+ COLLECTION,
+ params(
+ "q",
+ "id:*",
+ "sort",
+ "id asc",
+ ExpensiveSearchComponent.SLEEP_MS_PARAM,
+ String.valueOf(sleepMs),
+ "stages",
+ "prepare",
+ "timeAllowed",
+ "500"));
+ System.err.println("rsp=" + rsp.jsonStr());
+ assertEquals(rsp.getHeader().get("status"), 0);
+ assertNotNull("should have partial results", rsp.getHeader().get("partialResults"));
+ String details = (String) rsp.getHeader().get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY);
+ assertNotNull(details);
+ assertTrue(details.contains("[prepare]"));
+ assertTrue(
+ details.contains("exceeded prior to query in [expensiveSearchComponent, query, facet,"));
+ }
+
+ @Test
+ public void testLimitProcess() throws Exception {
+ Assume.assumeTrue("Thread CPU time monitoring is not available", ThreadCpuTimer.isSupported());
+ SolrClient solrClient = cluster.getSolrClient();
+ String msg = "--- 1000ms limit, 2000ms process phase - partial results ---";
+ log.info(msg);
+ System.out.println(msg);
+ int sleepMs = 2000;
+ QueryResponse rsp =
+ solrClient.query(
+ COLLECTION,
+ params(
+ "q",
+ "id:*",
+ "sort",
+ "id asc",
+ ExpensiveSearchComponent.SLEEP_MS_PARAM,
+ String.valueOf(sleepMs),
+ "stages",
+ "process",
+ "timeAllowed",
+ "1000"));
+ System.err.println("rsp=" + rsp.jsonStr());
+ assertEquals(rsp.getHeader().get("status"), 0);
+ String details = (String) rsp.getHeader().get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY);
+ System.out.println("details=" + details);
+ assertNotNull(details);
+ assertTrue(details.contains("[process]"));
+ assertTrue(
+ details.contains("exceeded prior to query in [expensiveSearchComponent, query, facet,"));
+ assertNotNull("should have partial results", rsp.getHeader().get("partialResults"));
+ }
+
+ @Test
+ public void testLimitFinish() throws Exception {
+ Assume.assumeTrue("Thread CPU time monitoring is not available", ThreadCpuTimer.isSupported());
+ SolrClient solrClient = cluster.getSolrClient();
+ long sleepMs = 1000;
+ log.info("--- 500ms limit, 1000ms finish phase - partial results ---");
+ sleepMs = 1000;
+ QueryResponse rsp =
+ solrClient.query(
+ COLLECTION,
+ params(
+ "q",
+ "id:*",
+ "sort",
+ "id asc",
+ ExpensiveSearchComponent.SLEEP_MS_PARAM,
+ String.valueOf(sleepMs),
+ "stages",
+ "finish",
+ "timeAllowed",
+ "500"));
+ System.err.println("rsp=" + rsp.jsonStr());
+ assertEquals(rsp.getHeader().get("status"), 0);
+ String details = (String) rsp.getHeader().get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY);
+ assertNotNull(details);
+ assertTrue(details.contains("[finishStage stage:PARSE_QUERY]"));
+ assertTrue(
+ details.contains("exceeded prior to query in [expensiveSearchComponent, query, facet,"));
+ assertNotNull("should have partial results", rsp.getHeader().get("partialResults"));
+ }
+
+ @Test
+ public void testLimitDistrib() throws Exception {
+ Assume.assumeTrue("Thread CPU time monitoring is not available", ThreadCpuTimer.isSupported());
+ SolrClient solrClient = cluster.getSolrClient();
+ long sleepMs = 1000;
+ log.info("--- 500ms limit, 1000ms distrib phase - partial results ---");
+ sleepMs = 1000;
+ QueryResponse rsp =
+ solrClient.query(
+ COLLECTION,
+ params(
+ "q",
+ "id:*",
+ "sort",
+ "id asc",
+ ExpensiveSearchComponent.SLEEP_MS_PARAM,
+ String.valueOf(sleepMs),
+ "stages",
+ "distrib",
+ "timeAllowed",
+ "500"));
+ System.err.println("rsp=" + rsp.jsonStr());
+ assertEquals(rsp.getHeader().get("status"), 0);
+ String details = (String) rsp.getHeader().get(RESPONSE_HEADER_PARTIAL_RESULTS_DETAILS_KEY);
+ assertNotNull(details);
+ assertTrue(details.contains("[distrib]"));
+ assertTrue(
+ details.contains("exceeded prior to query in [expensiveSearchComponent, query, facet,"));
+ assertNotNull("should have partial results", rsp.getHeader().get("partialResults"));
+ }
+}
diff --git a/solr/core/src/test/org/apache/solr/search/TestCpuAllowedLimit.java b/solr/core/src/test/org/apache/solr/search/TestCpuAllowedLimit.java
index 274994099f1..4611de10eca 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCpuAllowedLimit.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCpuAllowedLimit.java
@@ -169,6 +169,7 @@ public void testDistribLimit() throws Exception {
assertEquals("should have partial results", true, rsp.getHeader().get("partialResults"));
log.info("--- timeAllowed, partial results, multithreading ---");
+
rsp =
solrClient.query(
COLLECTION,
@@ -201,11 +202,11 @@ public void testDistribLimit() throws Exception {
"stages",
"prepare,process",
"cpuAllowed",
- "10000"));
- // System.err.println("rsp=" + rsp.jsonStr());
+ "100000"));
+ System.err.println("rsp=" + rsp.jsonStr());
assertNull("should have full results", rsp.getHeader().get("partialResults"));
// cpuAllowed set, should return partial results
- log.info("--- cpuAllowed 1, partial results ---");
+ log.info("--- cpuAllowed 100, partial results ---");
rsp =
solrClient.query(
COLLECTION,
@@ -222,7 +223,7 @@ public void testDistribLimit() throws Exception {
"false"));
// System.err.println("rsp=" + rsp.jsonStr());
assertNotNull("should have partial results", rsp.getHeader().get("partialResults"));
- log.info("--- cpuAllowed 1, partial results omitted ---");
+ log.info("--- cpuAllowed 100, partial results omitted ---");
rsp =
solrClient.query(
COLLECTION,
@@ -243,10 +244,11 @@ public void testDistribLimit() throws Exception {
"foo"));
String s = rsp.jsonStr();
System.err.println("rsp=" + s);
- assertEquals("should have partial results", "omitted", rsp.getHeader().get("partialResults"));
+ assertEquals(
+ "should not have partial results", "omitted", rsp.getHeader().get("partialResults"));
// cpuAllowed set, should return partial results
- log.info("--- cpuAllowed 2, partial results, multi-threaded ---");
+ log.info("--- cpuAllowed 100, partial results, multi-threaded ---");
rsp =
solrClient.query(
COLLECTION,
diff --git a/solr/licenses/netty-LICENSE-ASL.txt b/solr/licenses/netty-LICENSE-ASL.txt
index d6456956733..62589edd12a 100644
--- a/solr/licenses/netty-LICENSE-ASL.txt
+++ b/solr/licenses/netty-LICENSE-ASL.txt
@@ -1,7 +1,7 @@
Apache License
Version 2.0, January 2004
- http://www.apache.org/licenses/
+ https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -193,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- http://www.apache.org/licenses/LICENSE-2.0
+ https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/solr/licenses/netty-NOTICE.txt b/solr/licenses/netty-NOTICE.txt
index f973663670b..953277d6594 100644
--- a/solr/licenses/netty-NOTICE.txt
+++ b/solr/licenses/netty-NOTICE.txt
@@ -4,7 +4,7 @@
Please visit the Netty web site for more information:
- * http://netty.io/
+ * https://netty.io/
Copyright 2014 The Netty Project
@@ -12,7 +12,7 @@ The Netty Project licenses this file to you under the Apache License,
version 2.0 (the "License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at:
- http://www.apache.org/licenses/LICENSE-2.0
+ https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
@@ -56,7 +56,7 @@ facade for Java, which can be obtained at:
* LICENSE:
* license/LICENSE.slf4j.txt (MIT License)
* HOMEPAGE:
- * http://www.slf4j.org/
+ * https://www.slf4j.org/
This product contains a modified portion of 'Apache Harmony', an open source
Java SE, which can be obtained at:
@@ -66,7 +66,7 @@ Java SE, which can be obtained at:
* LICENSE:
* license/LICENSE.harmony.txt (Apache License 2.0)
* HOMEPAGE:
- * http://archive.apache.org/dist/harmony/
+ * https://archive.apache.org/dist/harmony/
This product contains a modified portion of 'jbzip2', a Java bzip2 compression
and decompression library written by Matthew J. Francis. It can be obtained at:
@@ -125,6 +125,14 @@ and decompression library, which can be obtained at:
* HOMEPAGE:
* https://github.com/jponge/lzma-java
+This product optionally depends on 'zstd-jni', a zstd-jni Java compression
+and decompression library, which can be obtained at:
+
+ * LICENSE:
+ * license/LICENSE.zstd-jni.txt (BSD)
+ * HOMEPAGE:
+ * https://github.com/luben/zstd-jni
+
This product contains a modified portion of 'jfastlz', a Java port of FastLZ compression
and decompression library written by William Kinney. It can be obtained at:
@@ -148,7 +156,7 @@ equivalent functionality. It can be obtained at:
* LICENSE:
* license/LICENSE.bouncycastle.txt (MIT License)
* HOMEPAGE:
- * http://www.bouncycastle.org/
+ * https://www.bouncycastle.org/
This product optionally depends on 'Snappy', a compression library produced
by Google Inc, which can be obtained at:
@@ -162,9 +170,9 @@ This product optionally depends on 'JBoss Marshalling', an alternative Java
serialization API, which can be obtained at:
* LICENSE:
- * license/LICENSE.jboss-marshalling.txt (GNU LGPL 2.1)
+ * license/LICENSE.jboss-marshalling.txt (Apache License 2.0)
* HOMEPAGE:
- * http://www.jboss.org/jbossmarshalling
+ * https://github.com/jboss-remoting/jboss-marshalling
This product optionally depends on 'Caliper', Google's micro-
benchmarking framework, which can be obtained at:
@@ -180,7 +188,7 @@ framework, which can be obtained at:
* LICENSE:
* license/LICENSE.commons-logging.txt (Apache License 2.0)
* HOMEPAGE:
- * http://commons.apache.org/logging/
+ * https://commons.apache.org/logging/
This product optionally depends on 'Apache Log4J', a logging framework, which
can be obtained at:
@@ -188,7 +196,7 @@ can be obtained at:
* LICENSE:
* license/LICENSE.log4j.txt (Apache License 2.0)
* HOMEPAGE:
- * http://logging.apache.org/log4j/
+ * https://logging.apache.org/log4j/
This product optionally depends on 'Aalto XML', an ultra-high performance
non-blocking XML processor, which can be obtained at:
@@ -196,7 +204,7 @@ non-blocking XML processor, which can be obtained at:
* LICENSE:
* license/LICENSE.aalto-xml.txt (Apache License 2.0)
* HOMEPAGE:
- * http://wiki.fasterxml.com/AaltoHome
+ * https://wiki.fasterxml.com/AaltoHome
This product contains a modified version of 'HPACK', a Java implementation of
the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at:
@@ -206,6 +214,22 @@ the HTTP/2 HPACK algorithm written by Twitter. It can be obtained at:
* HOMEPAGE:
* https://github.com/twitter/hpack
+This product contains a modified version of 'HPACK', a Java implementation of
+the HTTP/2 HPACK algorithm written by Cory Benfield. It can be obtained at:
+
+ * LICENSE:
+ * license/LICENSE.hyper-hpack.txt (MIT License)
+ * HOMEPAGE:
+ * https://github.com/python-hyper/hpack/
+
+This product contains a modified version of 'HPACK', a Java implementation of
+the HTTP/2 HPACK algorithm written by Tatsuhiro Tsujikawa. It can be obtained at:
+
+ * LICENSE:
+ * license/LICENSE.nghttp2-hpack.txt (MIT License)
+ * HOMEPAGE:
+ * https://github.com/nghttp2/nghttp2/
+
This product contains a modified portion of 'Apache Commons Lang', a Java library
provides utilities for the java.lang API, which can be obtained at:
@@ -221,3 +245,20 @@ This product contains the Maven wrapper scripts from 'Maven Wrapper', that provi
* license/LICENSE.mvn-wrapper.txt (Apache License 2.0)
* HOMEPAGE:
* https://github.com/takari/maven-wrapper
+
+This product contains the dnsinfo.h header file, that provides a way to retrieve the system DNS configuration on MacOS.
+This private header is also used by Apple's open source
+ mDNSResponder (https://opensource.apple.com/tarballs/mDNSResponder/).
+
+ * LICENSE:
+ * license/LICENSE.dnsinfo.txt (Apple Public Source License 2.0)
+ * HOMEPAGE:
+ * https://www.opensource.apple.com/source/configd/configd-453.19/dnsinfo/dnsinfo.h
+
+This product optionally depends on 'Brotli4j', Brotli compression and
+decompression for Java., which can be obtained at:
+
+ * LICENSE:
+ * license/LICENSE.brotli4j.txt (Apache License 2.0)
+ * HOMEPAGE:
+ * https://github.com/hyperxpro/Brotli4j
diff --git a/solr/licenses/netty-buffer-4.1.112.Final.jar.sha1 b/solr/licenses/netty-buffer-4.1.112.Final.jar.sha1
deleted file mode 100644
index 0ac205b0823..00000000000
--- a/solr/licenses/netty-buffer-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-bdc12df04bb6858890b8aa108060b5b365a26102
diff --git a/solr/licenses/netty-buffer-4.1.114.Final.jar.sha1 b/solr/licenses/netty-buffer-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..720e556e6d8
--- /dev/null
+++ b/solr/licenses/netty-buffer-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+f1d77d15c0b781cd9395a2a956262766fd0c7602
diff --git a/solr/licenses/netty-codec-4.1.112.Final.jar.sha1 b/solr/licenses/netty-codec-4.1.112.Final.jar.sha1
deleted file mode 100644
index 55360968342..00000000000
--- a/solr/licenses/netty-codec-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c87f2ec3d9a97bd2b793d16817abb2bab93a7fc3
diff --git a/solr/licenses/netty-codec-4.1.114.Final.jar.sha1 b/solr/licenses/netty-codec-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..84b726e18a0
--- /dev/null
+++ b/solr/licenses/netty-codec-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+5a49dfa2828d64bf756f670e63259115332744cf
diff --git a/solr/licenses/netty-codec-http-4.1.112.Final.jar.sha1 b/solr/licenses/netty-codec-http-4.1.112.Final.jar.sha1
deleted file mode 100644
index c5a4048f7eb..00000000000
--- a/solr/licenses/netty-codec-http-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-81af1040bfa977f98dd0e1bd9639513ea862ca04
diff --git a/solr/licenses/netty-codec-http-4.1.114.Final.jar.sha1 b/solr/licenses/netty-codec-http-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..c7c0248da60
--- /dev/null
+++ b/solr/licenses/netty-codec-http-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+fbce5a53884275662e68aaad70f88bf7e5d04164
diff --git a/solr/licenses/netty-codec-http2-4.1.112.Final.jar.sha1 b/solr/licenses/netty-codec-http2-4.1.112.Final.jar.sha1
deleted file mode 100644
index d8ff515db81..00000000000
--- a/solr/licenses/netty-codec-http2-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7fa28b510f0f16f4d5d7188b86bef59e048f62f9
diff --git a/solr/licenses/netty-codec-http2-4.1.114.Final.jar.sha1 b/solr/licenses/netty-codec-http2-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..47c75f00256
--- /dev/null
+++ b/solr/licenses/netty-codec-http2-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+19ae07fdf99142a70338f8cda70a3d2edbc8e80a
diff --git a/solr/licenses/netty-codec-socks-4.1.112.Final.jar.sha1 b/solr/licenses/netty-codec-socks-4.1.112.Final.jar.sha1
deleted file mode 100644
index 0b3d39a0cee..00000000000
--- a/solr/licenses/netty-codec-socks-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9aed7e78c467d06a47a45b5b27466380a6427e2f
diff --git a/solr/licenses/netty-codec-socks-4.1.114.Final.jar.sha1 b/solr/licenses/netty-codec-socks-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..decacfe281e
--- /dev/null
+++ b/solr/licenses/netty-codec-socks-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+42b1159cac3d196f6bdbd528e29f0fab9dbaae06
diff --git a/solr/licenses/netty-common-4.1.112.Final.jar.sha1 b/solr/licenses/netty-common-4.1.112.Final.jar.sha1
deleted file mode 100644
index 1659204d787..00000000000
--- a/solr/licenses/netty-common-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b2798069092a981a832b7510d0462ee9efb7a80e
diff --git a/solr/licenses/netty-common-4.1.114.Final.jar.sha1 b/solr/licenses/netty-common-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..e4304152793
--- /dev/null
+++ b/solr/licenses/netty-common-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+862712e292b162c8ccaa7847a6a54df8178f77e5
diff --git a/solr/licenses/netty-handler-4.1.112.Final.jar.sha1 b/solr/licenses/netty-handler-4.1.112.Final.jar.sha1
deleted file mode 100644
index 43be3612a0d..00000000000
--- a/solr/licenses/netty-handler-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3d5e2d5bcc6baeeb8c13a230980c6132a778e036
diff --git a/solr/licenses/netty-handler-4.1.114.Final.jar.sha1 b/solr/licenses/netty-handler-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..3a44d3b3d57
--- /dev/null
+++ b/solr/licenses/netty-handler-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+e56fbde4b9aa628eed15a5dbfbeb97877db88146
diff --git a/solr/licenses/netty-handler-proxy-4.1.112.Final.jar.sha1 b/solr/licenses/netty-handler-proxy-4.1.112.Final.jar.sha1
deleted file mode 100644
index d85e58a7af2..00000000000
--- a/solr/licenses/netty-handler-proxy-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b23c87a85451b3b0e7c3e8e89698cea6831a8418
diff --git a/solr/licenses/netty-handler-proxy-4.1.114.Final.jar.sha1 b/solr/licenses/netty-handler-proxy-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..49fbddcc7db
--- /dev/null
+++ b/solr/licenses/netty-handler-proxy-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+a01071edffb4812009312b461ce5f160cdec9b75
diff --git a/solr/licenses/netty-resolver-4.1.112.Final.jar.sha1 b/solr/licenses/netty-resolver-4.1.112.Final.jar.sha1
deleted file mode 100644
index 95eb0338e52..00000000000
--- a/solr/licenses/netty-resolver-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-58a631d9d44c4ed7cc0dcc9cffa6641da9374d72
diff --git a/solr/licenses/netty-resolver-4.1.114.Final.jar.sha1 b/solr/licenses/netty-resolver-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..c1135b835f7
--- /dev/null
+++ b/solr/licenses/netty-resolver-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+10b23784b23d6a948930f52ba82874f1291b5873
diff --git a/solr/licenses/netty-tcnative-boringssl-static-2.0.61.Final.jar.sha1 b/solr/licenses/netty-tcnative-boringssl-static-2.0.61.Final.jar.sha1
deleted file mode 100644
index 1c3e5f7d66f..00000000000
--- a/solr/licenses/netty-tcnative-boringssl-static-2.0.61.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-944722ba2883fe5825a0c6b38dc727d7151a9371
diff --git a/solr/licenses/netty-tcnative-boringssl-static-2.0.66.Final.jar.sha1 b/solr/licenses/netty-tcnative-boringssl-static-2.0.66.Final.jar.sha1
new file mode 100644
index 00000000000..04aa0028843
--- /dev/null
+++ b/solr/licenses/netty-tcnative-boringssl-static-2.0.66.Final.jar.sha1
@@ -0,0 +1 @@
+7b0abf028c27ce6ad0c17a443a13f175fb4eda27
diff --git a/solr/licenses/netty-tcnative-classes-2.0.61.Final.jar.sha1 b/solr/licenses/netty-tcnative-classes-2.0.61.Final.jar.sha1
deleted file mode 100644
index ca1be9444d7..00000000000
--- a/solr/licenses/netty-tcnative-classes-2.0.61.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4c6ae851ed97921bc6c6b64e019c2b039f49131a
diff --git a/solr/licenses/netty-tcnative-classes-2.0.66.Final.jar.sha1 b/solr/licenses/netty-tcnative-classes-2.0.66.Final.jar.sha1
new file mode 100644
index 00000000000..77a6debf6b2
--- /dev/null
+++ b/solr/licenses/netty-tcnative-classes-2.0.66.Final.jar.sha1
@@ -0,0 +1 @@
+9588bd2f891157538a78d86c945aa34bf9308dda
diff --git a/solr/licenses/netty-transport-4.1.112.Final.jar.sha1 b/solr/licenses/netty-transport-4.1.112.Final.jar.sha1
deleted file mode 100644
index 5edb433d5f4..00000000000
--- a/solr/licenses/netty-transport-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-77cd136dd3843f5e7cbcf68c824975d745c49ddb
diff --git a/solr/licenses/netty-transport-4.1.114.Final.jar.sha1 b/solr/licenses/netty-transport-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..f74dfc80c43
--- /dev/null
+++ b/solr/licenses/netty-transport-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+e0225a575f487904be8517092cbd74e01913533c
diff --git a/solr/licenses/netty-transport-classes-epoll-4.1.112.Final.jar.sha1 b/solr/licenses/netty-transport-classes-epoll-4.1.112.Final.jar.sha1
deleted file mode 100644
index 1364819aa58..00000000000
--- a/solr/licenses/netty-transport-classes-epoll-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-67e590356eb53c20aaabd67f61ae66f628e62e3d
diff --git a/solr/licenses/netty-transport-classes-epoll-4.1.114.Final.jar.sha1 b/solr/licenses/netty-transport-classes-epoll-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..c9046686be2
--- /dev/null
+++ b/solr/licenses/netty-transport-classes-epoll-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+f442c794e6fe89e6974f058bf393353e01fb927d
diff --git a/solr/licenses/netty-transport-native-epoll-4.1.112.Final-linux-x86_64.jar.sha1 b/solr/licenses/netty-transport-native-epoll-4.1.112.Final-linux-x86_64.jar.sha1
deleted file mode 100644
index 9995f7b1048..00000000000
--- a/solr/licenses/netty-transport-native-epoll-4.1.112.Final-linux-x86_64.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-caed6f2ae7aebcef50098a64f55918cb8550d2d0
diff --git a/solr/licenses/netty-transport-native-epoll-4.1.114.Final-linux-x86_64.jar.sha1 b/solr/licenses/netty-transport-native-epoll-4.1.114.Final-linux-x86_64.jar.sha1
new file mode 100644
index 00000000000..672f28c70a2
--- /dev/null
+++ b/solr/licenses/netty-transport-native-epoll-4.1.114.Final-linux-x86_64.jar.sha1
@@ -0,0 +1 @@
+43268d2bef66e72e5a7956045a3caf8395f49ae6
diff --git a/solr/licenses/netty-transport-native-unix-common-4.1.112.Final.jar.sha1 b/solr/licenses/netty-transport-native-unix-common-4.1.112.Final.jar.sha1
deleted file mode 100644
index 265f56097e8..00000000000
--- a/solr/licenses/netty-transport-native-unix-common-4.1.112.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b50ff619cdcdc48e748cba3405c9988529f28f60
diff --git a/solr/licenses/netty-transport-native-unix-common-4.1.114.Final.jar.sha1 b/solr/licenses/netty-transport-native-unix-common-4.1.114.Final.jar.sha1
new file mode 100644
index 00000000000..9b40cb77ea2
--- /dev/null
+++ b/solr/licenses/netty-transport-native-unix-common-4.1.114.Final.jar.sha1
@@ -0,0 +1 @@
+d1171bb99411f282068f49d780cedf8c9adeabfd
diff --git a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/SharedFileSystemAutoReplicaFailoverTest.java b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/SharedFileSystemAutoReplicaFailoverTest.java
index 329cc7fee11..34e3b2a1492 100644
--- a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/SharedFileSystemAutoReplicaFailoverTest.java
+++ b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/SharedFileSystemAutoReplicaFailoverTest.java
@@ -31,6 +31,7 @@
import java.util.concurrent.Future;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import java.util.stream.Collectors;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.lucene.tests.util.LuceneTestCase;
@@ -57,13 +58,11 @@
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.SolrNamedThreadFactory;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.embedded.JettySolrRunner;
import org.apache.solr.hdfs.util.BadHdfsThreadsFilter;
import org.apache.solr.util.LogLevel;
import org.apache.solr.util.TestInjection;
-import org.apache.solr.util.TimeOut;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@@ -425,37 +424,43 @@ private boolean waitingForReplicasNotLive(
private void assertSliceAndReplicaCount(
String collection, int numSlices, int numReplicas, int timeOutInMs)
- throws InterruptedException, IOException {
- TimeOut timeOut = new TimeOut(timeOutInMs, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME);
- while (!timeOut.hasTimedOut()) {
- ClusterState clusterState = cloudClient.getClusterState();
- Collection slices = clusterState.getCollection(collection).getActiveSlices();
- if (slices.size() == numSlices) {
- boolean isMatch = true;
- for (Slice slice : slices) {
- int count = 0;
- for (Replica replica : slice.getReplicas()) {
- if (replica.getState() == Replica.State.ACTIVE
- && clusterState.liveNodesContain(replica.getNodeName())) {
- count++;
- }
- }
- if (count < numReplicas) {
- isMatch = false;
- }
- }
- if (isMatch) return;
- }
- Thread.sleep(200);
+ throws InterruptedException {
+
+ try {
+ ZkStateReader.from(cloudClient)
+ .waitForState(
+ collection,
+ timeOutInMs,
+ TimeUnit.MILLISECONDS,
+ (liveNodes, c) -> {
+ Collection slices = c.getActiveSlices();
+ if (slices.size() == numSlices) {
+ for (Slice slice : slices) {
+ int count = 0;
+ for (Replica replica : slice.getReplicas()) {
+ if (replica.getState() == Replica.State.ACTIVE
+ && liveNodes.contains(replica.getNodeName())) {
+ count++;
+ }
+ }
+ if (count < numReplicas) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ });
+ } catch (TimeoutException e) {
+ fail(
+ "Expected numSlices="
+ + numSlices
+ + " numReplicas="
+ + numReplicas
+ + " but found "
+ + cloudClient.getClusterState().getCollection(collection)
+ + " with /live_nodes: "
+ + cloudClient.getClusterState().getLiveNodes());
}
- fail(
- "Expected numSlices="
- + numSlices
- + " numReplicas="
- + numReplicas
- + " but found "
- + cloudClient.getClusterState().getCollection(collection)
- + " with /live_nodes: "
- + cloudClient.getClusterState().getLiveNodes());
}
}
diff --git a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/StressHdfsTest.java b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/StressHdfsTest.java
index 7bb98b8d97b..326daaddc03 100644
--- a/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/StressHdfsTest.java
+++ b/solr/modules/hdfs/src/test/org/apache/solr/hdfs/cloud/StressHdfsTest.java
@@ -48,9 +48,7 @@
import org.apache.solr.common.params.CollectionParams.CollectionAction;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.hdfs.util.BadHdfsThreadsFilter;
-import org.apache.solr.util.TimeOut;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -232,14 +230,8 @@ private void createAndDeleteCollection() throws Exception {
request.setPath("/admin/collections");
solrClient.request(request);
- final TimeOut timeout = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- while (cloudClient.getClusterState().hasCollection(DELETE_DATA_DIR_COLLECTION)) {
- if (timeout.hasTimedOut()) {
- throw new AssertionError("Timeout waiting to see removed collection leave clusterstate");
- }
-
- Thread.sleep(200);
- }
+ waitForCollectionToDisappear(
+ DELETE_DATA_DIR_COLLECTION, ZkStateReader.from(cloudClient), true, 10);
// check that all dirs are gone
for (String dataDir : dataDirs) {
diff --git a/solr/modules/opentelemetry/build.gradle b/solr/modules/opentelemetry/build.gradle
index 4426f67649e..2c7546af9fe 100644
--- a/solr/modules/opentelemetry/build.gradle
+++ b/solr/modules/opentelemetry/build.gradle
@@ -26,6 +26,7 @@ dependencies {
implementation platform('io.opentelemetry:opentelemetry-bom')
implementation 'org.slf4j:slf4j-api'
+ implementation platform('io.netty:netty-bom')
implementation 'io.opentelemetry:opentelemetry-api'
implementation 'io.opentelemetry:opentelemetry-sdk-extension-autoconfigure'
runtimeOnly 'io.opentelemetry:opentelemetry-exporter-otlp'
diff --git a/solr/packaging/test/bats_helper.bash b/solr/packaging/test/bats_helper.bash
index 68c29686a99..2d8fbb18375 100644
--- a/solr/packaging/test/bats_helper.bash
+++ b/solr/packaging/test/bats_helper.bash
@@ -71,7 +71,7 @@ delete_all_collections() {
local collection_list="$(solr zk ls /collections -z localhost:${ZK_PORT})"
for collection in $collection_list; do
if [[ -n $collection ]]; then
- solr delete -c $collection >/dev/null 2>&1
+ solr delete -c $collection --delete-config >/dev/null 2>&1
fi
done
}
diff --git a/solr/packaging/test/test_basic_auth.bats b/solr/packaging/test/test_basic_auth.bats
index 333d5cf73d3..a84d9d15d4a 100644
--- a/solr/packaging/test/test_basic_auth.bats
+++ b/solr/packaging/test/test_basic_auth.bats
@@ -57,9 +57,10 @@ teardown() {
assert_output --partial '"numFound":0'
# Test delete
- run solr delete --credentials name:password -c COLL_NAME -z localhost:${ZK_PORT} --verbose
+ run solr delete --credentials name:password -c COLL_NAME -z localhost:${ZK_PORT} --delete-config --verbose
assert_output --partial "Deleted collection 'COLL_NAME'"
refute collection_exists "COLL_NAME"
+ refute config_exists "COLL_NAME"
}
diff --git a/solr/packaging/test/test_create.bats b/solr/packaging/test/test_create.bats
index a7a4c6b15d8..e9199358253 100644
--- a/solr/packaging/test/test_create.bats
+++ b/solr/packaging/test/test_create.bats
@@ -39,3 +39,8 @@ teardown() {
run solr create -c COLL_NAME
assert_output --partial "Created collection 'COLL_NAME'"
}
+
+@test "multiple connection options are prevented" {
+ run solr create -c COLL_NAME2 --solr-url http://localhost:${SOLR_PORT} -z localhost:${ZK_PORT}
+ assert_output --partial "The option 'z' was specified but an option from this group has already been selected: 's'"
+}
diff --git a/solr/packaging/test/test_delete_collection.bats b/solr/packaging/test/test_delete_collection.bats
index d5db4263e70..0ff455d1b27 100644
--- a/solr/packaging/test/test_delete_collection.bats
+++ b/solr/packaging/test/test_delete_collection.bats
@@ -44,6 +44,7 @@ teardown() {
solr delete -c "COLL_NAME"
refute collection_exists "COLL_NAME"
+ assert config_exists "COLL_NAME"
}
@test "can delete collections with solr-url" {
@@ -52,13 +53,14 @@ teardown() {
solr delete -c "COLL_NAME" --solr-url http://localhost:${SOLR_PORT}
refute collection_exists "COLL_NAME"
+ assert config_exists "COLL_NAME"
}
@test "collection delete also deletes zk config" {
solr create -c "COLL_NAME"
assert config_exists "COLL_NAME"
- solr delete -c "COLL_NAME"
+ solr delete -c "COLL_NAME" --delete-config
refute config_exists "COLL_NAME"
}
@@ -66,7 +68,7 @@ teardown() {
solr create -c "COLL_NAME" -n "NONDEFAULT_CONFIG_NAME"
assert config_exists "NONDEFAULT_CONFIG_NAME"
- solr delete -c "COLL_NAME"
+ solr delete -c "COLL_NAME" --delete-config
refute config_exists "NONDEFAULT_CONFIG_NAME"
}
@@ -74,6 +76,6 @@ teardown() {
solr create -c "COLL_NAME"
assert config_exists "COLL_NAME"
- solr delete -c "COLL_NAME" --delete-config false
+ solr delete -c "COLL_NAME"
assert config_exists "COLL_NAME"
}
diff --git a/solr/packaging/test/test_status.bats b/solr/packaging/test/test_status.bats
index d1f8a53bcb4..5399a0b9fd6 100644
--- a/solr/packaging/test/test_status.bats
+++ b/solr/packaging/test/test_status.bats
@@ -53,11 +53,14 @@ teardown() {
solr stop
}
+@test "multiple connection options are prevented" {
+ run solr status --port ${SOLR_PORT} --solr-url http://localhost:${SOLR_PORT}
+ assert_output --partial "The option 's' was specified but an option from this group has already been selected: 'p'"
+}
+
@test "status with invalid --solr-url from user" {
- solr start
run solr status --solr-url http://invalidhost:${SOLR_PORT}
assert_output --partial "Solr at http://invalidhost:${SOLR_PORT} not online"
- solr stop
}
@test "status with --short format" {
diff --git a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
index ef24cda66a2..ebcff4e87a6 100644
--- a/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
+++ b/solr/prometheus-exporter/src/java/org/apache/solr/prometheus/exporter/SolrExporter.java
@@ -301,7 +301,7 @@ public static void main(String[] args) {
}
String configFile = commandLine.getOptionValue(configOption, DEFAULT_CONFIG);
- int numberOfThreads = commandLine.getParsedOptionValue("num-threads", DEFAULT_NUM_THREADS);
+ int numberOfThreads = commandLine.getParsedOptionValue(numThreadsOption, DEFAULT_NUM_THREADS);
int scrapeInterval =
commandLine.getParsedOptionValue(scrapeIntervalOption, DEFAULT_SCRAPE_INTERVAL);
diff --git a/solr/solr-ref-guide/modules/deployment-guide/pages/solr-control-script-reference.adoc b/solr/solr-ref-guide/modules/deployment-guide/pages/solr-control-script-reference.adoc
index c0e18e46091..49b4ffab143 100644
--- a/solr/solr-ref-guide/modules/deployment-guide/pages/solr-control-script-reference.adoc
+++ b/solr/solr-ref-guide/modules/deployment-guide/pages/solr-control-script-reference.adoc
@@ -794,10 +794,10 @@ The `delete` command detects the mode that Solr is running in and then deletes t
`bin/solr delete --help`
-If you're deleting a collection in SolrCloud mode, the default behavior is to also delete the configuration directory from Zookeeper so long as it is not being used by another collection.
+If you're deleting a collection in SolrCloud mode, the default behavior is to leave the configuration directory in ZooKeeper. If you want to delete the configuration then you need to pass in `--delete-config` as well.
-For example, if you created a collection with `bin/solr create -c contacts`, then the delete command `bin/solr delete -c contacts` will check to see if the `/configs/contacts` configuration directory is being used by any other collections.
-If not, then the `/configs/contacts` directory is removed from ZooKeeper. You can override this behavior by passing `--delete-config false` when running this command.atom
+For example, if you created a collection with `bin/solr create -c contacts`, then the delete command `bin/solr delete -c contacts --delete-config` will check to see if the `/configs/contacts` configuration directory is being used by any other collections.
+If not, then the `/configs/contacts` directory is removed from ZooKeeper.
==== Delete Collection or Core Parameters
@@ -816,14 +816,14 @@ Name of the collection or core to delete.
+
[%autowidth,frame=none]
|===
-|Optional |Default: `true`
+|Optional |Default: none
|===
+
-Whether or not the configuration directory should also be deleted from ZooKeeper.
+Specify that the configuration directory should also be deleted from ZooKeeper.
+
-If the configuration directory is being used by another collection, then it will not be deleted even if you pass `--delete-config` as `true`.
+If the configuration directory is being used by another collection, then it will not be deleted.
+
-*Example*: `bin/solr delete --delete-config false`
+*Example*: `bin/solr delete --delete-config`
`-f` or `--force`::
+
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
index af10bc83cff..33440aec93b 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
@@ -44,6 +44,9 @@ Some key changes that you may run into are:
To learn about the updated options in each CLI tool, use the `--help` option or look up the tool in the documentation.
+Additionally, the `bin/solr delete` command no longer deletes a configset when you delete a collection. Previously, if you deleted a collection, it would also delete its associated configset if it was the only user of it.
+Now you have to explicitly provide a `--delete-config` option to delete the configsets. This decouples the lifecycle of a configset from that of a collection.
+
=== SolrJ
* Starting in 10, the Maven POM for SolrJ does not refer to SolrJ modules like ZooKeeper. If you require such functionality, you need to add additional dependencies.
diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
index a9789c8a141..186788c8914 100644
--- a/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
+++ b/solr/solrj-zookeeper/src/java/org/apache/solr/client/solrj/impl/SolrClientNodeStateProvider.java
@@ -39,7 +39,6 @@
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
@@ -79,23 +78,18 @@ protected void readReplicaDetails() throws IOException {
if (clusterState == null) { // zkStateReader still initializing
return;
}
- Map all =
- clusterStateProvider.getClusterState().getCollectionStates();
- all.forEach(
- (collName, ref) -> {
- DocCollection coll = ref.get();
- if (coll == null) return;
- coll.forEachReplica(
- (shard, replica) -> {
- Map>> nodeData =
- nodeVsCollectionVsShardVsReplicaInfo.computeIfAbsent(
- replica.getNodeName(), k -> new HashMap<>());
- Map> collData =
- nodeData.computeIfAbsent(collName, k -> new HashMap<>());
- List replicas = collData.computeIfAbsent(shard, k -> new ArrayList<>());
- replicas.add((Replica) replica.clone());
- });
- });
+ clusterState.forEachCollection(
+ coll ->
+ coll.forEachReplica(
+ (shard, replica) -> {
+ Map>> nodeData =
+ nodeVsCollectionVsShardVsReplicaInfo.computeIfAbsent(
+ replica.getNodeName(), k -> new HashMap<>());
+ Map> collData =
+ nodeData.computeIfAbsent(coll.getName(), k -> new HashMap<>());
+ List replicas = collData.computeIfAbsent(shard, k -> new ArrayList<>());
+ replicas.add((Replica) replica.clone());
+ }));
}
@Override
diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index be5c4249d0a..ff559e5f84b 100644
--- a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -634,7 +634,7 @@ private void constructState(Set changedCollections) {
collectionWatches.watchedCollections(),
collectionWatches.activeCollections(),
lazyCollectionStates.keySet(),
- clusterState.getCollectionStates());
+ clusterState.collectionStream().toList());
}
notifyCloudCollectionsListeners();
@@ -1432,8 +1432,7 @@ private DocCollection fetchCollectionState(String coll, Watcher watcher)
zkClient,
Instant.ofEpochMilli(stat.getCtime()));
- ClusterState.CollectionRef collectionRef = state.getCollectionStates().get(coll);
- return collectionRef == null ? null : collectionRef.get();
+ return state.getCollectionOrNull(coll);
} catch (KeeperException.NoNodeException e) {
if (watcher != null) {
// Leave an exists watch in place in case a state.json is created later.
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
index ea5ecf396f7..e31f3c08393 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java
@@ -306,9 +306,12 @@ public synchronized Endpoint nextOrError(Exception previousEx) throws SolrServer
suffix = ":" + zombieServers.keySet();
}
// Skipping check time exceeded for the first request
- if (numServersTried > 0 && isTimeExceeded(timeAllowedNano, timeOutTime)) {
+ // Ugly string-based hack, but the "no live servers" message here is VERY misleading :(
+ if ((previousEx != null && previousEx.getMessage().contains("Limits exceeded!"))
+ || (numServersTried > 0 && isTimeExceeded(timeAllowedNano, timeOutTime))) {
throw new SolrServerException(
- "Time allowed to handle this request exceeded" + suffix, previousEx);
+ "The processing limits for this request were exceeded, see cause for details",
+ previousEx);
}
if (endpoint == null) {
throw new SolrServerException(
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
index b1a42644a9d..ce607ac8637 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java
@@ -35,6 +35,7 @@
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
+import java.util.stream.Stream;
import org.apache.solr.common.MapWriter;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
@@ -391,7 +392,10 @@ void setLiveNodes(Set liveNodes) {
/**
* Be aware that this may return collections which may not exist now. You can confirm that this
* collection exists after verifying CollectionRef.get() != null
+ *
+ * @deprecated see {@link #collectionStream()}
*/
+ @Deprecated
public Map getCollectionStates() {
return immutableCollectionStates;
}
@@ -411,28 +415,19 @@ public Set getHostAllowList() {
}
/**
- * Iterate over collections. Unlike {@link #getCollectionStates()} collections passed to the
- * consumer are guaranteed to exist.
- *
- * @param consumer collection consumer.
+ * Streams the resolved {@link DocCollection}s. Use this sparingly in case there are many
+ * collections.
+ */
+ public Stream collectionStream() {
+ return collectionStates.values().stream().map(CollectionRef::get).filter(Objects::nonNull);
+ }
+
+ /**
+ * Calls {@code consumer} with each resolved {@link DocCollection}. Use this
+ * sparingly in case there are many collections.
*/
public void forEachCollection(Consumer consumer) {
- collectionStates.forEach(
- (s, collectionRef) -> {
- try {
- DocCollection collection = collectionRef.get();
- if (collection != null) {
- consumer.accept(collection);
- }
- } catch (SolrException e) {
- if (e.getCause() != null
- && e.getCause().getClass().getName().endsWith("NoNodeException")) {
- // don't do anything. This collection does not exist
- } else {
- throw e;
- }
- }
- });
+ collectionStream().forEach(consumer);
}
public static class CollectionRef {
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
index fb138876b30..8c8a115636e 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeyNothingIsSafeTestBase.java
@@ -303,7 +303,7 @@ public void test() throws Exception {
List numShardsNumReplicas = new ArrayList<>(2);
numShardsNumReplicas.add(1);
numShardsNumReplicas.add(1);
- checkForCollection("testcollection", numShardsNumReplicas, null);
+ checkForCollection("testcollection", numShardsNumReplicas);
testSuccessful = true;
} finally {
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeySafeLeaderTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeySafeLeaderTestBase.java
index ab765d32699..ef371efcad4 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeySafeLeaderTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractChaosMonkeySafeLeaderTestBase.java
@@ -203,7 +203,7 @@ public void test() throws Exception {
List numShardsNumReplicas = new ArrayList<>(2);
numShardsNumReplicas.add(1);
numShardsNumReplicas.add(1);
- checkForCollection("testcollection", numShardsNumReplicas, null);
+ checkForCollection("testcollection", numShardsNumReplicas);
}
private void tryDelete() throws Exception {
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
index 37c90d98db9..f256a5fb937 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java
@@ -16,7 +16,6 @@
*/
package org.apache.solr.cloud;
-import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import java.io.File;
@@ -41,7 +40,6 @@
import org.apache.solr.core.Diagnostics;
import org.apache.solr.core.MockDirectoryFactory;
import org.apache.solr.embedded.JettySolrRunner;
-import org.apache.solr.util.TimeOut;
import org.apache.zookeeper.KeeperException;
import org.junit.BeforeClass;
import org.slf4j.Logger;
@@ -242,65 +240,42 @@ public static void waitForCollectionToDisappear(
log.info("Collection has disappeared - collection:{}", collection);
}
- static void waitForNewLeader(
- CloudSolrClient cloudClient, String shardName, Replica oldLeader, TimeOut timeOut)
+ static void waitForNewLeader(CloudSolrClient cloudClient, String shardName, Replica oldLeader)
throws Exception {
- log.info("Will wait for a node to become leader for {} secs", timeOut.timeLeft(SECONDS));
+ log.info("Will wait for a node to become leader for 15 secs");
ZkStateReader zkStateReader = ZkStateReader.from(cloudClient);
- zkStateReader.forceUpdateCollection(DEFAULT_COLLECTION);
-
- for (; ; ) {
- ClusterState clusterState = zkStateReader.getClusterState();
- DocCollection coll = clusterState.getCollection("collection1");
- Slice slice = coll.getSlice(shardName);
- if (slice.getLeader() != null
- && !slice.getLeader().equals(oldLeader)
- && slice.getLeader().getState() == Replica.State.ACTIVE) {
- if (log.isInfoEnabled()) {
- log.info(
- "Old leader {}, new leader {}. New leader got elected in {} ms",
- oldLeader,
- slice.getLeader(),
- timeOut.timeElapsed(MILLISECONDS));
- }
- break;
- }
-
- if (timeOut.hasTimedOut()) {
- Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
- zkStateReader.getZkClient().printLayoutToStream(System.out);
- fail(
- "Could not find new leader even after waiting for "
- + timeOut.timeElapsed(MILLISECONDS)
- + "ms");
- }
- Thread.sleep(100);
- }
+ long startNs = System.nanoTime();
+ try {
+ zkStateReader.waitForState(
+ "collection1",
+ 15,
+ TimeUnit.SECONDS,
+ (docCollection) -> {
+ if (docCollection == null) return false;
- zkStateReader.waitForState(
- "collection1",
- timeOut.timeLeft(SECONDS),
- TimeUnit.SECONDS,
- (docCollection) -> {
- if (docCollection == null) return false;
-
- Slice slice = docCollection.getSlice(shardName);
- if (slice != null
- && slice.getLeader() != null
- && !slice.getLeader().equals(oldLeader)
- && slice.getLeader().getState() == Replica.State.ACTIVE) {
- if (log.isInfoEnabled()) {
- log.info(
- "Old leader {}, new leader {}. New leader got elected in {} ms",
- oldLeader,
- slice.getLeader(),
- timeOut.timeElapsed(MILLISECONDS));
+ Slice slice = docCollection.getSlice(shardName);
+ if (slice != null
+ && slice.getLeader() != null
+ && !slice.getLeader().equals(oldLeader)
+ && slice.getLeader().getState() == Replica.State.ACTIVE) {
+ if (log.isInfoEnabled()) {
+ log.info(
+ "Old leader {}, new leader {}. New leader got elected in {} ms",
+ oldLeader,
+ slice.getLeader(),
+ TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNs));
+ }
+ return true;
}
- return true;
- }
- return false;
- });
+ return false;
+ });
+ } catch (TimeoutException e) {
+ // If we failed to get a new leader, print some diagnostics before the test fails
+ Diagnostics.logThreadDumps("Could not find new leader in specified timeout");
+ zkStateReader.getZkClient().printLayoutToStream(System.out);
+ fail("Could not find new leader even after waiting for 15s");
+ }
}
public static void verifyReplicaStatus(
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index dcdab4e86d1..003ef951b0f 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -44,6 +44,7 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
@@ -2217,83 +2218,60 @@ public static SolrInputDocument getDoc(Object... fields) throws Exception {
return sdoc(fields);
}
- private String checkCollectionExpectations(
- String collectionName,
- List numShardsNumReplicaList,
- List nodesAllowedToRunShards)
- throws IOException {
- getCommonCloudSolrClient();
- ClusterState clusterState = cloudClient.getClusterState();
- int expectedSlices = numShardsNumReplicaList.get(0);
- // The Math.min thing is here, because we expect replication-factor to be reduced to if there
- // are not enough live nodes to spread all shards of a collection over different nodes
- int expectedShardsPerSlice = numShardsNumReplicaList.get(1);
- int expectedTotalShards = expectedSlices * expectedShardsPerSlice;
-
- // Map collections = clusterState
- // .getCollectionStates();
- if (clusterState.hasCollection(collectionName)) {
- Map slices = clusterState.getCollection(collectionName).getSlicesMap();
- // did we find expectedSlices slices/shards?
- if (slices.size() != expectedSlices) {
- return "Found new collection "
- + collectionName
- + ", but mismatch on number of slices. Expected: "
- + expectedSlices
- + ", actual: "
- + slices.size();
- }
- int totalShards = 0;
- for (String sliceName : slices.keySet()) {
- for (Replica replica : slices.get(sliceName).getReplicas()) {
- if (nodesAllowedToRunShards != null
- && !nodesAllowedToRunShards.contains(replica.getStr(ZkStateReader.NODE_NAME_PROP))) {
- return "Shard "
- + replica.getName()
- + " created on node "
- + replica.getNodeName()
- + " not allowed to run shards for the created collection "
- + collectionName;
- }
- }
- totalShards += slices.get(sliceName).getReplicas().size();
- }
- if (totalShards != expectedTotalShards) {
- return "Found new collection "
- + collectionName
- + " with correct number of slices, but mismatch on number of shards. Expected: "
- + expectedTotalShards
- + ", actual: "
- + totalShards;
- }
- return null;
- } else {
- return "Could not find new collection " + collectionName;
- }
- }
-
- protected void checkForCollection(
- String collectionName,
- List numShardsNumReplicaList,
- List nodesAllowedToRunShards)
+ protected void checkForCollection(String collectionName, List numShardsNumReplicaList)
throws Exception {
// check for an expectedSlices new collection - we poll the state
- final TimeOut timeout = new TimeOut(120, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- boolean success = false;
- String checkResult = "Didnt get to perform a single check";
- while (!timeout.hasTimedOut()) {
- checkResult =
- checkCollectionExpectations(
- collectionName, numShardsNumReplicaList, nodesAllowedToRunShards);
- if (checkResult == null) {
- success = true;
- break;
- }
- Thread.sleep(500);
- }
- if (!success) {
- super.printLayout();
- fail(checkResult);
+ ZkStateReader reader = ZkStateReader.from(cloudClient);
+
+ AtomicReference message = new AtomicReference<>();
+ try {
+ reader.waitForState(
+ collectionName,
+ 120,
+ TimeUnit.SECONDS,
+ c -> {
+ int expectedSlices = numShardsNumReplicaList.get(0);
+ // The Math.min thing is here, because we expect replication-factor to be reduced if
+ // there are not enough live nodes to spread all shards of a collection over different
+ // nodes.
+ int expectedShardsPerSlice = numShardsNumReplicaList.get(1);
+ int expectedTotalShards = expectedSlices * expectedShardsPerSlice;
+
+ if (c != null) {
+ Collection slices = c.getSlices();
+ // did we find expectedSlices slices/shards?
+ if (slices.size() != expectedSlices) {
+ message.set(
+ "Found new collection "
+ + collectionName
+ + ", but mismatch on number of slices. Expected: "
+ + expectedSlices
+ + ", actual: "
+ + slices.size());
+ return false;
+ }
+ int totalShards = 0;
+ for (Slice slice : slices) {
+ totalShards += slice.getReplicas().size();
+ }
+ if (totalShards != expectedTotalShards) {
+ message.set(
+ "Found new collection "
+ + collectionName
+ + " with correct number of slices, but mismatch on number of shards. Expected: "
+ + expectedTotalShards
+ + ", actual: "
+ + totalShards);
+ return false;
+ }
+ return true;
+ } else {
+ message.set("Could not find new collection " + collectionName);
+ return false;
+ }
+ });
+ } catch (TimeoutException e) {
+ fail(message.get());
}
}
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractUnloadDistributedZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractUnloadDistributedZkTestBase.java
index 2f77b69b27b..ee7fac68736 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractUnloadDistributedZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractUnloadDistributedZkTestBase.java
@@ -26,6 +26,7 @@
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
@@ -33,7 +34,6 @@
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest.Unload;
import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
@@ -41,12 +41,10 @@
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.SolrNamedThreadFactory;
-import org.apache.solr.common.util.TimeSource;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrPaths;
import org.apache.solr.embedded.JettySolrRunner;
import org.apache.solr.util.TestInjection;
-import org.apache.solr.util.TimeOut;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -97,42 +95,37 @@ private SolrClient newSolrClient(String url) {
private void checkCoreNamePresenceAndSliceCount(
String collectionName, String coreName, boolean shouldBePresent, int expectedSliceCount)
throws Exception {
- final TimeOut timeout = new TimeOut(45, TimeUnit.SECONDS, TimeSource.NANO_TIME);
- Boolean isPresent = null; // null meaning "don't know"
- while (null == isPresent || shouldBePresent != isPresent) {
- getCommonCloudSolrClient();
- final DocCollection docCollection =
- cloudClient.getClusterState().getCollectionOrNull(collectionName);
- final Collection slices =
- (docCollection != null) ? docCollection.getSlices() : Collections.emptyList();
- if (timeout.hasTimedOut()) {
- printLayout();
- fail(
- "checkCoreNamePresenceAndSliceCount failed:"
- + " collection="
- + collectionName
- + " CoreName="
- + coreName
- + " shouldBePresent="
- + shouldBePresent
- + " isPresent="
- + isPresent
- + " expectedSliceCount="
- + expectedSliceCount
- + " actualSliceCount="
- + slices.size());
- }
- if (expectedSliceCount == slices.size()) {
- isPresent = false;
- for (Slice slice : slices) {
- for (Replica replica : slice.getReplicas()) {
- if (coreName.equals(replica.get("core"))) {
- isPresent = true;
+ ZkStateReader reader = ZkStateReader.from(cloudClient);
+ try {
+ reader.waitForState(
+ collectionName,
+ 45,
+ TimeUnit.SECONDS,
+ c -> {
+ final Collection slices = (c != null) ? c.getSlices() : Collections.emptyList();
+ if (expectedSliceCount == slices.size()) {
+ for (Slice slice : slices) {
+ for (Replica replica : slice.getReplicas()) {
+ if (coreName.equals(replica.get("core"))) {
+ return shouldBePresent;
+ }
+ }
+ }
+ return !shouldBePresent;
+ } else {
+ return false;
}
- }
- }
- }
- Thread.sleep(1000);
+ });
+ } catch (TimeoutException e) {
+ printLayout();
+ fail(
+ "checkCoreNamePresenceAndSliceCount failed:"
+ + " collection="
+ + collectionName
+ + " CoreName="
+ + coreName
+ + " shouldBePresent="
+ + shouldBePresent);
}
}
diff --git a/versions.lock b/versions.lock
index 11c37b2acd7..04ea37b692a 100644
--- a/versions.lock
+++ b/versions.lock
@@ -22,7 +22,7 @@ com.github.jai-imageio:jai-imageio-core:1.4.0 (1 constraints: 5c0ced01)
com.github.junrar:junrar:7.5.3 (1 constraints: 660c1102)
com.github.luben:zstd-jni:1.5.6-3 (1 constraints: 5f0d6136)
com.github.openjson:openjson:1.0.12 (1 constraints: 8b0c6d0e)
-com.github.spotbugs:spotbugs-annotations:4.8.0 (1 constraints: 0e051736)
+com.github.spotbugs:spotbugs-annotations:4.8.6 (1 constraints: 14051d36)
com.github.stephenc.jcip:jcip-annotations:1.0-1 (3 constraints: c71d2c87)
com.github.virtuald:curvesapi:1.07 (1 constraints: 9e0ac7c0)
com.google.android:annotations:4.1.1.4 (2 constraints: b918820a)
@@ -125,21 +125,22 @@ io.grpc:grpc-services:1.65.1 (1 constraints: 1f100ba6)
io.grpc:grpc-stub:1.65.1 (2 constraints: 5d15c9d7)
io.grpc:grpc-util:1.65.1 (2 constraints: ec1876f9)
io.grpc:grpc-xds:1.65.1 (1 constraints: 1f100ba6)
-io.netty:netty-buffer:4.1.112.Final (10 constraints: 329b3f3f)
-io.netty:netty-codec:4.1.112.Final (5 constraints: 2346668a)
-io.netty:netty-codec-http:4.1.112.Final (3 constraints: c724f93e)
-io.netty:netty-codec-http2:4.1.112.Final (1 constraints: 0f0b42d5)
-io.netty:netty-codec-socks:4.1.112.Final (1 constraints: 3b0fa57a)
-io.netty:netty-common:4.1.112.Final (12 constraints: f8b40f68)
-io.netty:netty-handler:4.1.112.Final (3 constraints: e52b7aa2)
-io.netty:netty-handler-proxy:4.1.112.Final (1 constraints: 0f0b42d5)
-io.netty:netty-resolver:4.1.112.Final (2 constraints: a61a1f5d)
-io.netty:netty-tcnative-boringssl-static:2.0.61.Final (1 constraints: d10fc38e)
-io.netty:netty-tcnative-classes:2.0.61.Final (1 constraints: d113ea5d)
-io.netty:netty-transport:4.1.112.Final (9 constraints: 588dbbb9)
-io.netty:netty-transport-classes-epoll:4.1.112.Final (1 constraints: d8128f30)
-io.netty:netty-transport-native-epoll:4.1.112.Final (1 constraints: 0310df9d)
-io.netty:netty-transport-native-unix-common:4.1.112.Final (4 constraints: ef3d2c48)
+io.netty:netty-bom:4.1.114.Final (1 constraints: 8507c567)
+io.netty:netty-buffer:4.1.114.Final (11 constraints: 73a6ad35)
+io.netty:netty-codec:4.1.114.Final (6 constraints: 5a5150a9)
+io.netty:netty-codec-http:4.1.114.Final (4 constraints: fa2f72f8)
+io.netty:netty-codec-http2:4.1.114.Final (2 constraints: 3c16cf31)
+io.netty:netty-codec-socks:4.1.114.Final (2 constraints: 6a1a3f4b)
+io.netty:netty-common:4.1.114.Final (13 constraints: 3dc0e6cd)
+io.netty:netty-handler:4.1.114.Final (4 constraints: 1637bb40)
+io.netty:netty-handler-proxy:4.1.114.Final (2 constraints: 3c16cf31)
+io.netty:netty-resolver:4.1.114.Final (3 constraints: d7254fb5)
+io.netty:netty-tcnative-boringssl-static:2.0.66.Final (2 constraints: d11a4158)
+io.netty:netty-tcnative-classes:2.0.66.Final (2 constraints: d61e8b95)
+io.netty:netty-transport:4.1.114.Final (10 constraints: 97988ae2)
+io.netty:netty-transport-classes-epoll:4.1.114.Final (2 constraints: 071ebe65)
+io.netty:netty-transport-native-epoll:4.1.114.Final (2 constraints: 301bc584)
+io.netty:netty-transport-native-unix-common:4.1.114.Final (5 constraints: 2249871c)
io.opencensus:opencensus-api:0.31.1 (5 constraints: 924d4692)
io.opencensus:opencensus-contrib-http-util:0.31.1 (3 constraints: 7232a9fc)
io.opencensus:opencensus-proto:0.2.0 (1 constraints: e60fd595)
diff --git a/versions.props b/versions.props
index b9bb03803c3..286700c1d75 100644
--- a/versions.props
+++ b/versions.props
@@ -7,7 +7,7 @@ com.carrotsearch:hppc=0.10.0
com.cybozu.labs:langdetect=1.1-20120112
com.fasterxml.jackson:jackson-bom=2.18.0
com.github.ben-manes.caffeine:caffeine=3.1.8
-com.github.spotbugs:*=4.8.0
+com.github.spotbugs:*=4.8.6
com.github.stephenc.jcip:jcip-annotations=1.0-1
com.google.cloud:google-cloud-bom=0.224.0
com.google.errorprone:*=2.23.0
@@ -23,7 +23,7 @@ commons-collections:commons-collections=3.2.2
commons-io:commons-io=2.15.1
io.dropwizard.metrics:*=4.2.26
io.grpc:grpc-*=1.65.1
-io.netty:*=4.1.112.Final
+io.netty:*=4.1.114.Final
io.opentelemetry:opentelemetry-bom=1.40.0
io.prometheus:*=0.16.0
io.swagger.core.v3:*=2.2.22