diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
index 7822d8ef2b328..1e4d449a9a551 100644
--- a/.github/ISSUE_TEMPLATE.md
+++ b/.github/ISSUE_TEMPLATE.md
@@ -1,40 +1,46 @@
-
+
+
+**Describe the feature**:
+
+
+
**Elasticsearch version**:
**Plugins installed**: []
-**JVM version**:
+**JVM version** (`java -version`):
-**OS version**:
+**OS version** (`uname -a` if on a Unix-like system):
**Description of the problem including expected versus actual behavior**:
**Steps to reproduce**:
+
+Please include a *minimal* but *complete* recreation of the problem, including
+(e.g.) index creation, mappings, settings, query, etc. The easier you make it
+for us to reproduce the problem, the more likely it is that somebody will take
+the time to look at it.
+
1.
2.
3.
**Provide logs (if relevant)**:
-
-
-**Describe the feature**:
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 92b35e97baa05..6a4531f1bdefa 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -11,3 +11,4 @@ attention.
- If submitting code, have you built your formula locally prior to submission with `gradle check`?
- If submitting code, is your pull request against master? Unless there is a good reason otherwise, we prefer pull requests against master and will backport as needed.
- If submitting code, have you checked that your submission is for an [OS that we support](https://www.elastic.co/support/matrix#show_os)?
+- If you are submitting this code for a class, then read our [policy](https://github.com/elastic/elasticsearch/blob/master/CONTRIBUTING.md#contributing-as-part-of-a-class) for that.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 5885bf9def7eb..0192ab13a5557 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -88,8 +88,8 @@ Contributing to the Elasticsearch codebase
**Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch)
Make sure you have [Gradle](http://gradle.org) installed, as
-Elasticsearch uses it as its build system. Gradle must be version 2.13 _exactly_ in
-order to build successfully.
+Elasticsearch uses it as its build system. Gradle must be at least
+version 3.3 in order to build successfully.
Eclipse users can automatically configure their IDE: `gradle eclipse`
then `File: Import: Existing Projects into Workspace`. Select the
@@ -101,7 +101,11 @@ IntelliJ users can automatically configure their IDE: `gradle idea`
then `File->New Project From Existing Sources`. Point to the root of
the source directory, select
`Import project from external model->Gradle`, enable
-`Use auto-import`.
+`Use auto-import`. Additionally, in order to run tests directly from
+IDEA 2017.1 and above, it is required to disable the IDEA run launcher,
+which can be achieved by adding the `-Didea.no.launcher=true`
+[JVM option](https://intellij-support.jetbrains.com/hc/en-us/articles/206544869-Configuring-JVM-options-and-platform-properties).
+
The Elasticsearch codebase makes heavy use of Java `assert`s and the
test runner requires that assertions be enabled within the JVM. This
@@ -139,3 +143,32 @@ Before submitting your changes, run the test suite to make sure that nothing is
```sh
gradle check
```
+
+Contributing as part of a class
+-------------------------------
+In general, Elasticsearch is happy to accept contributions that were created as
+part of a class, but we strongly advise against making the act of contributing
+part of the class itself. So if you have code you wrote for a class, feel free
+to submit it.
+
+Please, please, please do not assign contributing to Elasticsearch as part of a
+class. If you really want to assign writing code for Elasticsearch as an
+assignment, then the code contributions should be made to your private clone, and
+opening PRs against the primary Elasticsearch repository must be optional, fully
+voluntary, not for a grade, and without any deadlines.
+
+Because:
+
+* While the code review process is likely very educational, it can take wildly
+varying amounts of time depending on who is available, where the change is, and
+how deep the change is. There is no way to predict how long it will take unless
+we rush.
+* We do not rush reviews without a very, very good reason. Class deadlines
+aren't a good enough reason for us to rush reviews.
+* We strongly discourage opening a PR that you don't intend to see through the
+entire code review process, because doing so wastes our time.
+* We don't have the capacity to absorb an entire class full of new contributors,
+especially when they are unlikely to become long-term contributors.
+
+Finally, we require that you run `gradle check` before submitting a
+non-documentation contribution. This is mentioned above, but it is worth
+repeating in this section because it has come up in this context.
diff --git a/NOTICE.txt b/NOTICE.txt
index c99b958193198..643a060cd05c4 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -1,5 +1,5 @@
Elasticsearch
-Copyright 2009-2016 Elasticsearch
+Copyright 2009-2017 Elasticsearch
This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).
diff --git a/README.textile b/README.textile
index dc3a263cd7ce2..9c2b2c5d91e2c 100644
--- a/README.textile
+++ b/README.textile
@@ -50,16 +50,16 @@ h3. Indexing
Let's try and index some twitter like information. First, let's create a twitter user, and add some tweets (the @twitter@ index will be created automatically):
-curl -XPUT 'http://localhost:9200/twitter/user/kimchy?pretty' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/twitter/user/kimchy?pretty' -H 'Content-Type: application/json' -d '{ "name" : "Shay Banon" }'
-curl -XPUT 'http://localhost:9200/twitter/tweet/1?pretty' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/1?pretty' -H 'Content-Type: application/json' -d '
{
"user": "kimchy",
"post_date": "2009-11-15T13:12:00",
"message": "Trying out Elasticsearch, so far so good?"
}'
-curl -XPUT 'http://localhost:9200/twitter/tweet/2?pretty' -d '
+curl -XPUT 'http://localhost:9200/twitter/tweet/2?pretty' -H 'Content-Type: application/json' -d '
{
"user": "kimchy",
"post_date": "2009-11-15T14:12:12",
@@ -87,7 +87,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/_search?q=user:kimchy&pretty=tru
We can also use the JSON query language Elasticsearch provides instead of a query string:
-curl -XGET 'http://localhost:9200/twitter/tweet/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/tweet/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
"query" : {
"match" : { "user": "kimchy" }
@@ -98,7 +98,7 @@ curl -XGET 'http://localhost:9200/twitter/tweet/_search?pretty=true' -d '
Just for kicks, let's get all the documents stored (we should see the user as well):
-curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
"query" : {
"match_all" : {}
@@ -109,7 +109,7 @@ curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
We can also do range search (the @postDate@ was automatically identified as date)
-curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/twitter/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
"query" : {
"range" : {
@@ -130,16 +130,16 @@ Elasticsearch supports multiple indices, as well as multiple types per index. In
Another way to define our simple twitter system is to have a different index per user (note, though that each index has an overhead). Here is the indexing curl's in this case:
-curl -XPUT 'http://localhost:9200/kimchy/info/1?pretty' -d '{ "name" : "Shay Banon" }'
+curl -XPUT 'http://localhost:9200/kimchy/info/1?pretty' -H 'Content-Type: application/json' -d '{ "name" : "Shay Banon" }'
-curl -XPUT 'http://localhost:9200/kimchy/tweet/1?pretty' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/1?pretty' -H 'Content-Type: application/json' -d '
{
"user": "kimchy",
"post_date": "2009-11-15T13:12:00",
"message": "Trying out Elasticsearch, so far so good?"
}'
-curl -XPUT 'http://localhost:9200/kimchy/tweet/2?pretty' -d '
+curl -XPUT 'http://localhost:9200/kimchy/tweet/2?pretty' -H 'Content-Type: application/json' -d '
{
"user": "kimchy",
"post_date": "2009-11-15T14:12:12",
@@ -152,7 +152,7 @@ The above will index information into the @kimchy@ index, with two types, @info@
Complete control on the index level is allowed. As an example, in the above case, we would want to change from the default 5 shards with 1 replica per index, to only 1 shard with 1 replica per index (== per twitter user). Here is how this can be done (the configuration can be in yaml as well):
-curl -XPUT http://localhost:9200/another_user?pretty -d '
+curl -XPUT http://localhost:9200/another_user?pretty -H 'Content-Type: application/json' -d '
{
"index" : {
"number_of_shards" : 1,
@@ -165,7 +165,7 @@ Search (and similar operations) are multi index aware. This means that we can ea
index (twitter user), for example:
-curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
"query" : {
"match_all" : {}
@@ -176,7 +176,7 @@ curl -XGET 'http://localhost:9200/kimchy,another_user/_search?pretty=true' -d '
Or on all the indices:
-curl -XGET 'http://localhost:9200/_search?pretty=true' -d '
+curl -XGET 'http://localhost:9200/_search?pretty=true' -H 'Content-Type: application/json' -d '
{
"query" : {
"match_all" : {}
@@ -200,7 +200,7 @@ We have just covered a very small portion of what Elasticsearch is all about. Fo
h3. Building from Source
-Elasticsearch uses "Gradle":https://gradle.org for its build system. You'll need to have version 2.13 of Gradle installed.
+Elasticsearch uses "Gradle":https://gradle.org for its build system. You'll need to have at least version 3.3 of Gradle installed.
In order to create a distribution, simply run the @gradle assemble@ command in the cloned directory.
diff --git a/TESTING.asciidoc b/TESTING.asciidoc
index dcd6c9981be3a..d9fb3daac98c7 100644
--- a/TESTING.asciidoc
+++ b/TESTING.asciidoc
@@ -25,12 +25,6 @@ run it using Gradle:
gradle run
-------------------------------------
-or to attach a remote debugger, run it as:
-
--------------------------------------
-gradle run --debug-jvm
--------------------------------------
-
=== Test case filtering.
- `tests.class` is a class-filtering shell-like glob pattern,
@@ -351,24 +345,23 @@ VM running trusty by running
These are the linux flavors the Vagrantfile currently supports:
-* ubuntu-1204 aka precise
* ubuntu-1404 aka trusty
* ubuntu-1604 aka xenial
* debian-8 aka jessie, the current debian stable distribution
* centos-6
* centos-7
-* fedora-24
+* fedora-25
* oel-6 aka Oracle Enterprise Linux 6
* oel-7 aka Oracle Enterprise Linux 7
* sles-12
-* opensuse-13
+* opensuse-42 aka Leap
We're missing the following from the support matrix because there aren't high
quality boxes available in vagrant atlas:
* sles-11
-We're missing the follow because our tests are very linux/bash centric:
+We're missing the following because our tests are very linux/bash centric:
* Windows Server 2012
@@ -424,21 +417,59 @@ sudo -E bats $BATS_TESTS/*rpm*.bats
If you wanted to retest all the release artifacts on a single VM you could:
-------------------------------------------------
-gradle vagrantSetUp
-vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404
+gradle setupBats
+cd qa/vagrant; vagrant up ubuntu-1404 --provider virtualbox && vagrant ssh ubuntu-1404
cd $BATS_ARCHIVES
sudo -E bats $BATS_TESTS/*.bats
-------------------------------------------------
+You can also use Gradle to prepare the test environment and then start a single VM:
+
+-------------------------------------------------
+gradle vagrantFedora25#up
+-------------------------------------------------
+
+Or any of vagrantCentos6#up, vagrantCentos7#up, vagrantDebian8#up,
+vagrantFedora25#up, vagrantOel6#up, vagrantOel7#up, vagrantOpensuse42#up,
+vagrantSles12#up, vagrantUbuntu1404#up, vagrantUbuntu1604#up.
+
+Once up, you can then connect to the VM using SSH from the elasticsearch directory:
+
+-------------------------------------------------
+vagrant ssh fedora-25
+-------------------------------------------------
+
+Or from another directory:
+
+-------------------------------------------------
+VAGRANT_CWD=/path/to/elasticsearch vagrant ssh fedora-25
+-------------------------------------------------
+
Note: Starting a vagrant VM outside of the elasticsearch folder requires
indicating the folder that contains the Vagrantfile using the VAGRANT_CWD
-environment variable:
+environment variable.
+
+== Testing backwards compatibility
+
+Backwards compatibility tests exist to test upgrading from each supported version
+to the current version. To run all backcompat tests use:
+
+-------------------------------------------------
+gradle bwcTest
+-------------------------------------------------
+
+A specific version can be tested as well. For example, to test backcompat with
+version 5.3.2 run:
-------------------------------------------------
-gradle vagrantSetUp
-VAGRANT_CWD=/path/to/elasticsearch vagrant up centos-7 --provider virtualbox
+gradle v5.3.2#bwcTest
-------------------------------------------------
+When running `gradle check`, some minimal backcompat checks are run. Which version
+is tested depends on the branch. On master, this will test against the current
+stable branch. On the stable branch, it will test against the latest release
+branch. Finally, on a release branch, it will test against the most recent release.
+
== Coverage analysis
Tests can be run instrumented with jacoco to produce a coverage report in
@@ -462,7 +493,7 @@ Combined (Unit+Integration) coverage:
mvn -Dtests.coverage verify jacoco:report
---------------------------------------------------------------------------
-== Debugging from an IDE
+== Launching and debugging from an IDE
If you want to run elasticsearch from your IDE, the `gradle run` task
supports a remote debugging option:
@@ -471,6 +502,17 @@ supports a remote debugging option:
gradle run --debug-jvm
---------------------------------------------------------------------------
+== Debugging remotely from an IDE
+
+If you want to run Elasticsearch and be able to remotely attach to the process
+for debugging purposes from your IDE, you can start Elasticsearch using `ES_JAVA_OPTS`:
+
+---------------------------------------------------------------------------
+ES_JAVA_OPTS="-Xdebug -Xrunjdwp:server=y,transport=dt_socket,address=4000,suspend=y" ./bin/elasticsearch
+---------------------------------------------------------------------------
+
+Read your IDE documentation for how to attach a debugger to a JVM process.
+
== Building with extra plugins
Additional plugins may be built alongside elasticsearch, where their
dependency on elasticsearch will be substituted with the local elasticsearch
@@ -482,4 +524,3 @@ included as part of the build by checking the projects of the build.
---------------------------------------------------------------------------
gradle projects
---------------------------------------------------------------------------
-
diff --git a/Vagrantfile b/Vagrantfile
index 806d39cc16067..a4dc935f15d65 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -22,10 +22,6 @@
# under the License.
Vagrant.configure(2) do |config|
- config.vm.define "ubuntu-1204" do |config|
- config.vm.box = "elastic/ubuntu-12.04-x86_64"
- ubuntu_common config
- end
config.vm.define "ubuntu-1404" do |config|
config.vm.box = "elastic/ubuntu-14.04-x86_64"
ubuntu_common config
@@ -42,7 +38,7 @@ Vagrant.configure(2) do |config|
# debian and it works fine.
config.vm.define "debian-8" do |config|
config.vm.box = "elastic/debian-8-x86_64"
- deb_common config, 'echo deb http://cloudfront.debian.net/debian jessie-backports main > /etc/apt/sources.list.d/backports.list', 'backports'
+ deb_common config
end
config.vm.define "centos-6" do |config|
config.vm.box = "elastic/centos-6-x86_64"
@@ -60,12 +56,12 @@ Vagrant.configure(2) do |config|
config.vm.box = "elastic/oraclelinux-7-x86_64"
rpm_common config
end
- config.vm.define "fedora-24" do |config|
- config.vm.box = "elastic/fedora-24-x86_64"
+ config.vm.define "fedora-25" do |config|
+ config.vm.box = "elastic/fedora-25-x86_64"
dnf_common config
end
- config.vm.define "opensuse-13" do |config|
- config.vm.box = "elastic/opensuse-13-x86_64"
+ config.vm.define "opensuse-42" do |config|
+ config.vm.box = "elastic/opensuse-42-x86_64"
opensuse_common config
end
config.vm.define "sles-12" do |config|
@@ -108,16 +104,22 @@ SOURCE_PROMPT
source /etc/profile.d/elasticsearch_prompt.sh
SOURCE_PROMPT
SHELL
+ # Creates a file to mark the machine as created by vagrant. Tests check
+ # for this file and refuse to run if it is not present so that they can't
+ # be run unexpectedly.
+ config.vm.provision "markerfile", type: "shell", inline: <<-SHELL
+ touch /etc/is_vagrant_vm
+ SHELL
end
config.config_procs.push ['2', set_prompt]
end
end
def ubuntu_common(config, extra: '')
- deb_common config, 'apt-add-repository -y ppa:openjdk-r/ppa > /dev/null 2>&1', 'openjdk-r-*', extra: extra
+ deb_common config, extra: extra
end
-def deb_common(config, add_openjdk_repository_command, openjdk_list, extra: '')
+def deb_common(config, extra: '')
# http://foo-o-rama.com/vagrant--stdin-is-not-a-tty--fix.html
config.vm.provision "fix-no-tty", type: "shell" do |s|
s.privileged = false
@@ -127,24 +129,14 @@ def deb_common(config, add_openjdk_repository_command, openjdk_list, extra: '')
update_command: "apt-get update",
update_tracking_file: "/var/cache/apt/archives/last_update",
install_command: "apt-get install -y",
- java_package: "openjdk-8-jdk",
- extra: <<-SHELL
- export DEBIAN_FRONTEND=noninteractive
- ls /etc/apt/sources.list.d/#{openjdk_list}.list > /dev/null 2>&1 ||
- (echo "==> Importing java-8 ppa" &&
- #{add_openjdk_repository_command} &&
- apt-get update)
- #{extra}
-SHELL
- )
+ extra: extra)
end
def rpm_common(config)
provision(config,
update_command: "yum check-update",
update_tracking_file: "/var/cache/yum/last_update",
- install_command: "yum install -y",
- java_package: "java-1.8.0-openjdk-devel")
+ install_command: "yum install -y")
end
def dnf_common(config)
@@ -152,8 +144,7 @@ def dnf_common(config)
update_command: "dnf check-update",
update_tracking_file: "/var/cache/dnf/last_update",
install_command: "dnf install -y",
- install_command_retries: 5,
- java_package: "java-1.8.0-openjdk-devel")
+ install_command_retries: 5)
if Vagrant.has_plugin?("vagrant-cachier")
# Autodetect doesn't work....
config.cache.auto_detect = false
@@ -170,17 +161,12 @@ def suse_common(config, extra)
update_command: "zypper --non-interactive list-updates",
update_tracking_file: "/var/cache/zypp/packages/last_update",
install_command: "zypper --non-interactive --quiet install --no-recommends",
- java_package: "java-1_8_0-openjdk-devel",
extra: extra)
end
def sles_common(config)
extra = <<-SHELL
- zypper rr systemsmanagement_puppet
- zypper addrepo -t yast2 http://demeter.uni-regensburg.de/SLES12-x64/DVD1/ dvd1 || true
- zypper addrepo -t yast2 http://demeter.uni-regensburg.de/SLES12-x64/DVD2/ dvd2 || true
- zypper addrepo http://download.opensuse.org/repositories/Java:Factory/SLE_12/Java:Factory.repo || true
- zypper --no-gpg-checks --non-interactive refresh
+ zypper rr systemsmanagement_puppet puppetlabs-pc1
zypper --non-interactive install git-core
SHELL
suse_common config, extra
@@ -195,7 +181,6 @@ end
# is cached by vagrant-cachier.
# @param install_command [String] The command used to install a package.
# Required. Think `apt-get install #{package}`.
-# @param java_package [String] The name of the java package. Required.
# @param extra [String] Extra provisioning commands run before anything else.
# Optional. Used for things like setting up the ppa for Java 8.
def provision(config,
@@ -203,14 +188,20 @@ def provision(config,
update_tracking_file: 'required',
install_command: 'required',
install_command_retries: 0,
- java_package: 'required',
extra: '')
# Vagrant runs ruby 2.0.0, which doesn't have required named parameters....
raise ArgumentError.new('update_command is required') if update_command == 'required'
raise ArgumentError.new('update_tracking_file is required') if update_tracking_file == 'required'
raise ArgumentError.new('install_command is required') if install_command == 'required'
- raise ArgumentError.new('java_package is required') if java_package == 'required'
- config.vm.provision "bats dependencies", type: "shell", inline: <<-SHELL
+ config.vm.provider "virtualbox" do |v|
+ # Give the box more memory and cpu because our tests are beasts!
+ v.memory = Integer(ENV['VAGRANT_MEMORY'] || 8192)
+ v.cpus = Integer(ENV['VAGRANT_CPUS'] || 4)
+ end
+ config.vm.synced_folder "#{Dir.home}/.gradle/caches", "/home/vagrant/.gradle/caches",
+ create: true,
+ owner: "vagrant"
+ config.vm.provision "dependencies", type: "shell", inline: <<-SHELL
set -e
set -o pipefail
@@ -256,7 +247,10 @@ def provision(config,
#{extra}
- installed java || install #{java_package}
+ installed java || {
+ echo "==> Java is not installed on vagrant box #{config.vm.box}"
+ return 1
+ }
ensure tar
ensure curl
ensure unzip
@@ -270,6 +264,18 @@ def provision(config,
/tmp/bats/install.sh /usr
rm -rf /tmp/bats
}
+
+ installed gradle || {
+ echo "==> Installing Gradle"
+ curl -sS -o /tmp/gradle.zip -L https://services.gradle.org/distributions/gradle-3.3-bin.zip
+ unzip /tmp/gradle.zip -d /opt
+ rm -rf /tmp/gradle.zip
+ ln -s /opt/gradle-3.3/bin/gradle /usr/bin/gradle
+ # make nfs mounted gradle home dir writeable
+ chown vagrant:vagrant /home/vagrant/.gradle
+ }
+
+
cat \<\<VARS > /etc/profile.d/elasticsearch_vars.sh
export ZIP=/elasticsearch/distribution/zip/build/distributions
export TAR=/elasticsearch/distribution/tar/build/distributions
@@ -279,6 +285,7 @@ export BATS=/project/build/bats
export BATS_UTILS=/project/build/bats/utils
export BATS_TESTS=/project/build/bats/tests
export BATS_ARCHIVES=/project/build/bats/archives
+export GRADLE_HOME=/opt/gradle-3.3
VARS
cat \<\<SUDOERS_VARS > /etc/sudoers.d/elasticsearch_vars
Defaults env_keep += "ZIP"
diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle
index 36732215d43fb..5a508fa106537 100644
--- a/benchmarks/build.gradle
+++ b/benchmarks/build.gradle
@@ -37,10 +37,7 @@ apply plugin: 'application'
archivesBaseName = 'elasticsearch-benchmarks'
mainClassName = 'org.openjdk.jmh.Main'
-// never try to invoke tests on the benchmark project - there aren't any
-check.dependsOn.remove(test)
-// explicitly override the test task too in case somebody invokes 'gradle test' so it won't trip
-task test(type: Test, overwrite: true)
+test.enabled = false
dependencies {
compile("org.elasticsearch:elasticsearch:${version}") {
@@ -55,11 +52,10 @@ dependencies {
runtime 'org.apache.commons:commons-math3:3.2'
}
-compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
+compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked,-processing"
// enable the JMH's BenchmarkProcessor to generate the final benchmark classes
// needs to be added separately otherwise Gradle will quote it and javac will fail
compileJava.options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"])
-compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"
forbiddenApis {
// classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes
diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
index 4d8f7cfeaac99..591fa400d18da 100644
--- a/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
+++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/routing/allocation/Allocators.java
@@ -36,8 +36,6 @@
import org.elasticsearch.gateway.GatewayAllocator;
import java.lang.reflect.InvocationTargetException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
@@ -49,7 +47,7 @@ private static class NoopGatewayAllocator extends GatewayAllocator {
public static final NoopGatewayAllocator INSTANCE = new NoopGatewayAllocator();
protected NoopGatewayAllocator() {
- super(Settings.EMPTY, null, null);
+ super(Settings.EMPTY);
}
@Override
diff --git a/build.gradle b/build.gradle
index 1159352cd5dec..00d1730a26cb2 100644
--- a/build.gradle
+++ b/build.gradle
@@ -17,20 +17,25 @@
* under the License.
*/
+import java.nio.file.Path
+import java.util.regex.Matcher
import org.eclipse.jgit.lib.Repository
import org.eclipse.jgit.lib.RepositoryBuilder
import org.gradle.plugins.ide.eclipse.model.SourceFolder
import org.apache.tools.ant.taskdefs.condition.Os
+import org.elasticsearch.gradle.VersionProperties
+import org.elasticsearch.gradle.Version
// common maven publishing configuration
subprojects {
group = 'org.elasticsearch'
- version = org.elasticsearch.gradle.VersionProperties.elasticsearch
+ version = VersionProperties.elasticsearch
description = "Elasticsearch subproject ${project.path}"
}
+Path rootPath = rootDir.toPath()
// setup pom license info, but only for artifacts that are part of elasticsearch
-configure(subprojects.findAll { it.path.startsWith(':x-plugins') == false }) {
+configure(subprojects.findAll { it.projectDir.toPath().startsWith(rootPath) }) {
// we only use maven publish to add tasks for pom generation
plugins.withType(MavenPublishPlugin).whenPluginAdded {
@@ -57,15 +62,102 @@ configure(subprojects.findAll { it.path.startsWith(':x-plugins') == false }) {
}
}
+/* Introspect all versions of ES that may be tested against for backwards
+ * compatibility. It is *super* important that this logic is the same as the
+ * logic in VersionUtils.java, modulo alphas, betas, and rcs which are ignored
+ * in gradle because they don't have any backwards compatibility guarantees
+ * but are not ignored in VersionUtils.java because the tests expect them not
+ * to be. */
+Version currentVersion = Version.fromString(VersionProperties.elasticsearch.minus('-SNAPSHOT'))
+int prevMajor = currentVersion.major - 1
+File versionFile = file('core/src/main/java/org/elasticsearch/Version.java')
+List<String> versionLines = versionFile.readLines('UTF-8')
+List<Version> versions = []
+// keep track of the previous major version's last minor, so we know where wire compat begins
+int prevMinorIndex = -1 // index in the versions list of the last minor from the prev major
+int lastPrevMinor = -1 // the minor version number from the prev major we have most recently seen
+for (String line : versionLines) {
+ /* Note that this skips alphas and betas which is fine because they aren't
+ * compatible with anything. */
+ Matcher match = line =~ /\W+public static final Version V_(\d+)_(\d+)_(\d+) .*/
+ if (match.matches()) {
+ int major = Integer.parseInt(match.group(1))
+ int minor = Integer.parseInt(match.group(2))
+ int bugfix = Integer.parseInt(match.group(3))
+ Version foundVersion = new Version(major, minor, bugfix, false)
+ if (currentVersion != foundVersion) {
+ versions.add(foundVersion)
+ }
+ if (major == prevMajor && minor > lastPrevMinor) {
+ prevMinorIndex = versions.size() - 1
+ lastPrevMinor = minor
+ }
+ }
+}
+if (versions.toSorted { it.id } != versions) {
+ println "Versions: ${versions}"
+ throw new GradleException("Version.java contains out of order version constants")
+}
+if (currentVersion.bugfix == 0) {
+ // If on a release branch, after the initial release of that branch, the bugfix version will
+ // be bumped, and will be != 0. On master and N.x branches, we want to test against the
+ // unreleased version of the closest branch. So for those cases, the version includes -SNAPSHOT,
+ // and the bwc distribution will checkout and build that version.
+ Version last = versions[-1]
+ versions[-1] = new Version(last.major, last.minor, last.bugfix, true)
+ if (last.bugfix == 0) {
+ versions[-2] = new Version(
+ versions[-2].major, versions[-2].minor, versions[-2].bugfix, true)
+ }
+}
+
+// injecting groovy property variables into all projects
allprojects {
- // injecting groovy property variables into all projects
project.ext {
// for ide hacks...
isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
isIdea = System.getProperty("idea.active") != null || gradle.startParameter.taskNames.contains('idea') || gradle.startParameter.taskNames.contains('cleanIdea')
+ // for backcompat testing
+ indexCompatVersions = versions
+ wireCompatVersions = versions.subList(prevMinorIndex, versions.size())
+ }
+}
+
+task verifyVersions {
+ doLast {
+ if (gradle.startParameter.isOffline()) {
+ throw new GradleException("Must run in online mode to verify versions")
+ }
+ // Read the list from maven central
+ Node xml
+ new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
+ xml = new XmlParser().parse(s)
+ }
+ Set<Version> knownVersions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /\d\.\d\.\d/ }.collect { Version.fromString(it) })
+
+ // Limit the known versions to those that should be index compatible, and are not future versions
+ knownVersions = knownVersions.findAll { it.major >= prevMajor && it.before(VersionProperties.elasticsearch) }
+
+ /* Limit the listed versions to those that have been marked as released.
+ * Versions not marked as released don't get the same testing and we want
+ * to make sure that we flip all unreleased versions to released as soon
+ * as possible after release. */
+ Set<Version> actualVersions = new TreeSet<>(indexCompatVersions.findAll { false == it.snapshot })
+
+ // Finally, compare!
+ if (knownVersions.equals(actualVersions) == false) {
+ throw new GradleException("out-of-date released versions\nActual :" + actualVersions + "\nExpected:" + knownVersions +
+ "\nUpdate Version.java. Note that Version.CURRENT doesn't count because it is not released.")
+ }
}
}
+task branchConsistency {
+ description 'Ensures this branch is internally consistent. For example, that versions constants match released versions.'
+ group 'Verification'
+ dependsOn verifyVersions
+}
+
subprojects {
project.afterEvaluate {
// include license and notice in jars
@@ -119,12 +211,33 @@ subprojects {
"org.elasticsearch.plugin:transport-netty4-client:${version}": ':modules:transport-netty4',
"org.elasticsearch.plugin:reindex-client:${version}": ':modules:reindex',
"org.elasticsearch.plugin:lang-mustache-client:${version}": ':modules:lang-mustache',
+ "org.elasticsearch.plugin:parent-join-client:${version}": ':modules:parent-join',
+ "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}": ':modules:aggs-matrix-stats',
"org.elasticsearch.plugin:percolator-client:${version}": ':modules:percolator',
]
- configurations.all {
- resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
- projectSubstitutions.each { k,v ->
- subs.substitute(subs.module(k)).with(subs.project(v))
+ if (indexCompatVersions[-1].snapshot) {
+ /* The last and second to last versions can be snapshots. Rather than use
+ * snapshots built by CI, we connect these versions to projects that build
+ * those versions from the HEAD of the appropriate branch. */
+ if (indexCompatVersions[-1].bugfix == 0) {
+ ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-stable-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-stable-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-stable-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${indexCompatVersions[-2]}"] = ':distribution:bwc-release-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${indexCompatVersions[-2]}"] = ':distribution:bwc-release-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${indexCompatVersions[-2]}"] = ':distribution:bwc-release-snapshot'
+ } else {
+ ext.projectSubstitutions["org.elasticsearch.distribution.deb:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-release-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.rpm:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-release-snapshot'
+ ext.projectSubstitutions["org.elasticsearch.distribution.zip:elasticsearch:${indexCompatVersions[-1]}"] = ':distribution:bwc-release-snapshot'
+ }
+ }
+ project.afterEvaluate {
+ configurations.all {
+ resolutionStrategy.dependencySubstitution { DependencySubstitutions subs ->
+ projectSubstitutions.each { k,v ->
+ subs.substitute(subs.module(k)).with(subs.project(v))
+ }
}
}
}
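
The root build.gradle hunk above injects `indexCompatVersions` and `wireCompatVersions` into every project's `ext`. A minimal sketch (with a hypothetical task name) of how a subproject could consume those lists:

```groovy
// Hypothetical diagnostic task; indexCompatVersions and wireCompatVersions are
// the properties injected by the allprojects block in the root build.gradle.
task printCompatVersions {
    doLast {
        // indexCompatVersions: all versions whose indices the current version can read
        println "index compat: ${project.ext.indexCompatVersions}"
        // wireCompatVersions: the tail of that list, starting at the previous
        // major's last minor, i.e. versions the current version can talk to
        println "wire compat : ${project.ext.wireCompatVersions}"
    }
}
```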
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 0e8c2dc1412dd..0839b8a22f8fa 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -23,14 +23,12 @@ apply plugin: 'groovy'
group = 'org.elasticsearch.gradle'
-// TODO: remove this when upgrading to a version that supports ProgressLogger
-// gradle 2.14 made internal apis unavailable to plugins, and gradle considered
-// ProgressLogger to be an internal api. Until this is made available again,
-// we can't upgrade without losing our nice progress logging
-// NOTE that this check duplicates that in BuildPlugin, but we need to check
-// early here before trying to compile the broken classes in buildSrc
-if (GradleVersion.current() != GradleVersion.version('2.13')) {
- throw new GradleException('Gradle 2.13 is required to build elasticsearch')
+if (GradleVersion.current() < GradleVersion.version('3.3')) {
+ throw new GradleException('Gradle 3.3+ is required to build elasticsearch')
+}
+
+if (JavaVersion.current() < JavaVersion.VERSION_1_8) {
+ throw new GradleException('Java 1.8 is required to build elasticsearch gradle tools')
}
if (project == rootProject) {
@@ -94,11 +92,17 @@ dependencies {
compile 'com.netflix.nebula:gradle-info-plugin:3.0.3'
compile 'org.eclipse.jgit:org.eclipse.jgit:3.2.0.201312181205-r'
compile 'com.perforce:p4java:2012.3.551082' // THIS IS SUPPOSED TO BE OPTIONAL IN THE FUTURE....
- compile 'de.thetaphi:forbiddenapis:2.2'
+ compile 'de.thetaphi:forbiddenapis:2.3'
compile 'org.apache.rat:apache-rat:0.11'
- compile 'ru.vyarus:gradle-animalsniffer-plugin:1.0.1'
}
+// Gradle 2.14+ removed ProgressLogger(-Factory) classes from the public APIs
+// Use logging dependency instead
+
+dependencies {
+ compileOnly "org.gradle:gradle-logging:${GradleVersion.current().getVersion()}"
+ compile 'ru.vyarus:gradle-animalsniffer-plugin:1.2.0' // Gradle 2.14 requires a version > 1.0.1
+}
/*****************************************************************************
* Bootstrap repositories *
@@ -107,6 +111,9 @@ dependencies {
if (project == rootProject) {
repositories {
+ if (System.getProperty("repos.mavenLocal") != null) {
+ mavenLocal()
+ }
mavenCentral()
}
test.exclude 'org/elasticsearch/test/NamingConventionsCheckBadClasses*'
@@ -149,4 +156,11 @@ if (project != rootProject) {
testClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$UnitTestCase'
integTestClass = 'org.elasticsearch.test.NamingConventionsCheckBadClasses$IntegTestCase'
}
+
+ task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
+ checkForTestsInMain = true
+ testClass = namingConventions.testClass
+ integTestClass = namingConventions.integTestClass
+ }
+ precommit.dependsOn namingConventionsMain
}
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
index e2230b116c714..d3d07db0d2072 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy
@@ -12,10 +12,38 @@ import org.gradle.api.tasks.testing.Test
class RandomizedTestingPlugin implements Plugin<Project> {
void apply(Project project) {
+ setupSeed(project)
replaceTestTask(project.tasks)
configureAnt(project.ant)
}
+ /**
+ * Pins the test seed at configuration time so it isn't different on every
+ * {@link RandomizedTestingTask} execution. This is useful if random
+ * decisions in one run of {@linkplain RandomizedTestingTask} influence the
+ * outcome of subsequent runs. Pinning the seed up front like this makes
+ * the reproduction line from one run be useful on another run.
+ */
+ static void setupSeed(Project project) {
+ if (project.rootProject.ext.has('testSeed')) {
+ /* Skip this if we've already pinned the testSeed. It is important
+ * that this checks the rootProject so that we know we've only ever
+ * initialized one time. */
+ return
+ }
+ String testSeed = System.getProperty('tests.seed')
+ if (testSeed == null) {
+ long seed = new Random(System.currentTimeMillis()).nextLong()
+ testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT)
+ }
+ /* Set the testSeed on the root project first so other projects can use
+ * it during initialization. */
+ project.rootProject.ext.testSeed = testSeed
+ project.rootProject.subprojects {
+ project.ext.testSeed = testSeed
+ }
+ }
+
static void replaceTestTask(TaskContainer tasks) {
Test oldTestTask = tasks.findByPath('test')
if (oldTestTask == null) {
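
The seed pinning above derives one hex seed for the whole build (or honors `-Dtests.seed`) so that a reproduction line from one task run remains valid for subsequent runs. A standalone sketch of just the derivation, under those assumptions:

```groovy
// Standalone sketch of the seed-pinning logic above: derive one seed (or take
// -Dtests.seed) and share it build-wide so reproduction lines stay valid.
String testSeed = System.getProperty('tests.seed')
if (testSeed == null) {
    long seed = new Random(System.currentTimeMillis()).nextLong()
    testSeed = Long.toUnsignedString(seed, 16).toUpperCase(Locale.ROOT)
}
println "tests.seed = ${testSeed}" // e.g. tests.seed = F7A0C7B1D3E94512 (made-up value)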
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
index b28e7210ea41d..1817ea57e7abe 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingTask.groovy
@@ -9,6 +9,7 @@ import org.apache.tools.ant.DefaultLogger
import org.apache.tools.ant.RuntimeConfigurable
import org.apache.tools.ant.UnknownElement
import org.gradle.api.DefaultTask
+import org.gradle.api.InvalidUserDataException
import org.gradle.api.file.FileCollection
import org.gradle.api.file.FileTreeElement
import org.gradle.api.internal.tasks.options.Option
@@ -19,7 +20,7 @@ import org.gradle.api.tasks.Optional
import org.gradle.api.tasks.TaskAction
import org.gradle.api.tasks.util.PatternFilterable
import org.gradle.api.tasks.util.PatternSet
-import org.gradle.logging.ProgressLoggerFactory
+import org.gradle.internal.logging.progress.ProgressLoggerFactory
import org.gradle.util.ConfigureUtil
import javax.inject.Inject
@@ -69,6 +70,10 @@ class RandomizedTestingTask extends DefaultTask {
@Input
String ifNoTests = 'ignore'
+ @Optional
+ @Input
+ String onNonEmptyWorkDirectory = 'fail'
+
TestLoggingConfiguration testLoggingConfig = new TestLoggingConfiguration()
BalancersConfiguration balancersConfig = new BalancersConfiguration(task: this)
@@ -81,6 +86,7 @@ class RandomizedTestingTask extends DefaultTask {
String argLine = null
Map systemProperties = new HashMap<>()
+ Map<String, Object> environmentVariables = new HashMap<>()
PatternFilterable patternSet = new PatternSet()
RandomizedTestingTask() {
@@ -91,7 +97,7 @@ class RandomizedTestingTask extends DefaultTask {
@Inject
ProgressLoggerFactory getProgressLoggerFactory() {
- throw new UnsupportedOperationException();
+ throw new UnsupportedOperationException()
}
void jvmArgs(Iterable<String> arguments) {
@@ -106,6 +112,10 @@ class RandomizedTestingTask extends DefaultTask {
systemProperties.put(property, value)
}
+ void environment(String key, Object value) {
+ environmentVariables.put(key, value)
+ }
+
void include(String... includes) {
this.patternSet.include(includes);
}
@@ -194,7 +204,9 @@ class RandomizedTestingTask extends DefaultTask {
haltOnFailure: true, // we want to capture when a build failed, but will decide whether to rethrow later
shuffleOnSlave: shuffleOnSlave,
leaveTemporary: leaveTemporary,
- ifNoTests: ifNoTests
+ ifNoTests: ifNoTests,
+ onNonEmptyWorkDirectory: onNonEmptyWorkDirectory,
+ newenvironment: true
]
DefaultLogger listener = null
@@ -248,8 +260,16 @@ class RandomizedTestingTask extends DefaultTask {
}
}
for (Map.Entry prop : systemProperties) {
+ if (prop.getKey().equals('tests.seed')) {
+ throw new InvalidUserDataException('Seed should be ' +
+ 'set on the project instead of a system property')
+ }
sysproperty key: prop.getKey(), value: prop.getValue().toString()
}
+ systemProperty 'tests.seed', project.testSeed
+ for (Map.Entry<String, Object> envvar : environmentVariables) {
+ env key: envvar.getKey(), value: envvar.getValue().toString()
+ }
makeListeners()
}
} catch (BuildException e) {
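
The new `environment(String, Object)` method, together with `newenvironment: true`, means each forked test JVM starts from a clean environment containing only what the build explicitly passes through. A hypothetical build.gradle usage:

```groovy
// Hypothetical usage of the environment() method added above; the pair is
// forwarded to the forked test JVM via the junit4 <env> element, and
// newenvironment=true keeps the outer shell's variables from leaking in.
test {
    environment 'EXAMPLE_HOME', '/tmp/example' // hypothetical variable
}
```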
diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
index 14f5d476be3cb..da25afa938916 100644
--- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
+++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/TestProgressLogger.groovy
@@ -25,8 +25,8 @@ import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedStartEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedSuiteResultEvent
import com.carrotsearch.ant.tasks.junit4.events.aggregated.AggregatedTestResultEvent
import com.carrotsearch.ant.tasks.junit4.listeners.AggregatedEventListener
-import org.gradle.logging.ProgressLogger
-import org.gradle.logging.ProgressLoggerFactory
+import org.gradle.internal.logging.progress.ProgressLogger
+import org.gradle.internal.logging.progress.ProgressLoggerFactory
import static com.carrotsearch.ant.tasks.junit4.FormattingUtils.formatDurationInSeconds
import static com.carrotsearch.ant.tasks.junit4.events.aggregated.TestStatus.ERROR
@@ -77,7 +77,7 @@ class TestProgressLogger implements AggregatedEventListener {
/** Have we finished a whole suite yet? */
volatile boolean suiteFinished = false
/* Note that we probably overuse volatile here but it isn't hurting us and
- lets us move things around without worying about breaking things. */
+ lets us move things around without worrying about breaking things. */
@Subscribe
void onStart(AggregatedStartEvent e) throws IOException {
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 01bab85b0199a..af7716804bf86 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -18,7 +18,9 @@
*/
package org.elasticsearch.gradle
+import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import nebula.plugin.extraconfigurations.ProvidedBasePlugin
+import org.apache.tools.ant.taskdefs.condition.Os
import org.elasticsearch.gradle.precommit.PrecommitTasks
import org.gradle.api.GradleException
import org.gradle.api.InvalidUserDataException
@@ -118,9 +120,10 @@ class BuildPlugin implements Plugin {
println " JDK Version : ${gradleJavaVersionDetails}"
println " JAVA_HOME : ${gradleJavaHome}"
}
+ println " Random Testing Seed : ${project.testSeed}"
// enforce gradle version
- GradleVersion minGradle = GradleVersion.version('2.13')
+ GradleVersion minGradle = GradleVersion.version('3.3')
if (GradleVersion.current() < minGradle) {
throw new GradleException("${minGradle} or above is required to build elasticsearch")
}
@@ -201,19 +204,28 @@ class BuildPlugin implements Plugin {
/** Runs the given javascript using jjs from the jdk, and returns the output */
private static String runJavascript(Project project, String javaHome, String script) {
- File tmpScript = File.createTempFile('es-gradle-tmp', '.js')
- tmpScript.setText(script, 'UTF-8')
- ByteArrayOutputStream output = new ByteArrayOutputStream()
+ ByteArrayOutputStream stdout = new ByteArrayOutputStream()
+ ByteArrayOutputStream stderr = new ByteArrayOutputStream()
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+ // gradle/groovy does not properly escape the double quote for windows
+ script = script.replace('"', '\\"')
+ }
+ File jrunscriptPath = new File(javaHome, 'bin/jrunscript')
ExecResult result = project.exec {
- executable = new File(javaHome, 'bin/jjs')
- args tmpScript.toString()
- standardOutput = output
- errorOutput = new ByteArrayOutputStream()
- ignoreExitValue = true // we do not fail so we can first cleanup the tmp file
+ executable = jrunscriptPath
+ args '-e', script
+ standardOutput = stdout
+ errorOutput = stderr
+ ignoreExitValue = true
+ }
+ if (result.exitValue != 0) {
+ project.logger.error("STDOUT:")
+ stdout.toString('UTF-8').eachLine { line -> project.logger.error(line) }
+ project.logger.error("STDERR:")
+ stderr.toString('UTF-8').eachLine { line -> project.logger.error(line) }
+ result.rethrowFailure()
}
- java.nio.file.Files.delete(tmpScript.toPath())
- result.assertNormalExitValue()
- return output.toString('UTF-8').trim()
+ return stdout.toString('UTF-8').trim()
}
/** Return the configuration name used for finding transitive deps of the given dependency. */
@@ -309,7 +321,6 @@ class BuildPlugin implements Plugin {
*
*/
private static Closure fixupDependencies(Project project) {
- // TODO: revisit this when upgrading to Gradle 2.14+, see Javadoc comment above
return { XmlProvider xml ->
// first find if we have dependencies at all, and grab the node
NodeList depsNodes = xml.asNode().get('dependencies')
@@ -332,6 +343,13 @@ class BuildPlugin implements Plugin {
depNode.scope*.value = 'compile'
}
+ // remove any exclusions added by gradle, they contain wildcards and systems like ivy have bugs with wildcards
+ // see https://github.com/elastic/elasticsearch/issues/24490
+ NodeList exclusionsNode = depNode.get('exclusions')
+ if (exclusionsNode.size() > 0) {
+ depNode.remove(exclusionsNode.get(0))
+ }
+
// collect the transitive deps now that we know what this dependency is
String depConfig = transitiveDepConfigName(groupId, artifactId, version)
Configuration configuration = project.configurations.findByName(depConfig)
@@ -418,8 +436,10 @@ class BuildPlugin implements Plugin {
// hack until gradle supports java 9's new "--release" arg
assert minimumJava == JavaVersion.VERSION_1_8
options.compilerArgs << '--release' << '8'
- project.sourceCompatibility = null
- project.targetCompatibility = null
+ doFirst{
+ sourceCompatibility = null
+ targetCompatibility = null
+ }
}
}
}
@@ -466,7 +486,7 @@ class BuildPlugin implements Plugin {
'Build-Java-Version': project.javaVersion)
if (jarTask.manifest.attributes.containsKey('Change') == false) {
logger.warn('Building without git revision id.')
- jarTask.manifest.attributes('Change': 'N/A')
+ jarTask.manifest.attributes('Change': 'Unknown')
}
}
}
@@ -478,16 +498,12 @@ class BuildPlugin implements Plugin {
jvm "${project.javaHome}/bin/java"
parallelism System.getProperty('tests.jvms', 'auto')
ifNoTests 'fail'
+ onNonEmptyWorkDirectory 'wipe'
leaveTemporary true
// TODO: why are we not passing maxmemory to junit4?
jvmArg '-Xmx' + System.getProperty('tests.heap.size', '512m')
jvmArg '-Xms' + System.getProperty('tests.heap.size', '512m')
- if (JavaVersion.current().isJava7()) {
- // some tests need a large permgen, but that only exists on java 7
- jvmArg '-XX:MaxPermSize=128m'
- }
- jvmArg '-XX:MaxDirectMemorySize=512m'
jvmArg '-XX:+HeapDumpOnOutOfMemoryError'
File heapdumpDir = new File(project.buildDir, 'heapdump')
heapdumpDir.mkdirs()
@@ -510,16 +526,19 @@ class BuildPlugin implements Plugin {
systemProperty 'tests.logger.level', 'WARN'
for (Map.Entry property : System.properties.entrySet()) {
if (property.getKey().startsWith('tests.') ||
- property.getKey().startsWith('es.')) {
+ property.getKey().startsWith('es.')) {
+ if (property.getKey().equals('tests.seed')) {
+ /* The seed is already set on the project so we
+ * shouldn't attempt to override it. */
+ continue;
+ }
systemProperty property.getKey(), property.getValue()
}
}
- // System assertions (-esa) are disabled for now because of what looks like a
- // JDK bug triggered by Groovy on JDK7. We should look at re-enabling system
- // assertions when we upgrade to a new version of Groovy (currently 2.4.4) or
- // require JDK8. See https://issues.apache.org/jira/browse/GROOVY-7528.
- enableSystemAssertions false
+ boolean assertionsEnabled = Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))
+ enableSystemAssertions assertionsEnabled
+ enableAssertions assertionsEnabled
testLogging {
showNumFailuresAtEnd 25
@@ -560,11 +579,22 @@ class BuildPlugin implements Plugin {
/** Configures the test task */
static Task configureTest(Project project) {
- Task test = project.tasks.getByName('test')
+ RandomizedTestingTask test = project.tasks.getByName('test')
test.configure(commonTestConfig(project))
test.configure {
include '**/*Tests.class'
}
+
+ // Add a method to create additional unit tests for a project, which will share the same
+ // randomized testing setup, but by default run no tests.
+ project.extensions.add('additionalTest', { String name, Closure config ->
+ RandomizedTestingTask additionalTest = project.tasks.create(name, RandomizedTestingTask.class)
+ additionalTest.classpath = test.classpath
+ additionalTest.testClassesDir = test.testClassesDir
+ additionalTest.configure(commonTestConfig(project))
+ additionalTest.configure(config)
+ test.dependsOn(additionalTest)
+ });
return test
}
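
The `additionalTest` extension registered above creates extra randomized-testing tasks that reuse the main test task's classpath and common configuration. A hypothetical usage from a subproject's build.gradle:

```groovy
// Hypothetical usage of the additionalTest extension added above: a second
// test task sharing the main test classpath but running a different subset.
additionalTest('slowTests') {
    include '**/*SlowTests.class' // hypothetical naming pattern
}
```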
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/NoticeTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/NoticeTask.groovy
new file mode 100644
index 0000000000000..928298db7bfc2
--- /dev/null
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/NoticeTask.groovy
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle
+
+import org.gradle.api.DefaultTask
+import org.gradle.api.Project
+import org.gradle.api.artifacts.Configuration
+import org.gradle.api.tasks.InputFile
+import org.gradle.api.tasks.OutputFile
+import org.gradle.api.tasks.TaskAction
+
+/**
+ * A task to create a notice file which includes dependencies' notices.
+ */
+public class NoticeTask extends DefaultTask {
+
+ @InputFile
+ File inputFile = project.rootProject.file('NOTICE.txt')
+
+ @OutputFile
+ File outputFile = new File(project.buildDir, "notices/${name}/NOTICE.txt")
+
+ /** Directories to include notices from */
+ private List<File> licensesDirs = new ArrayList<>()
+
+ public NoticeTask() {
+ description = 'Create a notice file from dependencies'
+ // Default licenses directory is ${projectDir}/licenses (if it exists)
+ File licensesDir = new File(project.projectDir, 'licenses')
+ if (licensesDir.exists()) {
+ licensesDirs.add(licensesDir)
+ }
+ }
+
+ /** Add notices from the specified directory. */
+ public void licensesDir(File licensesDir) {
+ licensesDirs.add(licensesDir)
+ }
+
+ @TaskAction
+ public void generateNotice() {
+ StringBuilder output = new StringBuilder()
+ output.append(inputFile.getText('UTF-8'))
+ output.append('\n\n')
+ // This is a map rather than a set so that entries are sorted by the 3rd
+ // party component names, unaffected by the full paths of the various files
+ Map<String, File> seen = new TreeMap<>()
+ for (File licensesDir : licensesDirs) {
+ licensesDir.eachFileMatch({ it ==~ /.*-NOTICE\.txt/ }) { File file ->
+ String name = file.name.substring(0, file.name.length() - '-NOTICE.txt'.length())
+ if (seen.containsKey(name)) {
+ File prevFile = seen.get(name)
+ if (prevFile.text != file.text) {
+ throw new RuntimeException("Two different notices exist for dependency '" +
+ name + "': " + prevFile + " and " + file)
+ }
+ } else {
+ seen.put(name, file)
+ }
+ }
+ }
+ for (Map.Entry<String, File> entry : seen.entrySet()) {
+ String name = entry.getKey()
+ File file = entry.getValue()
+ appendFile(file, name, 'NOTICE', output)
+ appendFile(new File(file.parentFile, "${name}-LICENSE.txt"), name, 'LICENSE', output)
+ }
+ outputFile.setText(output.toString(), 'UTF-8')
+ }
+
+ static void appendFile(File file, String name, String type, StringBuilder output) {
+ String text = file.getText('UTF-8')
+ if (text.trim().isEmpty()) {
+ return
+ }
+ output.append('================================================================================\n')
+ output.append("${name} ${type}\n")
+ output.append('================================================================================\n')
+ output.append(text)
+ output.append('\n\n')
+ }
+}
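
NoticeTask scans each registered licenses directory for `*-NOTICE.txt` files and appends them, plus the matching `*-LICENSE.txt`, to the root NOTICE.txt. A hypothetical wiring from a plugin's build.gradle:

```groovy
// Hypothetical wiring of the NoticeTask defined above. ${projectDir}/licenses
// is picked up automatically when it exists; extra directories are opt-in.
task bundleNotice(type: org.elasticsearch.gradle.NoticeTask) {
    licensesDir new File(project.projectDir, 'extra-licenses') // assumed path
}
```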
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy
new file mode 100644
index 0000000000000..b59f26381f2f3
--- /dev/null
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/Version.groovy
@@ -0,0 +1,78 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle
+
+import groovy.transform.Sortable
+
+/**
+ * Encapsulates comparison and printing logic for an x.y.z version.
+ */
+@Sortable(includes=['id'])
+public class Version {
+
+ final int major
+ final int minor
+ final int bugfix
+ final int id
+ final boolean snapshot
+
+ public Version(int major, int minor, int bugfix, boolean snapshot) {
+ this.major = major
+ this.minor = minor
+ this.bugfix = bugfix
+ this.snapshot = snapshot
+ this.id = major * 100000 + minor * 1000 + bugfix * 10 +
+ (snapshot ? 1 : 0)
+ }
+
+ public static Version fromString(String s) {
+ String[] parts = s.split('\\.')
+ String bugfix = parts[2]
+ boolean snapshot = false
+ if (bugfix.contains('-')) {
+ snapshot = bugfix.endsWith('-SNAPSHOT')
+ bugfix = bugfix.split('-')[0]
+ }
+ return new Version(parts[0] as int, parts[1] as int, bugfix as int,
+ snapshot)
+ }
+
+ @Override
+ public String toString() {
+ String snapshotStr = snapshot ? '-SNAPSHOT' : ''
+ return "${major}.${minor}.${bugfix}${snapshotStr}"
+ }
+
+ public boolean before(String compareTo) {
+ return id < fromString(compareTo).id
+ }
+
+ public boolean onOrBefore(String compareTo) {
+ return id <= fromString(compareTo).id
+ }
+
+ public boolean onOrAfter(String compareTo) {
+ return id >= fromString(compareTo).id
+ }
+
+ public boolean after(String compareTo) {
+ return id > fromString(compareTo).id
+ }
+}
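
The `id` field packs major/minor/bugfix/snapshot into one comparable integer, which is what `@Sortable(includes=['id'])` and the before/after helpers compare. A worked example based on the class as defined above:

```groovy
// Worked example of the id encoding above:
// 5.3.2 packs to 5 * 100000 + 3 * 1000 + 2 * 10 + 0 == 503020.
import org.elasticsearch.gradle.Version

def v = Version.fromString('5.3.2')
assert v.id == 503020
assert v.before('5.4.0') && v.after('5.3.1')
// a -SNAPSHOT suffix only flips the low digit, so a snapshot sorts just after
// its release: 5.3.2-SNAPSHOT packs to 503021
assert Version.fromString('5.3.2-SNAPSHOT').id == 503021
```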
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
index 9270edbb5690e..f126839a8d48a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/RestTestsFromSnippetsTask.groovy
@@ -167,6 +167,9 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
* warning every time. */
current.println(" - skip:")
current.println(" features: ")
+ current.println(" - stash_in_key")
+ current.println(" - stash_in_path")
+ current.println(" - stash_path_replace")
current.println(" - warnings")
}
if (test.skipTest) {
@@ -179,12 +182,14 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
}
if (test.setup != null) {
// Insert a setup defined outside of the docs
- String setup = setups[test.setup]
- if (setup == null) {
- throw new InvalidUserDataException("Couldn't find setup "
- + "for $test")
+ for (String setupName : test.setup.split(',')) {
+ String setup = setups[setupName]
+ if (setup == null) {
+ throw new InvalidUserDataException("Couldn't find setup "
+ + "for $test")
+ }
+ current.println(setup)
}
- current.println(setup)
}
body(test, false)
@@ -295,7 +300,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
Path dest = outputRoot().toPath().resolve(test.path)
// Replace the extension
String fileName = dest.getName(dest.nameCount - 1)
- dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yaml'))
+ dest = dest.parent.resolve(fileName.replace('.asciidoc', '.yml'))
// Now setup the writer
Files.createDirectories(dest.parent)
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
index 518b4da439cf0..94af22f4aa279 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/SnippetsTask.groovy
@@ -90,6 +90,7 @@ public class SnippetsTask extends DefaultTask {
* tests cleaner.
*/
subst = subst.replace('$body', '\\$body')
+ subst = subst.replace('$_path', '\\$_path')
// \n is a new line....
subst = subst.replace('\\n', '\n')
snippet.contents = snippet.contents.replaceAll(
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
index d5295519ad294..2e11fdc2681bc 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -19,6 +19,7 @@
package org.elasticsearch.gradle.plugin
import org.elasticsearch.gradle.BuildPlugin
+import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.test.RestIntegTestTask
import org.elasticsearch.gradle.test.RunTask
import org.gradle.api.Project
@@ -62,15 +63,16 @@ public class PluginBuildPlugin extends BuildPlugin {
project.ext.set("nebulaPublish.maven.jar", false)
}
- project.integTest.dependsOn(project.bundlePlugin)
+ project.integTestCluster.dependsOn(project.bundlePlugin)
project.tasks.run.dependsOn(project.bundlePlugin)
if (isModule) {
- project.integTest.clusterConfig.module(project)
+ project.integTestCluster.module(project)
project.tasks.run.clusterConfig.module(project)
} else {
- project.integTest.clusterConfig.plugin(project.path)
+ project.integTestCluster.plugin(project.path)
project.tasks.run.clusterConfig.plugin(project.path)
addZipPomGeneration(project)
+ addNoticeGeneration(project)
}
project.namingConventions {
@@ -94,7 +96,7 @@ public class PluginBuildPlugin extends BuildPlugin {
provided "com.vividsolutions:jts:${project.versions.jts}"
provided "org.apache.logging.log4j:log4j-api:${project.versions.log4j}"
provided "org.apache.logging.log4j:log4j-core:${project.versions.log4j}"
- provided "net.java.dev.jna:jna:${project.versions.jna}"
+ provided "org.elasticsearch:jna:${project.versions.jna}"
}
}
@@ -118,12 +120,15 @@ public class PluginBuildPlugin extends BuildPlugin {
// add the plugin properties and metadata to test resources, so unit tests can
// know about the plugin (used by test security code to statically initialize the plugin in unit tests)
SourceSet testSourceSet = project.sourceSets.test
- testSourceSet.output.dir(buildProperties.generatedResourcesDir, builtBy: 'pluginProperties')
+ testSourceSet.output.dir(buildProperties.descriptorOutput.parentFile, builtBy: 'pluginProperties')
testSourceSet.resources.srcDir(pluginMetadata)
// create the actual bundle task, which zips up all the files for the plugin
Zip bundle = project.tasks.create(name: 'bundlePlugin', type: Zip, dependsOn: [project.jar, buildProperties]) {
- from buildProperties // plugin properties file
+ from(buildProperties.descriptorOutput.parentFile) {
+ // plugin properties file
+ include(buildProperties.descriptorOutput.name)
+ }
from pluginMetadata // metadata (eg custom security policy)
from project.jar // this plugin's jar
from project.configurations.runtime - project.configurations.provided // the dep jars
@@ -244,4 +249,19 @@ public class PluginBuildPlugin extends BuildPlugin {
}
}
}
+
+ protected void addNoticeGeneration(Project project) {
+ File licenseFile = project.pluginProperties.extension.licenseFile
+ if (licenseFile != null) {
+ project.bundlePlugin.from(licenseFile.parentFile) {
+ include(licenseFile.name)
+ }
+ }
+ File noticeFile = project.pluginProperties.extension.noticeFile
+ if (noticeFile != null) {
+ NoticeTask generateNotice = project.tasks.create('generateNotice', NoticeTask.class)
+ generateNotice.inputFile = noticeFile
+ project.bundlePlugin.from(generateNotice)
+ }
+ }
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy
index 5502266693653..1251be265da9a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesExtension.groovy
@@ -39,10 +39,24 @@ class PluginPropertiesExtension {
@Input
String classname
+ @Input
+ boolean hasNativeController = false
+
/** Indicates whether the plugin jar should be made available for the transport client. */
@Input
boolean hasClientJar = false
+ /** A license file that should be included in the built plugin zip. */
+ @Input
+ File licenseFile = null
+
+ /**
+ * A notice file that should be included in the built plugin zip. This will be
+ * extended with notices from the {@code licenses/} directory.
+ */
+ @Input
+ File noticeFile = null
+
PluginPropertiesExtension(Project project) {
name = project.name
version = project.version
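
For context, a hypothetical plugin `build.gradle` exercising the new extension fields (the plugin name, classname, and file paths are illustrative, not taken from this change):

```groovy
esplugin {
    description = 'An example plugin'
    classname = 'org.example.ExamplePlugin'
    hasNativeController = true                     // recorded in plugin-descriptor.properties
    licenseFile = rootProject.file('LICENSE.txt')  // copied into the plugin zip as-is
    noticeFile = rootProject.file('NOTICE.txt')    // merged with licenses/ notices by NoticeTask
}
```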
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
index 7156c2650cbe0..91efe247a016b 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginPropertiesTask.groovy
@@ -22,6 +22,7 @@ import org.elasticsearch.gradle.VersionProperties
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
+import org.gradle.api.tasks.OutputFile
/**
* Creates a plugin descriptor.
@@ -29,20 +30,22 @@ import org.gradle.api.tasks.Copy
class PluginPropertiesTask extends Copy {
PluginPropertiesExtension extension
- File generatedResourcesDir = new File(project.buildDir, 'generated-resources')
+
+ @OutputFile
+ File descriptorOutput = new File(project.buildDir, 'generated-resources/plugin-descriptor.properties')
PluginPropertiesTask() {
- File templateFile = new File(project.buildDir, 'templates/plugin-descriptor.properties')
+ File templateFile = new File(project.buildDir, "templates/${descriptorOutput.name}")
Task copyPluginPropertiesTemplate = project.tasks.create('copyPluginPropertiesTemplate') {
doLast {
- InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream('/plugin-descriptor.properties')
+ InputStream resourceTemplate = PluginPropertiesTask.getResourceAsStream("/${descriptorOutput.name}")
templateFile.parentFile.mkdirs()
templateFile.setText(resourceTemplate.getText('UTF-8'), 'UTF-8')
}
}
+
dependsOn(copyPluginPropertiesTemplate)
extension = project.extensions.create('esplugin', PluginPropertiesExtension, project)
- project.clean.delete(generatedResourcesDir)
project.afterEvaluate {
// check require properties are set
if (extension.name == null) {
@@ -55,8 +58,8 @@ class PluginPropertiesTask extends Copy {
throw new InvalidUserDataException('classname is a required setting for esplugin')
}
// configure property substitution
- from(templateFile)
- into(generatedResourcesDir)
+ from(templateFile.parentFile).include(descriptorOutput.name)
+ into(descriptorOutput.parentFile)
Map properties = generateSubstitutions()
expand(properties)
inputs.properties(properties)
@@ -76,7 +79,8 @@ class PluginPropertiesTask extends Copy {
'version': stringSnap(extension.version),
'elasticsearchVersion': stringSnap(VersionProperties.elasticsearch),
'javaVersion': project.targetCompatibility as String,
- 'classname': extension.classname
+ 'classname': extension.classname,
+ 'hasNativeController': extension.hasNativeController
]
}
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy
index 6fa37be309ec1..4d292d87ec39c 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy
@@ -86,6 +86,9 @@ public class DependencyLicensesTask extends DefaultTask {
/** A map of patterns to prefix, used to find the LICENSE and NOTICE file. */
private LinkedHashMap mappings = new LinkedHashMap<>()
+ /** Names of dependencies whose shas should not exist. */
+ private Set ignoreShas = new HashSet<>()
+
/**
* Add a mapping from a regex pattern for the jar name, to a prefix to find
* the LICENSE and NOTICE file for that jar.
@@ -106,6 +109,15 @@ public class DependencyLicensesTask extends DefaultTask {
mappings.put(from, to)
}
+ /**
+ * Add a rule which will skip SHA checking for the given dependency name. This should be used for
+ * locally built dependencies, whose sha changes constantly.
+ */
+ @Input
+ public void ignoreSha(String dep) {
+ ignoreShas.add(dep)
+ }
+
@TaskAction
public void checkDependencies() {
if (dependencies.isEmpty()) {
@@ -139,19 +151,27 @@ public class DependencyLicensesTask extends DefaultTask {
for (File dependency : dependencies) {
String jarName = dependency.getName()
- logger.info("Checking license/notice/sha for " + jarName)
- checkSha(dependency, jarName, shaFiles)
+ String depName = jarName - ~/\-\d+.*/
+ if (ignoreShas.contains(depName)) {
+ // local deps should not have sha files!
+ if (getShaFile(jarName).exists()) {
+ throw new GradleException("SHA file ${getShaFile(jarName)} exists for ignored dependency ${depName}")
+ }
+ } else {
+ logger.info("Checking sha for " + jarName)
+ checkSha(dependency, jarName, shaFiles)
+ }
- String name = jarName - ~/\-\d+.*/
- Matcher match = mappingsPattern.matcher(name)
+ logger.info("Checking license/notice for " + depName)
+ Matcher match = mappingsPattern.matcher(depName)
if (match.matches()) {
int i = 0
while (i < match.groupCount() && match.group(i + 1) == null) ++i;
- logger.info("Mapped dependency name ${name} to ${mapped.get(i)} for license check")
- name = mapped.get(i)
+ logger.info("Mapped dependency name ${depName} to ${mapped.get(i)} for license check")
+ depName = mapped.get(i)
}
- checkFile(name, jarName, licenses, 'LICENSE')
- checkFile(name, jarName, notices, 'NOTICE')
+ checkFile(depName, jarName, licenses, 'LICENSE')
+ checkFile(depName, jarName, notices, 'NOTICE')
}
licenses.each { license, count ->
@@ -169,8 +189,12 @@ public class DependencyLicensesTask extends DefaultTask {
}
}
+ private File getShaFile(String jarName) {
+ return new File(licensesDir, jarName + SHA_EXTENSION)
+ }
+
private void checkSha(File jar, String jarName, Set shaFiles) {
- File shaFile = new File(licensesDir, jarName + SHA_EXTENSION)
+ File shaFile = getShaFile(jarName)
if (shaFile.exists() == false) {
throw new GradleException("Missing SHA for ${jarName}. Run 'gradle updateSHAs' to create")
}
@@ -215,6 +239,10 @@ public class DependencyLicensesTask extends DefaultTask {
}
for (File dependency : parentTask.dependencies) {
String jarName = dependency.getName()
+ String depName = jarName - ~/\-\d+.*/
+ if (parentTask.ignoreShas.contains(depName)) {
+ continue
+ }
File shaFile = new File(parentTask.licensesDir, jarName + SHA_EXTENSION)
if (shaFile.exists() == false) {
logger.lifecycle("Adding sha for ${jarName}")
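
A sketch of how a module might combine the existing `mapping` rule with the new `ignoreSha` (the dependency names here are hypothetical):

```groovy
dependencyLicenses {
    // Many lucene-* jars share a single LICENSE/NOTICE prefix.
    mapping from: /lucene-.*/, to: 'lucene'
    // Locally built artifact: its sha changes on every build, so no .sha1
    // file may exist for it (the task now fails if one does).
    ignoreSha 'some-local-dep'
}
```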
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
index 52de7dac2d5a3..2711a0e38f23b 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/NamingConventionsTask.groovy
@@ -38,17 +38,7 @@ public class NamingConventionsTask extends LoggedExec {
* inputs (ie the jars/class files).
*/
@OutputFile
- File successMarker = new File(project.buildDir, 'markers/namingConventions')
-
- /**
- * The classpath to run the naming conventions checks against. Must contain the files in the test
- * output directory and everything required to load those classes.
- *
- * We don't declare the actual test files as a dependency or input because if they change then
- * this will change.
- */
- @InputFiles
- FileCollection classpath = project.sourceSets.test.runtimeClasspath
+ File successMarker = new File(project.buildDir, "markers/${this.name}")
/**
* Should we skip the integ tests in disguise tests? Defaults to true because only core names its
@@ -69,18 +59,35 @@ public class NamingConventionsTask extends LoggedExec {
@Input
String integTestClass = 'org.elasticsearch.test.ESIntegTestCase'
+ /**
+ * Should the test also check the main classpath for test classes instead of
+ * doing the usual checks to the test classpath.
+ */
+ @Input
+ boolean checkForTestsInMain = false;
+
public NamingConventionsTask() {
// Extra classpath contains the actual test
- project.configurations.create('namingConventions')
- Dependency buildToolsDep = project.dependencies.add('namingConventions',
- "org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
- buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
+ if (false == project.configurations.names.contains('namingConventions')) {
+ project.configurations.create('namingConventions')
+ Dependency buildToolsDep = project.dependencies.add('namingConventions',
+ "org.elasticsearch.gradle:build-tools:${VersionProperties.elasticsearch}")
+ buildToolsDep.transitive = false // We don't need gradle in the classpath. It conflicts.
+ }
FileCollection extraClasspath = project.configurations.namingConventions
dependsOn(extraClasspath)
- description = "Runs NamingConventionsCheck on ${classpath}"
+ FileCollection classpath = project.sourceSets.test.runtimeClasspath
+ inputs.files(classpath)
+ description = "Tests that test classes aren't misnamed or misplaced"
executable = new File(project.javaHome, 'bin/java')
- onlyIf { project.sourceSets.test.output.classesDir.exists() }
+ if (false == checkForTestsInMain) {
+ /* This task is created by default for all subprojects with this
+ * setting and there is no point in running it if the files don't
+ * exist. */
+ onlyIf { project.sourceSets.test.output.classesDir.exists() }
+ }
+
/*
* We build the arguments in a funny afterEvaluate/doFirst closure so that we can wait for the classpath to be
* ready for us. Strangely neither one on their own are good enough.
@@ -104,7 +111,14 @@ public class NamingConventionsTask extends LoggedExec {
if (':build-tools'.equals(project.path)) {
args('--self-test')
}
- args('--', project.sourceSets.test.output.classesDir.absolutePath)
+ if (checkForTestsInMain) {
+ args('--main')
+ args('--')
+ args(project.sourceSets.main.output.classesDir.absolutePath)
+ } else {
+ args('--')
+ args(project.sourceSets.test.output.classesDir.absolutePath)
+ }
}
}
doLast { successMarker.setText("", 'UTF-8') }
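
With the singleton `namingConventions` configuration guard above, a project can now register a second instance of the task pointed at the main source set. A sketch of that wiring (the task name is illustrative):

```groovy
task namingConventionsMain(type: org.elasticsearch.gradle.precommit.NamingConventionsTask) {
    checkForTestsInMain = true   // scan src/main for classes that look like tests
}
precommit.dependsOn namingConventionsMain
```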
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
index f451beeceb826..f7b30e774e340 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy
@@ -91,6 +91,7 @@ class PrecommitTasks {
if (testForbidden != null) {
testForbidden.configure {
signaturesURLs += getClass().getResource('/forbidden/es-test-signatures.txt')
+ signaturesURLs += getClass().getResource('/forbidden/http-signatures.txt')
}
}
Task forbiddenApis = project.tasks.findByName('forbiddenApis')
@@ -139,6 +140,7 @@ class PrecommitTasks {
configProperties = [
suppressions: checkstyleSuppressions
]
+ toolVersion = 7.5
}
for (String taskName : ['checkstyleMain', 'checkstyleTest']) {
Task task = project.tasks.findByName(taskName)
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
index 018f9fde2f2c4..33ca6dccfa32e 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/ThirdPartyAuditTask.groovy
@@ -209,9 +209,11 @@ public class ThirdPartyAuditTask extends AntTask {
try {
ant.thirdPartyAudit(failOnUnsupportedJava: false,
failOnMissingClasses: false,
- signaturesFile: new File(getClass().getResource('/forbidden/third-party-audit.txt').toURI()),
classpath: classpath.asPath) {
fileset(dir: tmpDir)
+ signatures {
+ string(value: getClass().getResourceAsStream('/forbidden/third-party-audit.txt').getText('UTF-8'))
+ }
}
} catch (BuildException ignore) {}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/AntFixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/AntFixture.groovy
new file mode 100644
index 0000000000000..34c3046aa2b6b
--- /dev/null
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/AntFixture.groovy
@@ -0,0 +1,291 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.test
+
+import org.apache.tools.ant.taskdefs.condition.Os
+import org.elasticsearch.gradle.AntTask
+import org.elasticsearch.gradle.LoggedExec
+import org.gradle.api.GradleException
+import org.gradle.api.Task
+import org.gradle.api.tasks.Exec
+import org.gradle.api.tasks.Input
+
+/**
+ * A fixture for integration tests which runs in a separate process launched by Ant.
+ */
+public class AntFixture extends AntTask implements Fixture {
+
+ /** The path to the executable that starts the fixture. */
+ @Input
+ String executable
+
+ private final List arguments = new ArrayList<>()
+
+ @Input
+ public void args(Object... args) {
+ arguments.addAll(args)
+ }
+
+ /**
+ * Environment variables for the fixture process. The value can be any object, which
+ * will have toString() called at execution time.
+ */
+ private final Map environment = new HashMap<>()
+
+ @Input
+ public void env(String key, Object value) {
+ environment.put(key, value)
+ }
+
+ /** A flag to indicate whether the command should be executed from a shell. */
+ @Input
+ boolean useShell = false
+
+ /**
+ * A flag to indicate whether the fixture should be run in the foreground, or spawned.
+ * It is protected so subclasses can override (eg RunTask).
+ */
+ protected boolean spawn = true
+
+ /**
+ * A closure to call before the fixture is considered ready. The closure is passed the fixture object,
+ * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
+ * condition is for http on the http port.
+ */
+ @Input
+ Closure waitCondition = { AntFixture fixture, AntBuilder ant ->
+ File tmpFile = new File(fixture.cwd, 'wait.success')
+ ant.get(src: "http://${fixture.addressAndPort}",
+ dest: tmpFile.toString(),
+ ignoreerrors: true, // do not fail on error, so logging information can be flushed
+ retries: 10)
+ return tmpFile.exists()
+ }
+
+ private final Task stopTask
+
+ public AntFixture() {
+ stopTask = createStopTask()
+ finalizedBy(stopTask)
+ }
+
+ @Override
+ public Task getStopTask() {
+ return stopTask
+ }
+
+ @Override
+ protected void runAnt(AntBuilder ant) {
+ project.delete(baseDir) // reset everything
+ cwd.mkdirs()
+ final String realExecutable
+ final List realArgs = new ArrayList<>()
+ final Map realEnv = environment
+ // We need to choose which executable we are using. In shell mode, or when we
+ // are spawning and thus using the wrapper script, the executable is the shell.
+ if (useShell || spawn) {
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+ realExecutable = 'cmd'
+ realArgs.add('/C')
+ realArgs.add('"') // quote the entire command
+ } else {
+ realExecutable = 'sh'
+ }
+ } else {
+ realExecutable = executable
+ realArgs.addAll(arguments)
+ }
+ if (spawn) {
+ writeWrapperScript(executable)
+ realArgs.add(wrapperScript)
+ realArgs.addAll(arguments)
+ }
+ if (Os.isFamily(Os.FAMILY_WINDOWS) && (useShell || spawn)) {
+ realArgs.add('"')
+ }
+ commandString.eachLine { line -> logger.info(line) }
+
+ ant.exec(executable: realExecutable, spawn: spawn, dir: cwd, taskname: name) {
+ realEnv.each { key, value -> env(key: key, value: value) }
+ realArgs.each { arg(value: it) }
+ }
+
+ String failedProp = "failed${name}"
+ // first wait for resources, or the failure marker from the wrapper script
+ ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) {
+ or {
+ resourceexists {
+ file(file: failureMarker.toString())
+ }
+ and {
+ resourceexists {
+ file(file: pidFile.toString())
+ }
+ resourceexists {
+ file(file: portsFile.toString())
+ }
+ }
+ }
+ }
+
+ if (ant.project.getProperty(failedProp) || failureMarker.exists()) {
+ fail("Failed to start ${name}")
+ }
+
+ // the process is started (has a pid) and is bound to a network interface
+ // so now wait until the waitCondition has been met
+ // TODO: change this to a loop?
+ boolean success
+ try {
+ success = waitCondition(this, ant)
+ } catch (Exception e) {
+ String msg = "Wait condition caught exception for ${name}"
+ logger.error(msg, e)
+ fail(msg, e)
+ }
+ if (success == false) {
+ fail("Wait condition failed for ${name}")
+ }
+ }
+
+ /** Returns a debug string used to log information about how the fixture was run. */
+ protected String getCommandString() {
+ String commandString = "\n${name} configuration:\n"
+ commandString += "-----------------------------------------\n"
+ commandString += " cwd: ${cwd}\n"
+ commandString += " command: ${executable} ${arguments.join(' ')}\n"
+ commandString += ' environment:\n'
+ environment.each { k, v -> commandString += " ${k}: ${v}\n" }
+ if (spawn) {
+ commandString += "\n [${wrapperScript.name}]\n"
+ wrapperScript.eachLine('UTF-8', { line -> commandString += " ${line}\n"})
+ }
+ return commandString
+ }
+
+ /**
+ * Writes a script to run the real executable, so that stdout/stderr can be captured.
+ * TODO: this could be removed if we do use our own ProcessBuilder and pump output from the process
+ */
+ private void writeWrapperScript(String executable) {
+ wrapperScript.parentFile.mkdirs()
+ String argsPasser = '"$@"'
+ String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi"
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+ argsPasser = '%*'
+ exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
+ }
+ wrapperScript.setText("\"${executable}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
+ }
+
+ /** Fail the build with the given message, logging relevant info. */
+ private void fail(String msg, Exception... suppressed) {
+ if (logger.isInfoEnabled() == false) {
+ // We already log the command at info level. No need to do it twice.
+ commandString.eachLine { line -> logger.error(line) }
+ }
+ logger.error("${name} output:")
+ logger.error("-----------------------------------------")
+ logger.error(" failure marker exists: ${failureMarker.exists()}")
+ logger.error(" pid file exists: ${pidFile.exists()}")
+ logger.error(" ports file exists: ${portsFile.exists()}")
+ // also dump the log file for the startup script (which will include ES logging output to stdout)
+ if (runLog.exists()) {
+ logger.error("\n [log]")
+ runLog.eachLine { line -> logger.error(" ${line}") }
+ }
+ logger.error("-----------------------------------------")
+ GradleException toThrow = new GradleException(msg)
+ for (Exception e : suppressed) {
+ toThrow.addSuppressed(e)
+ }
+ throw toThrow
+ }
+
+ /** Adds a task to kill the fixture process identified by the given pid file */
+ private Task createStopTask() {
+ final AntFixture fixture = this
+ final Object pid = "${ -> fixture.pid }"
+ Exec stop = project.tasks.create(name: "${name}#stop", type: LoggedExec)
+ stop.onlyIf { fixture.pidFile.exists() }
+ stop.doFirst {
+ logger.info("Shutting down ${fixture.name} with pid ${pid}")
+ }
+ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
+ stop.executable = 'Taskkill'
+ stop.args('/PID', pid, '/F')
+ } else {
+ stop.executable = 'kill'
+ stop.args('-9', pid)
+ }
+ stop.doLast {
+ project.delete(fixture.pidFile)
+ }
+ return stop
+ }
+
+ /**
+ * A path relative to the build dir that all configuration and runtime files
+ * will live in for this fixture
+ */
+ protected File getBaseDir() {
+ return new File(project.buildDir, "fixtures/${name}")
+ }
+
+ /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */
+ protected File getCwd() {
+ return new File(baseDir, 'cwd')
+ }
+
+ /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */
+ protected File getPidFile() {
+ return new File(baseDir, 'pid')
+ }
+
+ /** Reads the pid file and returns the process' pid */
+ public int getPid() {
+ return Integer.parseInt(pidFile.getText('UTF-8').trim())
+ }
+
+ /** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */
+ protected File getPortsFile() {
+ return new File(baseDir, 'ports')
+ }
+
+ /** Returns an address and port suitable for a uri to connect to this node over http */
+ public String getAddressAndPort() {
+ return portsFile.readLines("UTF-8").get(0)
+ }
+
+ /** Returns a file that wraps around the actual command when {@code spawn == true}. */
+ protected File getWrapperScript() {
+ return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run')
+ }
+
+ /** Returns a file that the wrapper script writes when the command failed. */
+ protected File getFailureMarker() {
+ return new File(cwd, 'run.failed')
+ }
+
+ /** Returns the log file that the wrapper script redirects the command's output to. */
+ protected File getRunLog() {
+ return new File(cwd, 'run.log')
+ }
+}
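
Consumption pattern sketch for the renamed task type (the fixture class and task names are hypothetical; the stop task is wired up automatically via `finalizedBy`):

```groovy
task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
    dependsOn compileTestJava
    executable = new File(project.javaHome, 'bin/java')
    // Lazy GString so the classpath is resolved at execution time.
    args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }",
            'org.example.ExampleTestFixture', baseDir
}
```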
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
index 57adaa2576dd1..ab618a0fdc7f7 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy
@@ -46,11 +46,11 @@ class ClusterConfiguration {
int transportPort = 0
/**
- * An override of the data directory. This may only be used with a single node.
- * The value is lazily evaluated at runtime as a String path.
+ * An override of the data directory. The closure is passed the node number
+ * and returns the data directory to use for that node.
*/
@Input
- Object dataDir = null
+ Closure dataDir = null
/** Optional override of the cluster name. */
@Input
@@ -72,11 +72,17 @@ class ClusterConfiguration {
boolean useMinimumMasterNodes = true
@Input
- String jvmArgs = "-ea" +
- " " + "-Xms" + System.getProperty('tests.heap.size', '512m') +
+ String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') +
" " + "-Xmx" + System.getProperty('tests.heap.size', '512m') +
" " + System.getProperty('tests.jvm.argline', '')
+ /**
+ * Should the shared environment be cleaned on cluster startup? Defaults
+ * to {@code true} so we run with a clean cluster, but some tests wish to
+ * preserve snapshots between clusters, so they set this to {@code false}.
+ */
+ @Input
+ boolean cleanShared = true
/**
* A closure to call which returns the unicast host to connect to for cluster formation.
@@ -90,7 +96,7 @@ class ClusterConfiguration {
if (seedNode == node) {
return null
}
- ant.waitfor(maxwait: '20', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond') {
+ ant.waitfor(maxwait: '40', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond') {
resourceexists {
file(file: seedNode.transportPortsFile.toString())
}
@@ -127,6 +133,8 @@ class ClusterConfiguration {
Map settings = new HashMap<>()
+ Map keystoreSettings = new HashMap<>()
+
// map from destination path, to source file
Map extraConfigFiles = new HashMap<>()
@@ -136,6 +144,8 @@ class ClusterConfiguration {
LinkedHashMap setupCommands = new LinkedHashMap<>()
+ List dependencies = new ArrayList<>()
+
@Input
void systemProperty(String property, String value) {
systemProperties.put(property, value)
@@ -146,6 +156,11 @@ class ClusterConfiguration {
settings.put(name, value)
}
+ @Input
+ void keystoreSetting(String name, String value) {
+ keystoreSettings.put(name, value)
+ }
+
@Input
void plugin(String path) {
Project pluginProject = project.project(path)
@@ -174,4 +189,10 @@ class ClusterConfiguration {
}
extraConfigFiles.put(path, sourceFile)
}
+
+ /** Add dependencies that must be run before the first task setting up the cluster. */
+ @Input
+ void dependsOn(Object... deps) {
+ dependencies.addAll(deps)
+ }
}
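
A hypothetical `integTestCluster` block exercising the new knobs (the setting names, values, and the `exampleFixture` task are illustrative, not from this change):

```groovy
integTestCluster {
    keystoreSetting 'example.credentials.password', 's3kr1t'      // stored in the ES keystore, not elasticsearch.yml
    dataDir = { nodeNum -> "${project.buildDir}/data/${nodeNum}" } // per-node data path override
    cleanShared = false        // keep the shared dir, e.g. to reuse snapshots across clusters
    dependsOn exampleFixture   // runs before the first cluster setup task; a Fixture's stop task is also wired up
}
```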
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index 756c05b07d523..4dbf3efe595f9 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -38,6 +38,7 @@ import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Delete
import org.gradle.api.tasks.Exec
+import java.nio.charset.StandardCharsets
import java.nio.file.Paths
import java.util.concurrent.TimeUnit
@@ -51,22 +52,28 @@ class ClusterFormationTasks {
*
* Returns a list of NodeInfo objects for each node in the cluster.
*/
- static List setup(Project project, Task task, ClusterConfiguration config) {
- if (task.getEnabled() == false) {
- // no need to add cluster formation tasks if the task won't run!
- return
- }
+ static List setup(Project project, String prefix, Task runner, ClusterConfiguration config) {
File sharedDir = new File(project.buildDir, "cluster/shared")
- // first we remove everything in the shared cluster directory to ensure there are no leftovers in repos or anything
- // in theory this should not be necessary but repositories are only deleted in the cluster-state and not on-disk
- // such that snapshots survive failures / test runs and there is no simple way today to fix that.
- Task cleanup = project.tasks.create(name: "${task.name}#prepareCluster.cleanShared", type: Delete, dependsOn: task.dependsOn.collect()) {
- delete sharedDir
- doLast {
- sharedDir.mkdirs()
- }
- }
- List startTasks = [cleanup]
+ Object startDependencies = config.dependencies
+ /* First, if we want a clean environment, we remove everything in the
+ * shared cluster directory to ensure there are no leftovers in repos
+ * or anything. In theory this should not be necessary, but repositories
+ * are only deleted in the cluster-state and not on-disk, such that
+ * snapshots survive failures / test runs, and there is no simple way
+ * today to fix that. */
+ if (config.cleanShared) {
+ Task cleanup = project.tasks.create(
+ name: "${prefix}#prepareCluster.cleanShared",
+ type: Delete,
+ dependsOn: startDependencies) {
+ delete sharedDir
+ doLast {
+ sharedDir.mkdirs()
+ }
+ }
+ startDependencies = cleanup
+ }
+ List startTasks = []
List nodes = []
if (config.numNodes < config.numBwcNodes) {
throw new GradleException("numNodes must be >= numBwcNodes [${config.numNodes} < ${config.numBwcNodes}]")
@@ -75,25 +82,25 @@ class ClusterFormationTasks {
throw new GradleException("bwcVersion must not be null if numBwcNodes is > 0")
}
// this is our current version distribution configuration we use for all kinds of REST tests etc.
- String distroConfigName = "${task.name}_elasticsearchDistro"
- Configuration currentDistro = project.configurations.create(distroConfigName)
+ Configuration currentDistro = project.configurations.create("${prefix}_elasticsearchDistro")
+ Configuration bwcDistro = project.configurations.create("${prefix}_elasticsearchBwcDistro")
+ Configuration bwcPlugins = project.configurations.create("${prefix}_elasticsearchBwcPlugins")
configureDistributionDependency(project, config.distribution, currentDistro, VersionProperties.elasticsearch)
- if (config.bwcVersion != null && config.numBwcNodes > 0) {
+ if (config.numBwcNodes > 0) {
+ if (config.bwcVersion == null) {
+ throw new IllegalArgumentException("Must specify bwcVersion when numBwcNodes > 0")
+ }
// if we have a cluster that has a BWC cluster we also need to configure a dependency on the BWC version
// this version uses the same distribution etc. and only differs in the version we depend on.
// from here on everything else works the same as if it's the current version, we fetch the BWC version
// from mirrors using gradles built-in mechanism etc.
- project.configurations {
- elasticsearchBwcDistro
- elasticsearchBwcPlugins
- }
- configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchBwcDistro, config.bwcVersion)
+
+ configureDistributionDependency(project, config.distribution, bwcDistro, config.bwcVersion)
for (Map.Entry entry : config.plugins.entrySet()) {
- configureBwcPluginDependency("${task.name}_elasticsearchBwcPlugins", project, entry.getValue(),
- project.configurations.elasticsearchBwcPlugins, config.bwcVersion)
+ configureBwcPluginDependency("${prefix}_elasticsearchBwcPlugins", project, entry.getValue(), bwcPlugins, config.bwcVersion)
}
- project.configurations.elasticsearchBwcDistro.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
- project.configurations.elasticsearchBwcPlugins.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
+ bwcDistro.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
+ bwcPlugins.resolutionStrategy.cacheChangingModulesFor(0, TimeUnit.SECONDS)
}
for (int i = 0; i < config.numNodes; i++) {
// we start N nodes and out of these N nodes there might be M bwc nodes.
@@ -102,15 +109,16 @@ class ClusterFormationTasks {
Configuration distro = currentDistro
if (i < config.numBwcNodes) {
elasticsearchVersion = config.bwcVersion
- distro = project.configurations.elasticsearchBwcDistro
+ distro = bwcDistro
}
- NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion, sharedDir)
+ NodeInfo node = new NodeInfo(config, i, project, prefix, elasticsearchVersion, sharedDir)
nodes.add(node)
- startTasks.add(configureNode(project, task, cleanup, node, distro, nodes.get(0)))
+ Object dependsOn = startTasks.empty ? startDependencies : startTasks.get(0)
+ startTasks.add(configureNode(project, prefix, runner, dependsOn, node, config, distro, nodes.get(0)))
}
- Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)
- task.dependsOn(wait)
+ Task wait = configureWaitTask("${prefix}#wait", project, nodes, startTasks)
+ runner.dependsOn(wait)
return nodes
}
@@ -150,59 +158,71 @@ class ClusterFormationTasks {
*
* @return a task which starts the node.
*/
- static Task configureNode(Project project, Task task, Object dependsOn, NodeInfo node, Configuration configuration, NodeInfo seedNode) {
+ static Task configureNode(Project project, String prefix, Task runner, Object dependsOn, NodeInfo node, ClusterConfiguration config,
+ Configuration distribution, NodeInfo seedNode) {
// tasks are chained so their execution order is maintained
- Task setup = project.tasks.create(name: taskName(task, node, 'clean'), type: Delete, dependsOn: dependsOn) {
+ Task setup = project.tasks.create(name: taskName(prefix, node, 'clean'), type: Delete, dependsOn: dependsOn) {
delete node.homeDir
delete node.cwd
doLast {
node.cwd.mkdirs()
}
}
- setup = configureCheckPreviousTask(taskName(task, node, 'checkPrevious'), project, setup, node)
- setup = configureStopTask(taskName(task, node, 'stopPrevious'), project, setup, node)
- setup = configureExtractTask(taskName(task, node, 'extract'), project, setup, node, configuration)
- setup = configureWriteConfigTask(taskName(task, node, 'configure'), project, setup, node, seedNode)
+
+ setup = configureCheckPreviousTask(taskName(prefix, node, 'checkPrevious'), project, setup, node)
+ setup = configureStopTask(taskName(prefix, node, 'stopPrevious'), project, setup, node)
+ setup = configureExtractTask(taskName(prefix, node, 'extract'), project, setup, node, distribution)
+ setup = configureWriteConfigTask(taskName(prefix, node, 'configure'), project, setup, node, seedNode)
+ setup = configureCreateKeystoreTask(taskName(prefix, node, 'createKeystore'), project, setup, node)
+ setup = configureAddKeystoreSettingTasks(prefix, project, setup, node)
+
if (node.config.plugins.isEmpty() == false) {
if (node.nodeVersion == VersionProperties.elasticsearch) {
- setup = configureCopyPluginsTask(taskName(task, node, 'copyPlugins'), project, setup, node)
+ setup = configureCopyPluginsTask(taskName(prefix, node, 'copyPlugins'), project, setup, node, prefix)
} else {
- setup = configureCopyBwcPluginsTask(taskName(task, node, 'copyBwcPlugins'), project, setup, node)
+ setup = configureCopyBwcPluginsTask(taskName(prefix, node, 'copyBwcPlugins'), project, setup, node, prefix)
}
}
// install modules
for (Project module : node.config.modules) {
String actionName = pluginTaskName('install', module.name, 'Module')
- setup = configureInstallModuleTask(taskName(task, node, actionName), project, setup, node, module)
+ setup = configureInstallModuleTask(taskName(prefix, node, actionName), project, setup, node, module)
}
// install plugins
for (Map.Entry plugin : node.config.plugins.entrySet()) {
String actionName = pluginTaskName('install', plugin.getKey(), 'Plugin')
- setup = configureInstallPluginTask(taskName(task, node, actionName), project, setup, node, plugin.getValue())
+ setup = configureInstallPluginTask(taskName(prefix, node, actionName), project, setup, node, plugin.getValue(), prefix)
}
// sets up any extra config files that need to be copied over to the ES instance;
// its run after plugins have been installed, as the extra config files may belong to plugins
- setup = configureExtraConfigFilesTask(taskName(task, node, 'extraConfig'), project, setup, node)
+ setup = configureExtraConfigFilesTask(taskName(prefix, node, 'extraConfig'), project, setup, node)
// extra setup commands
for (Map.Entry command : node.config.setupCommands.entrySet()) {
// the first argument is the actual script name, relative to home
Object[] args = command.getValue().clone()
args[0] = new File(node.homeDir, args[0].toString())
- setup = configureExecTask(taskName(task, node, command.getKey()), project, setup, node, args)
+ setup = configureExecTask(taskName(prefix, node, command.getKey()), project, setup, node, args)
}
- Task start = configureStartTask(taskName(task, node, 'start'), project, setup, node)
+ Task start = configureStartTask(taskName(prefix, node, 'start'), project, setup, node)
if (node.config.daemonize) {
- Task stop = configureStopTask(taskName(task, node, 'stop'), project, [], node)
+ Task stop = configureStopTask(taskName(prefix, node, 'stop'), project, [], node)
// if we are running in the background, make sure to stop the server when the task completes
- task.finalizedBy(stop)
+ runner.finalizedBy(stop)
start.finalizedBy(stop)
+ for (Object dependency : config.dependencies) {
+ if (dependency instanceof Fixture) {
+ def depStop = ((Fixture)dependency).stopTask
+ runner.finalizedBy(depStop)
+ start.finalizedBy(depStop)
+ }
+ }
}
return start
}
@@ -276,8 +296,7 @@ class ClusterFormationTasks {
'path.repo' : "${node.sharedDir}/repo",
'path.shared_data' : "${node.sharedDir}/",
// Define a node attribute so we can test that it exists
- 'node.attr.testattr' : 'test',
- 'repositories.url.allowed_urls': 'http://snapshot.test*'
+ 'node.attr.testattr' : 'test'
]
// we set min master nodes to the total number of nodes in the cluster and
// basically skip initial state recovery to allow the cluster to form using a realistic master election
@@ -307,6 +326,33 @@ class ClusterFormationTasks {
}
}
+ /** Adds a task to create the keystore */
+ static Task configureCreateKeystoreTask(String name, Project project, Task setup, NodeInfo node) {
+ if (node.config.keystoreSettings.isEmpty()) {
+ return setup
+ } else {
+ File esKeystoreUtil = Paths.get(node.homeDir.toString(), 'bin/elasticsearch-keystore').toFile()
+ return configureExecTask(name, project, setup, node, esKeystoreUtil, 'create')
+ }
+ }
+
+ /** Adds tasks to add settings to the keystore */
+ static Task configureAddKeystoreSettingTasks(String parent, Project project, Task setup, NodeInfo node) {
+ Map kvs = node.config.keystoreSettings
+ File esKeystoreUtil = Paths.get(node.homeDir.toString(), 'bin/elasticsearch-keystore').toFile()
+ Task parentTask = setup
+ for (Map.Entry entry in kvs) {
+ String key = entry.getKey()
+ String name = taskName(parent, node, 'addToKeystore#' + key)
+ Task t = configureExecTask(name, project, parentTask, node, esKeystoreUtil, 'add', key, '-x')
+ t.doFirst {
+ standardInput = new ByteArrayInputStream(entry.getValue().getBytes(StandardCharsets.UTF_8))
+ }
+ parentTask = t
+ }
+ return parentTask
+ }
+
static Task configureExtraConfigFilesTask(String name, Project project, Task setup, NodeInfo node) {
if (node.config.extraConfigFiles.isEmpty()) {
return setup
@@ -343,7 +389,7 @@ class ClusterFormationTasks {
* For each plugin, if the plugin has rest spec apis in its tests, those api files are also copied
* to the test resources for this project.
*/
- static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node) {
+ static Task configureCopyPluginsTask(String name, Project project, Task setup, NodeInfo node, String prefix) {
Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup)
List pluginFiles = []
@@ -351,7 +397,7 @@ class ClusterFormationTasks {
Project pluginProject = plugin.getValue()
verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
- String configurationName = "_plugin_${pluginProject.path}"
+ String configurationName = "_plugin_${prefix}_${pluginProject.path}"
Configuration configuration = project.configurations.findByName(configurationName)
if (configuration == null) {
configuration = project.configurations.create(configurationName)
@@ -381,25 +427,27 @@ class ClusterFormationTasks {
}
/** Configures task to copy a plugin based on a zip file resolved using dependencies for an older version */
- static Task configureCopyBwcPluginsTask(String name, Project project, Task setup, NodeInfo node) {
+ static Task configureCopyBwcPluginsTask(String name, Project project, Task setup, NodeInfo node, String prefix) {
+ Configuration bwcPlugins = project.configurations.getByName("${prefix}_elasticsearchBwcPlugins")
for (Map.Entry plugin : node.config.plugins.entrySet()) {
Project pluginProject = plugin.getValue()
verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
- String configurationName = "_plugin_bwc_${pluginProject.path}"
+ String configurationName = "_plugin_bwc_${prefix}_${pluginProject.path}"
Configuration configuration = project.configurations.findByName(configurationName)
if (configuration == null) {
configuration = project.configurations.create(configurationName)
}
final String depName = pluginProject.extensions.findByName('esplugin').name
- Dependency dep = project.configurations.elasticsearchBwcPlugins.dependencies.find {
+
+ Dependency dep = bwcPlugins.dependencies.find {
it.name == depName
}
configuration.dependencies.add(dep)
}
Copy copyPlugins = project.tasks.create(name: name, type: Copy, dependsOn: setup) {
- from project.configurations.elasticsearchBwcPlugins
+ from bwcPlugins
into node.pluginsTmpDir
}
return copyPlugins
@@ -419,12 +467,12 @@ class ClusterFormationTasks {
return installModule
}
- static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Project plugin) {
+ static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Project plugin, String prefix) {
final FileCollection pluginZip;
if (node.nodeVersion != VersionProperties.elasticsearch) {
- pluginZip = project.configurations.getByName("_plugin_bwc_${plugin.path}")
+ pluginZip = project.configurations.getByName("_plugin_bwc_${prefix}_${plugin.path}")
} else {
- pluginZip = project.configurations.getByName("_plugin_${plugin.path}")
+ pluginZip = project.configurations.getByName("_plugin_${prefix}_${plugin.path}")
}
// delay reading the file location until execution time by wrapping in a closure within a GString
Object file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}"
@@ -540,7 +588,7 @@ class ClusterFormationTasks {
anyNodeFailed |= node.failedMarker.exists()
}
if (ant.properties.containsKey("failed${name}".toString()) || anyNodeFailed) {
- waitFailed(nodes, logger, 'Failed to start elasticsearch')
+ waitFailed(project, nodes, logger, 'Failed to start elasticsearch')
}
// go through each node checking the wait condition
@@ -557,14 +605,14 @@ class ClusterFormationTasks {
}
if (success == false) {
- waitFailed(nodes, logger, 'Elasticsearch cluster failed to pass wait condition')
+ waitFailed(project, nodes, logger, 'Elasticsearch cluster failed to pass wait condition')
}
}
}
return wait
}
- static void waitFailed(List nodes, Logger logger, String msg) {
+ static void waitFailed(Project project, List nodes, Logger logger, String msg) {
for (NodeInfo node : nodes) {
if (logger.isInfoEnabled() == false) {
// We already log the command at info level. No need to do it twice.
@@ -584,6 +632,17 @@ class ClusterFormationTasks {
logger.error("|\n| [log]")
node.startLog.eachLine { line -> logger.error("| ${line}") }
}
+ if (node.pidFile.exists() && node.failedMarker.exists() == false &&
+ (node.httpPortsFile.exists() == false || node.transportPortsFile.exists() == false)) {
+ logger.error("|\n| [jstack]")
+ String pid = node.pidFile.getText('UTF-8')
+ ByteArrayOutputStream output = new ByteArrayOutputStream()
+ project.exec {
+ commandLine = ["${project.javaHome}/bin/jstack", pid]
+ standardOutput = output
+ }
+ output.toString('UTF-8').eachLine { line -> logger.error("| ${line}") }
+ }
logger.error("|-----------------------------------------")
}
throw new GradleException(msg)
@@ -608,11 +667,11 @@ class ClusterFormationTasks {
standardOutput = new ByteArrayOutputStream()
doLast {
String out = standardOutput.toString()
- if (out.contains("${pid} org.elasticsearch.bootstrap.Elasticsearch") == false) {
+ if (out.contains("${ext.pid} org.elasticsearch.bootstrap.Elasticsearch") == false) {
logger.error('jps -l')
logger.error(out)
- logger.error("pid file: ${pidFile}")
- logger.error("pid: ${pid}")
+ logger.error("pid file: ${node.pidFile}")
+ logger.error("pid: ${ext.pid}")
throw new GradleException("jps -l did not report any process with org.elasticsearch.bootstrap.Elasticsearch\n" +
"Did you run gradle clean? Maybe an old pid file is still lying around.")
} else {
@@ -649,11 +708,11 @@ class ClusterFormationTasks {
}
/** Returns a unique task name for this task and node configuration */
- static String taskName(Task parentTask, NodeInfo node, String action) {
+ static String taskName(String prefix, NodeInfo node, String action) {
if (node.config.numNodes > 1) {
- return "${parentTask.name}#node${node.nodeNum}.${action}"
+ return "${prefix}#node${node.nodeNum}.${action}"
} else {
- return "${parentTask.name}#${action}"
+ return "${prefix}#${action}"
}
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy
index 46b81624ba3fa..498a1627b3598 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/Fixture.groovy
@@ -16,272 +16,15 @@
* specific language governing permissions and limitations
* under the License.
*/
-
package org.elasticsearch.gradle.test
-import org.apache.tools.ant.taskdefs.condition.Os
-import org.elasticsearch.gradle.AntTask
-import org.elasticsearch.gradle.LoggedExec
-import org.gradle.api.GradleException
-import org.gradle.api.Task
-import org.gradle.api.tasks.Exec
-import org.gradle.api.tasks.Input
-
/**
- * A fixture for integration tests which runs in a separate process.
+ * Any object that can produce an accompanying stop task, meant to tear down
+ * a previously instantiated service.
*/
-public class Fixture extends AntTask {
-
- /** The path to the executable that starts the fixture. */
- @Input
- String executable
-
- private final List arguments = new ArrayList<>()
-
- @Input
- public void args(Object... args) {
- arguments.addAll(args)
- }
-
- /**
- * Environment variables for the fixture process. The value can be any object, which
- * will have toString() called at execution time.
- */
- private final Map environment = new HashMap<>()
-
- @Input
- public void env(String key, Object value) {
- environment.put(key, value)
- }
-
- /** A flag to indicate whether the command should be executed from a shell. */
- @Input
- boolean useShell = false
-
- /**
- * A flag to indicate whether the fixture should be run in the foreground, or spawned.
- * It is protected so subclasses can override (eg RunTask).
- */
- protected boolean spawn = true
-
- /**
- * A closure to call before the fixture is considered ready. The closure is passed the fixture object,
- * as well as a groovy AntBuilder, to enable running ant condition checks. The default wait
- * condition is for http on the http port.
- */
- @Input
- Closure waitCondition = { Fixture fixture, AntBuilder ant ->
- File tmpFile = new File(fixture.cwd, 'wait.success')
- ant.get(src: "http://${fixture.addressAndPort}",
- dest: tmpFile.toString(),
- ignoreerrors: true, // do not fail on error, so logging information can be flushed
- retries: 10)
- return tmpFile.exists()
- }
+public interface Fixture {
/** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */
- public final Task stopTask
-
- public Fixture() {
- stopTask = createStopTask()
- finalizedBy(stopTask)
- }
-
- @Override
- protected void runAnt(AntBuilder ant) {
- project.delete(baseDir) // reset everything
- cwd.mkdirs()
- final String realExecutable
- final List realArgs = new ArrayList<>()
- final Map realEnv = environment
- // We need to choose which executable we are using. In shell mode, or when we
- // are spawning and thus using the wrapper script, the executable is the shell.
- if (useShell || spawn) {
- if (Os.isFamily(Os.FAMILY_WINDOWS)) {
- realExecutable = 'cmd'
- realArgs.add('/C')
- realArgs.add('"') // quote the entire command
- } else {
- realExecutable = 'sh'
- }
- } else {
- realExecutable = executable
- realArgs.addAll(arguments)
- }
- if (spawn) {
- writeWrapperScript(executable)
- realArgs.add(wrapperScript)
- realArgs.addAll(arguments)
- }
- if (Os.isFamily(Os.FAMILY_WINDOWS) && (useShell || spawn)) {
- realArgs.add('"')
- }
- commandString.eachLine { line -> logger.info(line) }
-
- ant.exec(executable: realExecutable, spawn: spawn, dir: cwd, taskname: name) {
- realEnv.each { key, value -> env(key: key, value: value) }
- realArgs.each { arg(value: it) }
- }
-
- String failedProp = "failed${name}"
- // first wait for resources, or the failure marker from the wrapper script
- ant.waitfor(maxwait: '30', maxwaitunit: 'second', checkevery: '500', checkeveryunit: 'millisecond', timeoutproperty: failedProp) {
- or {
- resourceexists {
- file(file: failureMarker.toString())
- }
- and {
- resourceexists {
- file(file: pidFile.toString())
- }
- resourceexists {
- file(file: portsFile.toString())
- }
- }
- }
- }
-
- if (ant.project.getProperty(failedProp) || failureMarker.exists()) {
- fail("Failed to start ${name}")
- }
-
- // the process is started (has a pid) and is bound to a network interface
- // so now wait undil the waitCondition has been met
- // TODO: change this to a loop?
- boolean success
- try {
- success = waitCondition(this, ant) == false
- } catch (Exception e) {
- String msg = "Wait condition caught exception for ${name}"
- logger.error(msg, e)
- fail(msg, e)
- }
- if (success == false) {
- fail("Wait condition failed for ${name}")
- }
- }
-
- /** Returns a debug string used to log information about how the fixture was run. */
- protected String getCommandString() {
- String commandString = "\n${name} configuration:\n"
- commandString += "-----------------------------------------\n"
- commandString += " cwd: ${cwd}\n"
- commandString += " command: ${executable} ${arguments.join(' ')}\n"
- commandString += ' environment:\n'
- environment.each { k, v -> commandString += " ${k}: ${v}\n" }
- if (spawn) {
- commandString += "\n [${wrapperScript.name}]\n"
- wrapperScript.eachLine('UTF-8', { line -> commandString += " ${line}\n"})
- }
- return commandString
- }
-
- /**
- * Writes a script to run the real executable, so that stdout/stderr can be captured.
- * TODO: this could be removed if we do use our own ProcessBuilder and pump output from the process
- */
- private void writeWrapperScript(String executable) {
- wrapperScript.parentFile.mkdirs()
- String argsPasser = '"$@"'
- String exitMarker = "; if [ \$? != 0 ]; then touch run.failed; fi"
- if (Os.isFamily(Os.FAMILY_WINDOWS)) {
- argsPasser = '%*'
- exitMarker = "\r\n if \"%errorlevel%\" neq \"0\" ( type nul >> run.failed )"
- }
- wrapperScript.setText("\"${executable}\" ${argsPasser} > run.log 2>&1 ${exitMarker}", 'UTF-8')
- }
-
- /** Fail the build with the given message, and logging relevant info*/
- private void fail(String msg, Exception... suppressed) {
- if (logger.isInfoEnabled() == false) {
- // We already log the command at info level. No need to do it twice.
- commandString.eachLine { line -> logger.error(line) }
- }
- logger.error("${name} output:")
- logger.error("-----------------------------------------")
- logger.error(" failure marker exists: ${failureMarker.exists()}")
- logger.error(" pid file exists: ${pidFile.exists()}")
- logger.error(" ports file exists: ${portsFile.exists()}")
- // also dump the log file for the startup script (which will include ES logging output to stdout)
- if (runLog.exists()) {
- logger.error("\n [log]")
- runLog.eachLine { line -> logger.error(" ${line}") }
- }
- logger.error("-----------------------------------------")
- GradleException toThrow = new GradleException(msg)
- for (Exception e : suppressed) {
- toThrow.addSuppressed(e)
- }
- throw toThrow
- }
-
- /** Adds a task to kill an elasticsearch node with the given pidfile */
- private Task createStopTask() {
- final Fixture fixture = this
- final Object pid = "${ -> fixture.pid }"
- Exec stop = project.tasks.create(name: "${name}#stop", type: LoggedExec)
- stop.onlyIf { fixture.pidFile.exists() }
- stop.doFirst {
- logger.info("Shutting down ${fixture.name} with pid ${pid}")
- }
- if (Os.isFamily(Os.FAMILY_WINDOWS)) {
- stop.executable = 'Taskkill'
- stop.args('/PID', pid, '/F')
- } else {
- stop.executable = 'kill'
- stop.args('-9', pid)
- }
- stop.doLast {
- project.delete(fixture.pidFile)
- }
- return stop
- }
-
- /**
- * A path relative to the build dir that all configuration and runtime files
- * will live in for this fixture
- */
- protected File getBaseDir() {
- return new File(project.buildDir, "fixtures/${name}")
- }
-
- /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */
- protected File getCwd() {
- return new File(baseDir, 'cwd')
- }
-
- /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */
- protected File getPidFile() {
- return new File(baseDir, 'pid')
- }
-
- /** Reads the pid file and returns the process' pid */
- public int getPid() {
- return Integer.parseInt(pidFile.getText('UTF-8').trim())
- }
-
- /** Returns the file the process writes its bound ports to. Defaults to "ports" inside baseDir. */
- protected File getPortsFile() {
- return new File(baseDir, 'ports')
- }
-
- /** Returns an address and port suitable for a uri to connect to this node over http */
- public String getAddressAndPort() {
- return portsFile.readLines("UTF-8").get(0)
- }
-
- /** Returns a file that wraps around the actual command when {@code spawn == true}. */
- protected File getWrapperScript() {
- return new File(cwd, Os.isFamily(Os.FAMILY_WINDOWS) ? 'run.bat' : 'run')
- }
-
- /** Returns a file that the wrapper script writes when the command failed. */
- protected File getFailureMarker() {
- return new File(cwd, 'run.failed')
- }
+ /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. */
+ public Object getStopTask()
- /** Returns the log file the wrapper script redirects the process' stdout/stderr to. */
- protected File getRunLog() {
- return new File(cwd, 'run.log')
- }
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy
index 1cca2c5aa49c6..1c0aec1bc00f3 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/MessyTestPlugin.groovy
@@ -48,7 +48,7 @@ class MessyTestPlugin extends StandaloneTestPlugin {
}
private static addPluginResources(Project project, Project pluginProject) {
- String outputDir = "generated-resources/${pluginProject.name}"
+ String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
copyPluginMetadata.into(outputDir)
@@ -57,7 +57,7 @@ class MessyTestPlugin extends StandaloneTestPlugin {
project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
// add each generated dir to the test classpath in IDEs
- //project.eclipse.classpath.sourceSets = [project.sourceSets.test]
project.idea.module.singleEntryLibraries= ['TEST': [project.file(outputDir)]]
+ // Eclipse doesn't need this because it gets the entire module as a dependency
}
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
index a9473cc28d280..46542708420f1 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy
@@ -21,7 +21,6 @@ package org.elasticsearch.gradle.test
import org.apache.tools.ant.taskdefs.condition.Os
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Project
-import org.gradle.api.Task
/**
* A container for the files and configuration associated with a single node in a test cluster.
@@ -96,26 +95,23 @@ class NodeInfo {
/** the version of elasticsearch that this node runs */
String nodeVersion
- /** Creates a node to run as part of a cluster for the given task */
- NodeInfo(ClusterConfiguration config, int nodeNum, Project project, Task task, String nodeVersion, File sharedDir) {
+ /** Holds node configuration for part of a test cluster. */
+ NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, String nodeVersion, File sharedDir) {
this.config = config
this.nodeNum = nodeNum
this.sharedDir = sharedDir
if (config.clusterName != null) {
clusterName = config.clusterName
} else {
- clusterName = "${task.path.replace(':', '_').substring(1)}"
+ clusterName = project.path.replace(':', '_').substring(1) + '_' + prefix
}
- baseDir = new File(project.buildDir, "cluster/${task.name} node${nodeNum}")
+ baseDir = new File(project.buildDir, "cluster/${prefix} node${nodeNum}")
pidFile = new File(baseDir, 'es.pid')
this.nodeVersion = nodeVersion
homeDir = homeDir(baseDir, config.distribution, nodeVersion)
confDir = confDir(baseDir, config.distribution, nodeVersion)
if (config.dataDir != null) {
- if (config.numNodes != 1) {
- throw new IllegalArgumentException("Cannot set data dir for integ test with more than one node")
- }
- dataDir = config.dataDir
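+ // config.dataDir is now resolved per node (it is invoked with the node number), so
+ // multi-node clusters no longer have to share, or forbid, a custom data path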
+ dataDir = "${config.dataDir(nodeNum)}"
} else {
dataDir = new File(homeDir, "data")
}
@@ -151,6 +147,9 @@ class NodeInfo {
args.addAll("-E", "node.portsfile=true")
String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
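+ // enable Java assertions (-ea) and system assertions (-esa) in the node's JVM
+ // unless the build is run with -Dtests.asserts=false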
+ if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) {
+ esJavaOpts += " -ea -esa"
+ }
env.put('ES_JAVA_OPTS', esJavaOpts)
for (Map.Entry property : System.properties.entrySet()) {
if (property.key.startsWith('tests.es.')) {
@@ -159,7 +158,10 @@ class NodeInfo {
}
}
env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options'))
- args.addAll("-E", "path.conf=${confDir}", "-E", "path.data=${-> dataDir.toString()}")
+ args.addAll("-E", "path.conf=${confDir}")
+ if (!System.properties.containsKey("tests.es.path.data")) {
+ args.addAll("-E", "path.data=${-> dataDir.toString()}")
+ }
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
args.add('"') // end the entire command, quoted
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
index 51bccb4fe7580..6494e500f33ab 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy
@@ -20,19 +20,28 @@ package org.elasticsearch.gradle.test
import com.carrotsearch.gradle.junit4.RandomizedTestingTask
import org.elasticsearch.gradle.BuildPlugin
+import org.gradle.api.DefaultTask
import org.gradle.api.Task
+import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.internal.tasks.options.Option
import org.gradle.api.plugins.JavaBasePlugin
import org.gradle.api.tasks.Input
-import org.gradle.util.ConfigureUtil
+import org.gradle.api.tasks.TaskState
+
+import java.nio.charset.StandardCharsets
+import java.nio.file.Files
+import java.util.stream.Stream
/**
- * Runs integration tests, but first starts an ES cluster,
- * and passes the ES cluster info as parameters to the tests.
+ * A wrapper task around setting up a cluster and running rest tests.
*/
-public class RestIntegTestTask extends RandomizedTestingTask {
+public class RestIntegTestTask extends DefaultTask {
+
+ protected ClusterConfiguration clusterConfig
+
+ protected RandomizedTestingTask runner
- ClusterConfiguration clusterConfig
+ protected Task clusterInit
/** Info about nodes in the integ test cluster. Note this is *not* available until runtime. */
List<NodeInfo> nodes
@@ -42,37 +51,62 @@ public class RestIntegTestTask extends RandomizedTestingTask {
boolean includePackaged = false
public RestIntegTestTask() {
- description = 'Runs rest tests against an elasticsearch cluster.'
- group = JavaBasePlugin.VERIFICATION_GROUP
- dependsOn(project.testClasses)
- classpath = project.sourceSets.test.runtimeClasspath
- testClassesDir = project.sourceSets.test.output.classesDir
- clusterConfig = new ClusterConfiguration(project)
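+ // the task is now a lightweight wrapper around a separate "${name}Runner" test task,
+ // so cluster formation tasks can be wired in as dependencies of the runner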
+ runner = project.tasks.create("${name}Runner", RandomizedTestingTask.class)
+ super.dependsOn(runner)
+ clusterInit = project.tasks.create(name: "${name}Cluster#init", dependsOn: project.testClasses)
+ runner.dependsOn(clusterInit)
+ runner.classpath = project.sourceSets.test.runtimeClasspath
+ runner.testClassesDir = project.sourceSets.test.output.classesDir
+ clusterConfig = project.extensions.create("${name}Cluster", ClusterConfiguration.class, project)
// start with the common test configuration
- configure(BuildPlugin.commonTestConfig(project))
+ runner.configure(BuildPlugin.commonTestConfig(project))
// override/add more for rest tests
- parallelism = '1'
- include('**/*IT.class')
- systemProperty('tests.rest.load_packaged', 'false')
+ runner.parallelism = '1'
+ runner.include('**/*IT.class')
+ runner.systemProperty('tests.rest.load_packaged', 'false')
// we pass all nodes to the rest cluster to allow the clients to round-robin between them
// this is more realistic than just talking to a single node
- systemProperty('tests.rest.cluster', "${-> nodes.collect{it.httpUri()}.join(",")}")
- systemProperty('tests.config.dir', "${-> nodes[0].confDir}")
+ runner.systemProperty('tests.rest.cluster', "${-> nodes.collect{it.httpUri()}.join(",")}")
+ runner.systemProperty('tests.config.dir', "${-> nodes[0].confDir}")
// TODO: our "client" qa tests currently use the rest-test plugin. instead they should have their own plugin
// that sets up the test cluster and passes this transport uri instead of http uri. Until then, we pass
// both as separate sysprops
- systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
+ runner.systemProperty('tests.cluster', "${-> nodes[0].transportUri()}")
+
+ // dump errors and warnings from cluster log on failure
+ TaskExecutionAdapter logDumpListener = new TaskExecutionAdapter() {
+ @Override
+ void afterExecute(Task task, TaskState state) {
+ if (state.failure != null) {
+ for (NodeInfo nodeInfo : nodes) {
+ printLogExcerpt(nodeInfo)
+ }
+ }
+ }
+ }
+ runner.doFirst {
+ project.gradle.addListener(logDumpListener)
+ }
+ runner.doLast {
+ project.gradle.removeListener(logDumpListener)
+ }
// copy the rest spec/tests into the test resources
RestSpecHack.configureDependencies(project)
project.afterEvaluate {
- dependsOn(RestSpecHack.configureTask(project, includePackaged))
+ runner.dependsOn(RestSpecHack.configureTask(project, includePackaged))
}
// this must run after all projects have been configured, so we know any project
// references can be accessed as a fully configured
project.gradle.projectsEvaluated {
- nodes = ClusterFormationTasks.setup(project, this, clusterConfig)
+ if (enabled == false) {
+ runner.enabled = false
+ clusterInit.enabled = false
+ return // no need to add cluster formation tasks if the task won't run!
+ }
+ nodes = ClusterFormationTasks.setup(project, "${name}Cluster", runner, clusterConfig)
+ super.dependsOn(runner.finalizedBy)
}
}
@@ -84,25 +118,16 @@ public class RestIntegTestTask extends RandomizedTestingTask {
clusterConfig.debug = enabled;
}
- @Input
- public void cluster(Closure closure) {
- ConfigureUtil.configure(closure, clusterConfig)
- }
-
- public ClusterConfiguration getCluster() {
- return clusterConfig
- }
-
public List<NodeInfo> getNodes() {
return nodes
}
@Override
public Task dependsOn(Object... dependencies) {
- super.dependsOn(dependencies)
+ runner.dependsOn(dependencies)
for (Object dependency : dependencies) {
if (dependency instanceof Fixture) {
- finalizedBy(((Fixture)dependency).stopTask)
+ runner.finalizedBy(((Fixture)dependency).getStopTask())
}
}
return this
@@ -110,11 +135,54 @@ public class RestIntegTestTask extends RandomizedTestingTask {
@Override
public void setDependsOn(Iterable<?> dependencies) {
- super.setDependsOn(dependencies)
+ runner.setDependsOn(dependencies)
for (Object dependency : dependencies) {
if (dependency instanceof Fixture) {
- finalizedBy(((Fixture)dependency).stopTask)
+ runner.finalizedBy(((Fixture)dependency).getStopTask())
}
}
}
+
+ @Override
+ public Task mustRunAfter(Object... tasks) {
+ clusterInit.mustRunAfter(tasks)
+ }
+
+ /** Print out an excerpt of the log from the given node. */
+ protected static void printLogExcerpt(NodeInfo nodeInfo) {
+ File logFile = new File(nodeInfo.homeDir, "logs/${nodeInfo.clusterName}.log")
+ println("\nCluster ${nodeInfo.clusterName} - node ${nodeInfo.nodeNum} log excerpt:")
+ println("(full log at ${logFile})")
+ println('-----------------------------------------')
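+ // show startup output in full until the "recovered [N] indices" line, then only
+ // WARN/ERROR entries (plus their continuation lines), counting the lines skipped in between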
+ Stream<String> stream = Files.lines(logFile.toPath(), StandardCharsets.UTF_8)
+ try {
+ boolean inStartup = true
+ boolean inExcerpt = false
+ int linesSkipped = 0
+ for (String line : stream) {
+ if (line.startsWith("[")) {
+ inExcerpt = false // clear with the next log message
+ }
+ if (line =~ /(\[WARN\])|(\[ERROR\])/) {
+ inExcerpt = true // show warnings and errors
+ }
+ if (inStartup || inExcerpt) {
+ if (linesSkipped != 0) {
+ println("... SKIPPED ${linesSkipped} LINES ...")
+ }
+ println(line)
+ linesSkipped = 0
+ } else {
+ ++linesSkipped
+ }
+ if (line =~ /recovered \[\d+\] indices into cluster_state/) {
+ inStartup = false
+ }
+ }
+ } finally {
+ stream.close()
+ }
+ println('=========================================')
+
+ }
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy
index 176b02cf9b0de..da1462412812a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestTestPlugin.groovy
@@ -22,6 +22,7 @@ import org.elasticsearch.gradle.BuildPlugin
import org.gradle.api.InvalidUserDataException
import org.gradle.api.Plugin
import org.gradle.api.Project
+import org.gradle.api.plugins.JavaBasePlugin
/**
* Adds support for starting an Elasticsearch cluster before running integration
@@ -39,11 +40,13 @@ public class RestTestPlugin implements Plugin<Project> {
if (false == REQUIRED_PLUGINS.any {project.pluginManager.hasPlugin(it)}) {
throw new InvalidUserDataException('elasticsearch.rest-test '
+ 'requires either elasticsearch.build or '
- + 'elasticsearch.standalone-test')
+ + 'elasticsearch.standalone-rest-test')
}
RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class)
- integTest.cluster.distribution = 'zip' // rest tests should run with the real zip
+ integTest.description = 'Runs rest tests against an elasticsearch cluster.'
+ integTest.group = JavaBasePlugin.VERIFICATION_GROUP
+ integTest.clusterConfig.distribution = 'zip' // rest tests should run with the real zip
integTest.mustRunAfter(project.precommit)
project.check.dependsOn(integTest)
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy
index a71dc59dbf914..a88152d7865ff 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RunTask.groovy
@@ -18,7 +18,7 @@ public class RunTask extends DefaultTask {
clusterConfig.daemonize = false
clusterConfig.distribution = 'zip'
project.afterEvaluate {
- ClusterFormationTasks.setup(project, this, clusterConfig)
+ ClusterFormationTasks.setup(project, name, this, clusterConfig)
}
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
index 6e01767101755..c48dc890ab080 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/StandaloneRestTestPlugin.groovy
@@ -40,9 +40,9 @@ public class StandaloneRestTestPlugin implements Plugin<Project> {
@Override
public void apply(Project project) {
if (project.pluginManager.hasPlugin('elasticsearch.build')) {
- throw new InvalidUserDataException('elasticsearch.standalone-test, '
- + 'elasticsearch.standalone-test, and elasticsearch.build are '
- + 'mutually exclusive')
+ throw new InvalidUserDataException('elasticsearch.standalone-test '
+ + 'elasticsearch.standalone-rest-test, and elasticsearch.build '
+ + 'are mutually exclusive')
}
project.pluginManager.apply(JavaBasePlugin)
project.pluginManager.apply(RandomizedTestingPlugin)
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy
new file mode 100644
index 0000000000000..7e370fd69e2d6
--- /dev/null
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/TestWithDependenciesPlugin.groovy
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.gradle.test
+
+import org.elasticsearch.gradle.plugin.PluginBuildPlugin
+import org.gradle.api.Plugin
+import org.gradle.api.Project
+import org.gradle.api.artifacts.Dependency
+import org.gradle.api.artifacts.ProjectDependency
+import org.gradle.api.tasks.Copy
+
+/**
+ * A plugin to run tests that depend on other plugins or modules.
+ *
+ * This plugin will add the plugin-metadata and properties files for each
+ * dependency to the test source set.
+ */
+class TestWithDependenciesPlugin implements Plugin<Project> {
+
+ @Override
+ void apply(Project project) {
+ if (project.isEclipse) {
+ /* The changes this plugin makes both break and aren't needed by
+ * Eclipse. This is because Eclipse flattens main and test
+ * dependencies into a single dependency. Because Eclipse is
+ * "special".... */
+ return
+ }
+
+ project.configurations.testCompile.dependencies.all { Dependency dep ->
+ // this closure is run every time a compile dependency is added
+ if (dep instanceof ProjectDependency && dep.dependencyProject.plugins.hasPlugin(PluginBuildPlugin)) {
+ project.gradle.projectsEvaluated {
+ addPluginResources(project, dep.dependencyProject)
+ }
+ }
+ }
+ }
+
+ private static addPluginResources(Project project, Project pluginProject) {
+ String outputDir = "${project.buildDir}/generated-resources/${pluginProject.name}"
+ String taskName = ClusterFormationTasks.pluginTaskName("copy", pluginProject.name, "Metadata")
+ Copy copyPluginMetadata = project.tasks.create(taskName, Copy.class)
+ copyPluginMetadata.into(outputDir)
+ copyPluginMetadata.from(pluginProject.tasks.pluginProperties)
+ copyPluginMetadata.from(pluginProject.file('src/main/plugin-metadata'))
+ project.sourceSets.test.output.dir(outputDir, builtBy: taskName)
+ }
+}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy
new file mode 100644
index 0000000000000..fa08a8f9c6667
--- /dev/null
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/VagrantFixture.groovy
@@ -0,0 +1,54 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.gradle.test
+
+import org.elasticsearch.gradle.vagrant.VagrantCommandTask
+import org.gradle.api.Task
+
+/**
+ * A fixture for integration tests which runs in a virtual machine launched by Vagrant.
+ */
+class VagrantFixture extends VagrantCommandTask implements Fixture {
+
+ private VagrantCommandTask stopTask
+
+ public VagrantFixture() {
+ this.stopTask = project.tasks.create(name: "${name}#stop", type: VagrantCommandTask) {
+ command 'halt'
+ }
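+ // finalizedBy guarantees the halt task runs once this fixture's command finishes, even on failure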
+ finalizedBy this.stopTask
+ }
+
+ @Override
+ void setBoxName(String boxName) {
+ super.setBoxName(boxName)
+ this.stopTask.setBoxName(boxName)
+ }
+
+ @Override
+ void setEnvironmentVars(Map<String, String> environmentVars) {
+ super.setEnvironmentVars(environmentVars)
+ this.stopTask.setEnvironmentVars(environmentVars)
+ }
+
+ @Override
+ public Task getStopTask() {
+ return this.stopTask
+ }
+}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy
index 65b90c4d9a0cd..110f2fc7e8461 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/BatsOverVagrantTask.groovy
@@ -27,12 +27,15 @@ import org.gradle.api.tasks.Input
public class BatsOverVagrantTask extends VagrantCommandTask {
@Input
- String command
+ String remoteCommand
BatsOverVagrantTask() {
- project.afterEvaluate {
- args 'ssh', boxName, '--command', command
- }
+ command = 'ssh'
+ }
+
+ void setRemoteCommand(String remoteCommand) {
+ this.remoteCommand = Objects.requireNonNull(remoteCommand)
+ setArgs(['--command', remoteCommand])
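+ // with command = 'ssh' set in the constructor, the resulting invocation is:
+ // vagrant ssh <box> --command '<remoteCommand>'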
}
@Override
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
index 3f980c57a49a6..e15759a1fe588 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/TapLoggerOutputStream.groovy
@@ -19,11 +19,9 @@
package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
-import groovy.transform.PackageScope
import org.gradle.api.GradleScriptException
import org.gradle.api.logging.Logger
-import org.gradle.logging.ProgressLogger
-import org.gradle.logging.ProgressLoggerFactory
+import org.gradle.internal.logging.progress.ProgressLogger
import java.util.regex.Matcher
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy
index ecba08d7d4cb9..aab120e8d049a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantCommandTask.groovy
@@ -21,9 +21,15 @@ package org.elasticsearch.gradle.vagrant
import org.apache.commons.io.output.TeeOutputStream
import org.elasticsearch.gradle.LoggedExec
import org.gradle.api.tasks.Input
-import org.gradle.logging.ProgressLoggerFactory
+import org.gradle.api.tasks.Optional
+import org.gradle.api.tasks.TaskAction
+import org.gradle.internal.logging.progress.ProgressLoggerFactory
import javax.inject.Inject
+import java.util.concurrent.CountDownLatch
+import java.util.concurrent.locks.Lock
+import java.util.concurrent.locks.ReadWriteLock
+import java.util.concurrent.locks.ReentrantLock
/**
* Runs a vagrant command. Pretty much like Exec task but with a nicer output
@@ -31,6 +37,12 @@ import javax.inject.Inject
*/
public class VagrantCommandTask extends LoggedExec {
+ @Input
+ String command
+
+ @Input @Optional
+ String subcommand
+
@Input
String boxName
@@ -40,15 +52,36 @@ public class VagrantCommandTask extends LoggedExec {
public VagrantCommandTask() {
executable = 'vagrant'
+ // We're using afterEvaluate here to slot in some logic that captures configurations and
+ // modifies the command line right before the main execution happens. The reason that we
+ // call doFirst instead of just doing the work in the afterEvaluate is that the latter
+ // restricts how subclasses can extend functionality. Calling afterEvaluate is like having
+ // all the logic of a task happening at construction time, instead of at execution time
+ // where a subclass can override or extend the logic.
project.afterEvaluate {
- // It'd be nice if --machine-readable were, well, nice
- standardOutput = new TeeOutputStream(standardOutput, createLoggerOutputStream())
- if (environmentVars != null) {
- environment environmentVars
+ doFirst {
+ if (environmentVars != null) {
+ environment environmentVars
+ }
+
+ // Build our command line for vagrant
+ def vagrantCommand = [executable, command]
+ if (subcommand != null) {
+ vagrantCommand = vagrantCommand + subcommand
+ }
+ commandLine([*vagrantCommand, boxName, *args])
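+ // e.g. command 'box', subcommand 'update', boxName 'ubuntu-1404' yields:
+ // vagrant box update ubuntu-1404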
+
+ // It'd be nice if --machine-readable were, well, nice
+ standardOutput = new TeeOutputStream(standardOutput, createLoggerOutputStream())
}
}
}
+ @Inject
+ ProgressLoggerFactory getProgressLoggerFactory() {
+ throw new UnsupportedOperationException()
+ }
+
protected OutputStream createLoggerOutputStream() {
return new VagrantLoggerOutputStream(
command: commandLine.join(' '),
@@ -57,9 +90,4 @@ public class VagrantCommandTask extends LoggedExec {
stuff starts with ==> $box */
squashedPrefix: "==> $boxName: ")
}
-
- @Inject
- ProgressLoggerFactory getProgressLoggerFactory() {
- throw new UnsupportedOperationException();
- }
}
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy
index 331a638b5cade..e899c0171298b 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantLoggerOutputStream.groovy
@@ -19,9 +19,7 @@
package org.elasticsearch.gradle.vagrant
import com.carrotsearch.gradle.junit4.LoggingOutputStream
-import org.gradle.api.logging.Logger
-import org.gradle.logging.ProgressLogger
-import org.gradle.logging.ProgressLoggerFactory
+import org.gradle.internal.logging.progress.ProgressLogger
/**
* Adapts an OutputStream being written to by vagrant into a ProgressLogger. It
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy
index f16913d5be64a..e6e7fca62f97e 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantPropertiesExtension.groovy
@@ -25,12 +25,6 @@ class VagrantPropertiesExtension {
@Input
List<String> boxes
- @Input
- Long testSeed
-
- @Input
- String formattedTestSeed
-
@Input
String upgradeFromVersion
diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
index a5bb054a8b646..c8d77ea2fbfe5 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy
@@ -1,14 +1,15 @@
package org.elasticsearch.gradle.vagrant
+import com.carrotsearch.gradle.junit4.RandomizedTestingPlugin
import org.elasticsearch.gradle.FileContentsTask
-import org.gradle.BuildAdapter
-import org.gradle.BuildResult
import org.gradle.api.*
import org.gradle.api.artifacts.dsl.RepositoryHandler
+import org.gradle.api.execution.TaskExecutionAdapter
import org.gradle.api.internal.artifacts.dependencies.DefaultProjectDependency
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.Delete
import org.gradle.api.tasks.Exec
+import org.gradle.api.tasks.TaskState
class VagrantTestPlugin implements Plugin<Project> {
@@ -17,12 +18,11 @@ class VagrantTestPlugin implements Plugin<Project> {
'centos-6',
'centos-7',
'debian-8',
- 'fedora-24',
+ 'fedora-25',
'oel-6',
'oel-7',
- 'opensuse-13',
+ 'opensuse-42',
'sles-12',
- 'ubuntu-1204',
'ubuntu-1404',
'ubuntu-1604'
]
@@ -41,6 +41,7 @@ class VagrantTestPlugin implements Plugin {
private static final BATS = 'bats'
private static final String BATS_TEST_COMMAND ="cd \$BATS_ARCHIVES && sudo bats --tap \$BATS_TESTS/*.$BATS"
+ private static final String PLATFORM_TEST_COMMAND ="rm -rf ~/elasticsearch && rsync -r /elasticsearch/ ~/elasticsearch && cd ~/elasticsearch && \$GRADLE_HOME/bin/gradle test integTest"
@Override
void apply(Project project) {
@@ -82,29 +83,6 @@ class VagrantTestPlugin implements Plugin {
}
}
- private static Set<String> listVersions(Project project) {
- Node xml
- new URL('https://repo1.maven.org/maven2/org/elasticsearch/elasticsearch/maven-metadata.xml').openStream().withStream { s ->
- xml = new XmlParser().parse(s)
- }
- Set<String> versions = new TreeSet<>(xml.versioning.versions.version.collect { it.text() }.findAll { it ==~ /[5]\.\d\.\d/ })
- if (versions.isEmpty() == false) {
- return versions;
- }
-
- // If no version is found, we run the tests with the current version
- return Collections.singleton(project.version);
- }
-
- private static File getVersionsFile(Project project) {
- File versions = new File(project.projectDir, 'versions');
- if (versions.exists() == false) {
- // Use the elasticsearch's versions file from project :qa:vagrant
- versions = project.project(":qa:vagrant").file('versions')
- }
- return versions
- }
-
private static void configureBatsRepositories(Project project) {
RepositoryHandler repos = project.repositories
@@ -123,33 +101,13 @@ class VagrantTestPlugin implements Plugin {
private static void createBatsConfiguration(Project project) {
project.configurations.create(BATS)
- Long seed
- String formattedSeed = null
- String[] upgradeFromVersions
-
- String maybeTestsSeed = System.getProperty("tests.seed", null);
- if (maybeTestsSeed != null) {
- List<String> seeds = maybeTestsSeed.tokenize(':')
- if (seeds.size() != 0) {
- String masterSeed = seeds.get(0)
- seed = new BigInteger(masterSeed, 16).longValue()
- formattedSeed = maybeTestsSeed
- }
- }
- if (formattedSeed == null) {
- seed = new Random().nextLong()
- formattedSeed = String.format("%016X", seed)
- }
-
- String maybeUpdradeFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null)
- if (maybeUpdradeFromVersions != null) {
- upgradeFromVersions = maybeUpdradeFromVersions.split(",")
- } else {
- upgradeFromVersions = getVersionsFile(project)
+ String upgradeFromVersion = System.getProperty("tests.packaging.upgradeVersion");
+ if (upgradeFromVersion == null) {
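+ // derive the upgrade-from version from the first component of the test seed, so a
+ // failing packaging run can be reproduced by passing the same -Dtests.seed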
+ String firstPartOfSeed = project.rootProject.testSeed.tokenize(':').get(0)
+ final long seed = Long.parseUnsignedLong(firstPartOfSeed, 16)
+ upgradeFromVersion = project.indexCompatVersions[new Random(seed).nextInt(project.indexCompatVersions.size())]
}
- String upgradeFromVersion = upgradeFromVersions[new Random(seed).nextInt(upgradeFromVersions.length)]
-
DISTRIBUTION_ARCHIVES.each {
// Adds a dependency for the current version
project.dependencies.add(BATS, project.dependencies.project(path: ":distribution:${it}", configuration: 'archives'))
@@ -160,10 +118,7 @@ class VagrantTestPlugin implements Plugin {
project.dependencies.add(BATS, "org.elasticsearch.distribution.${it}:elasticsearch:${upgradeFromVersion}@${it}")
}
- project.extensions.esvagrant.testSeed = seed
- project.extensions.esvagrant.formattedTestSeed = formattedSeed
project.extensions.esvagrant.upgradeFromVersion = upgradeFromVersion
- project.extensions.esvagrant.upgradeFromVersions = upgradeFromVersions
}
private static void createCleanTask(Project project) {
@@ -193,7 +148,6 @@ class VagrantTestPlugin implements Plugin {
Task createBatsDirsTask = project.tasks.create('createBatsDirs')
createBatsDirsTask.outputs.dir batsDir
- createBatsDirsTask.dependsOn project.tasks.vagrantVerifyVersions
createBatsDirsTask.doLast {
batsDir.mkdirs()
}
@@ -223,7 +177,7 @@ class VagrantTestPlugin implements Plugin {
// Now we iterate over dependencies of the bats configuration. When a project dependency is found,
// we bring back its own archives, test files or test utils.
project.afterEvaluate {
- project.configurations.bats.dependencies.findAll {it.configuration == BATS }.each { d ->
+ project.configurations.bats.dependencies.findAll {it.targetConfiguration == BATS }.each { d ->
if (d instanceof DefaultProjectDependency) {
DefaultProjectDependency externalBatsDependency = (DefaultProjectDependency) d
Project externalBatsProject = externalBatsDependency.dependencyProject
@@ -254,51 +208,9 @@ class VagrantTestPlugin implements Plugin {
contents project.extensions.esvagrant.upgradeFromVersion
}
- Task vagrantSetUpTask = project.tasks.create('vagrantSetUp')
+ Task vagrantSetUpTask = project.tasks.create('setupBats')
vagrantSetUpTask.dependsOn 'vagrantCheckVersion'
vagrantSetUpTask.dependsOn copyBatsTests, copyBatsUtils, copyBatsArchives, createVersionFile, createUpgradeFromFile
- vagrantSetUpTask.doFirst {
- project.gradle.addBuildListener new BuildAdapter() {
- @Override
- void buildFinished(BuildResult result) {
- if (result.failure) {
- println "Reproduce with: gradle packagingTest "
- +"-Pvagrant.boxes=${project.extensions.esvagrant.boxes} "
- + "-Dtests.seed=${project.extensions.esvagrant.formattedSeed} "
- + "-Dtests.packaging.upgrade.from.versions=${project.extensions.esvagrant.upgradeFromVersions.join(",")}"
- }
- }
- }
- }
- }
-
- private static void createUpdateVersionsTask(Project project) {
- project.tasks.create('vagrantUpdateVersions') {
- description 'Update file containing options for the\n "starting" version in the "upgrade from" packaging tests.'
- group 'Verification'
- doLast {
- File versions = getVersionsFile(project)
- versions.text = listVersions(project).join('\n') + '\n'
- }
- }
- }
-
- private static void createVerifyVersionsTask(Project project) {
- project.tasks.create('vagrantVerifyVersions') {
- description 'Verify the file containing options for the\n "starting" version in the "upgrade from" packaging tests.'
- group 'Verification'
- doLast {
- String maybeUpdateFromVersions = System.getProperty("tests.packaging.upgrade.from.versions", null)
- if (maybeUpdateFromVersions == null) {
- Set<String> versions = listVersions(project)
- Set<String> actualVersions = new TreeSet<>(project.extensions.esvagrant.upgradeFromVersions)
- if (!versions.equals(actualVersions)) {
- throw new GradleException("out-of-date versions " + actualVersions +
- ", expected " + versions + "; run gradle vagrantUpdateVersions")
- }
- }
- }
- }
}
private static void createCheckVagrantVersionTask(Project project) {
@@ -350,16 +262,26 @@ class VagrantTestPlugin implements Plugin {
}
}
+ private static void createPlatformTestTask(Project project) {
+ project.tasks.create('platformTest') {
+ group 'Verification'
+ description "Test unit and integ tests on different platforms using vagrant.\n" +
+ " Specify the vagrant boxes to test using the gradle property 'vagrant.boxes'.\n" +
+ " 'all' can be used to test all available boxes. The available boxes are: \n" +
+ " ${BOXES}"
+ dependsOn 'vagrantCheckVersion'
+ }
+ }
+
private static void createVagrantTasks(Project project) {
createCleanTask(project)
createStopTask(project)
createSmokeTestTask(project)
- createUpdateVersionsTask(project)
- createVerifyVersionsTask(project)
createCheckVagrantVersionTask(project)
createCheckVirtualBoxVersionTask(project)
createPrepareVagrantTestEnvTask(project)
createPackagingTestTask(project)
+ createPlatformTestTask(project)
}
private static void createVagrantBoxesTasks(Project project) {
@@ -377,12 +299,15 @@ class VagrantTestPlugin implements Plugin {
assert project.tasks.virtualboxCheckVersion != null
Task virtualboxCheckVersion = project.tasks.virtualboxCheckVersion
- assert project.tasks.vagrantSetUp != null
- Task vagrantSetUp = project.tasks.vagrantSetUp
+ assert project.tasks.setupBats != null
+ Task setupBats = project.tasks.setupBats
assert project.tasks.packagingTest != null
Task packagingTest = project.tasks.packagingTest
+ assert project.tasks.platformTest != null
+ Task platformTest = project.tasks.platformTest
+
/*
* We always use the main project.rootDir as Vagrant's current working directory (VAGRANT_CWD)
* so that boxes are not duplicated for every Gradle project that use this VagrantTestPlugin.
@@ -399,24 +324,23 @@ class VagrantTestPlugin implements Plugin {
// always add a halt task for all boxes, so clean makes sure they are all shutdown
Task halt = project.tasks.create("vagrant${boxTask}#halt", VagrantCommandTask) {
+ command 'halt'
boxName box
environmentVars vagrantEnvVars
- args 'halt', box
}
stop.dependsOn(halt)
- if (project.extensions.esvagrant.boxes.contains(box) == false) {
- // we only need a halt task if this box was not specified
- continue;
- }
Task update = project.tasks.create("vagrant${boxTask}#update", VagrantCommandTask) {
+ command 'box'
+ subcommand 'update'
boxName box
environmentVars vagrantEnvVars
- args 'box', 'update', box
- dependsOn vagrantCheckVersion, virtualboxCheckVersion, vagrantSetUp
+ dependsOn vagrantCheckVersion, virtualboxCheckVersion
}
+ update.mustRunAfter(setupBats)
Task up = project.tasks.create("vagrant${boxTask}#up", VagrantCommandTask) {
+ command 'up'
boxName box
environmentVars vagrantEnvVars
/* It's important that we try to reprovision the box even if it already
@@ -429,7 +353,7 @@ class VagrantTestPlugin implements Plugin<Project> {
vagrant's default but it's possible to change that default and folks do.
But the boxes that we use are unlikely to work properly with other
virtualization providers. Thus the lock. */
- args 'up', box, '--provision', '--provider', 'virtualbox'
+ args '--provision', '--provider', 'virtualbox'
/* It'd be possible to check if the box is already up here and output
SKIPPED but that would require running vagrant status which is slow! */
dependsOn update
@@ -444,14 +368,59 @@ class VagrantTestPlugin implements Plugin {
}
vagrantSmokeTest.dependsOn(smoke)
- Task packaging = project.tasks.create("vagrant${boxTask}#packagingtest", BatsOverVagrantTask) {
+ Task packaging = project.tasks.create("vagrant${boxTask}#packagingTest", BatsOverVagrantTask) {
+ remoteCommand BATS_TEST_COMMAND
+ boxName box
+ environmentVars vagrantEnvVars
+ dependsOn up, setupBats
+ finalizedBy halt
+ }
+
+ TaskExecutionAdapter packagingReproListener = new TaskExecutionAdapter() {
+ @Override
+ void afterExecute(Task task, TaskState state) {
+ if (state.failure != null) {
+ println "REPRODUCE WITH: gradle ${packaging.path} " +
+ "-Dtests.seed=${project.testSeed} "
+ }
+ }
+ }
+ packaging.doFirst {
+ project.gradle.addListener(packagingReproListener)
+ }
+ packaging.doLast {
+ project.gradle.removeListener(packagingReproListener)
+ }
+ if (project.extensions.esvagrant.boxes.contains(box)) {
+ packagingTest.dependsOn(packaging)
+ }
+
+ Task platform = project.tasks.create("vagrant${boxTask}#platformTest", VagrantCommandTask) {
+ command 'ssh'
boxName box
environmentVars vagrantEnvVars
dependsOn up
finalizedBy halt
- command BATS_TEST_COMMAND
+ args '--command', PLATFORM_TEST_COMMAND + " -Dtests.seed=${-> project.testSeed}"
+ }
+ TaskExecutionAdapter platformReproListener = new TaskExecutionAdapter() {
+ @Override
+ void afterExecute(Task task, TaskState state) {
+ if (state.failure != null) {
+ println "REPRODUCE WITH: gradle ${platform.path} " +
+ "-Dtests.seed=${project.testSeed} "
+ }
+ }
+ }
+ platform.doFirst {
+ project.gradle.addListener(platformReproListener)
+ }
+ platform.doLast {
+ project.gradle.removeListener(platformReproListener)
+ }
+ if (project.extensions.esvagrant.boxes.contains(box)) {
+ platformTest.dependsOn(platform)
}
- packagingTest.dependsOn(packaging)
}
}
}
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
index cbfa31d1aaf5b..9bd14675d34a4 100644
--- a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheck.java
@@ -28,6 +28,7 @@
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.HashSet;
+import java.util.Objects;
import java.util.Set;
/**
@@ -49,6 +50,7 @@ public static void main(String[] args) throws IOException {
Path rootPath = null;
boolean skipIntegTestsInDisguise = false;
boolean selfTest = false;
+ boolean checkMainClasses = false;
for (int i = 0; i < args.length; i++) {
String arg = args[i];
switch (arg) {
@@ -64,6 +66,9 @@ public static void main(String[] args) throws IOException {
case "--self-test":
selfTest = true;
break;
+ case "--main":
+ checkMainClasses = true;
+ break;
case "--":
rootPath = Paths.get(args[++i]);
break;
@@ -73,28 +78,43 @@ public static void main(String[] args) throws IOException {
}
NamingConventionsCheck check = new NamingConventionsCheck(testClass, integTestClass);
- check.check(rootPath, skipIntegTestsInDisguise);
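+ // --main inverts the check: classes under src/main that are named or typed like tests are violations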
+ if (checkMainClasses) {
+ check.checkMain(rootPath);
+ } else {
+ check.checkTests(rootPath, skipIntegTestsInDisguise);
+ }
if (selfTest) {
- assertViolation("WrongName", check.missingSuffix);
- assertViolation("WrongNameTheSecond", check.missingSuffix);
- assertViolation("DummyAbstractTests", check.notRunnable);
- assertViolation("DummyInterfaceTests", check.notRunnable);
- assertViolation("InnerTests", check.innerClasses);
- assertViolation("NotImplementingTests", check.notImplementing);
- assertViolation("PlainUnit", check.pureUnitTest);
+ if (checkMainClasses) {
+ assertViolation(NamingConventionsCheckInMainTests.class.getName(), check.testsInMain);
+ assertViolation(NamingConventionsCheckInMainIT.class.getName(), check.testsInMain);
+ } else {
+ assertViolation("WrongName", check.missingSuffix);
+ assertViolation("WrongNameTheSecond", check.missingSuffix);
+ assertViolation("DummyAbstractTests", check.notRunnable);
+ assertViolation("DummyInterfaceTests", check.notRunnable);
+ assertViolation("InnerTests", check.innerClasses);
+ assertViolation("NotImplementingTests", check.notImplementing);
+ assertViolation("PlainUnit", check.pureUnitTest);
+ }
}
// Now we should have no violations
- assertNoViolations("Not all subclasses of " + check.testClass.getSimpleName()
- + " match the naming convention. Concrete classes must end with [Tests]", check.missingSuffix);
+ assertNoViolations(
+ "Not all subclasses of " + check.testClass.getSimpleName()
+ + " match the naming convention. Concrete classes must end with [Tests]",
+ check.missingSuffix);
assertNoViolations("Classes ending with [Tests] are abstract or interfaces", check.notRunnable);
assertNoViolations("Found inner classes that are tests, which are excluded from the test runner", check.innerClasses);
assertNoViolations("Pure Unit-Test found must subclass [" + check.testClass.getSimpleName() + "]", check.pureUnitTest);
assertNoViolations("Classes ending with [Tests] must subclass [" + check.testClass.getSimpleName() + "]", check.notImplementing);
+ assertNoViolations(
+ "Classes ending with [Tests] or [IT] or extending [" + check.testClass.getSimpleName() + "] must be in src/test/java",
+ check.testsInMain);
if (skipIntegTestsInDisguise == false) {
- assertNoViolations("Subclasses of " + check.integTestClass.getSimpleName() +
- " should end with IT as they are integration tests", check.integTestsInDisguise);
+ assertNoViolations(
+ "Subclasses of " + check.integTestClass.getSimpleName() + " should end with IT as they are integration tests",
+ check.integTestsInDisguise);
}
}
@@ -104,84 +124,76 @@ public static void main(String[] args) throws IOException {
private final Set<Class<?>> integTestsInDisguise = new HashSet<>();
private final Set<Class<?>> notRunnable = new HashSet<>();
private final Set<Class<?>> innerClasses = new HashSet<>();
+ private final Set<Class<?>> testsInMain = new HashSet<>();
private final Class<?> testClass;
private final Class<?> integTestClass;
public NamingConventionsCheck(Class<?> testClass, Class<?> integTestClass) {
- this.testClass = testClass;
+ this.testClass = Objects.requireNonNull(testClass, "--test-class is required");
this.integTestClass = integTestClass;
}
- public void check(Path rootPath, boolean skipTestsInDisguised) throws IOException {
- Files.walkFileTree(rootPath, new FileVisitor<Path>() {
- /**
- * The package name of the directory we are currently visiting. Kept as a string rather than something fancy because we load
- * just about every class and doing so requires building a string out of it anyway. At least this way we don't need to build the
- * first part of the string over and over and over again.
- */
- private String packageName;
-
+ public void checkTests(Path rootPath, boolean skipTestsInDisguised) throws IOException {
+ Files.walkFileTree(rootPath, new TestClassVisitor() {
@Override
- public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
- // First we visit the root directory
- if (packageName == null) {
- // And it package is empty string regardless of the directory name
- packageName = "";
- } else {
- packageName += dir.getFileName() + ".";
+ protected void visitTestClass(Class<?> clazz) {
+ if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
+ integTestsInDisguise.add(clazz);
+ }
+ if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+ notRunnable.add(clazz);
+ } else if (isTestCase(clazz) == false) {
+ notImplementing.add(clazz);
+ } else if (Modifier.isStatic(clazz.getModifiers())) {
+ innerClasses.add(clazz);
}
- return FileVisitResult.CONTINUE;
}
@Override
- public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
- // Go up one package by jumping back to the second to last '.'
- packageName = packageName.substring(0, 1 + packageName.lastIndexOf('.', packageName.length() - 2));
- return FileVisitResult.CONTINUE;
+ protected void visitIntegrationTestClass(Class<?> clazz) {
+ if (isTestCase(clazz) == false) {
+ notImplementing.add(clazz);
+ }
}
@Override
- public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
- String filename = file.getFileName().toString();
- if (filename.endsWith(".class")) {
- String className = filename.substring(0, filename.length() - ".class".length());
- Class<?> clazz = loadClassWithoutInitializing(packageName + className);
- if (clazz.getName().endsWith("Tests")) {
- if (skipTestsInDisguised == false && integTestClass.isAssignableFrom(clazz)) {
- integTestsInDisguise.add(clazz);
- }
- if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
- notRunnable.add(clazz);
- } else if (isTestCase(clazz) == false) {
- notImplementing.add(clazz);
- } else if (Modifier.isStatic(clazz.getModifiers())) {
- innerClasses.add(clazz);
- }
- } else if (clazz.getName().endsWith("IT")) {
- if (isTestCase(clazz) == false) {
- notImplementing.add(clazz);
- }
- } else if (Modifier.isAbstract(clazz.getModifiers()) == false && Modifier.isInterface(clazz.getModifiers()) == false) {
- if (isTestCase(clazz)) {
- missingSuffix.add(clazz);
- } else if (junit.framework.Test.class.isAssignableFrom(clazz)) {
- pureUnitTest.add(clazz);
- }
- }
+ protected void visitOtherClass(Class<?> clazz) {
+ if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+ return;
+ }
+ if (isTestCase(clazz)) {
+ missingSuffix.add(clazz);
+ } else if (junit.framework.Test.class.isAssignableFrom(clazz)) {
+ pureUnitTest.add(clazz);
}
- return FileVisitResult.CONTINUE;
+ }
+ });
+ }
+
+ public void checkMain(Path rootPath) throws IOException {
+ Files.walkFileTree(rootPath, new TestClassVisitor() {
+ @Override
+ protected void visitTestClass(Class<?> clazz) {
+ testsInMain.add(clazz);
}
- private boolean isTestCase(Class<?> clazz) {
- return testClass.isAssignableFrom(clazz);
+ @Override
+ protected void visitIntegrationTestClass(Class<?> clazz) {
+ testsInMain.add(clazz);
}
@Override
- public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
- throw exc;
+ protected void visitOtherClass(Class<?> clazz) {
+ if (Modifier.isAbstract(clazz.getModifiers()) || Modifier.isInterface(clazz.getModifiers())) {
+ return;
+ }
+ if (isTestCase(clazz)) {
+ testsInMain.add(clazz);
+ }
}
});
+
}
/**
@@ -203,7 +215,7 @@ private static void assertNoViolations(String message, Set<Class<?>> set) {
* similar enough.
*/
private static void assertViolation(String className, Set<Class<?>> set) {
- className = "org.elasticsearch.test.NamingConventionsCheckBadClasses$" + className;
+ className = className.startsWith("org") ? className : "org.elasticsearch.test.NamingConventionsCheckBadClasses$" + className;
if (false == set.remove(loadClassWithoutInitializing(className))) {
System.err.println("Error in NamingConventionsCheck! Expected [" + className + "] to be a violation but wasn't.");
System.exit(1);
@@ -229,4 +241,74 @@ static Class<?> loadClassWithoutInitializing(String name) {
throw new RuntimeException(e);
}
}
+
+ abstract class TestClassVisitor implements FileVisitor<Path> {
+ /**
+ * The package name of the directory we are currently visiting. Kept as a string rather than something fancy because we load
+ * just about every class and doing so requires building a string out of it anyway. At least this way we don't need to build the
+ * first part of the string over and over and over again.
+ */
+ private String packageName;
+
+ /**
+ * Visit classes named like a test.
+ */
+ protected abstract void visitTestClass(Class<?> clazz);
+ /**
+ * Visit classes named like an integration test.
+ */
+ protected abstract void visitIntegrationTestClass(Class<?> clazz);
+ /**
+ * Visit classes not named like a test at all.
+ */
+ protected abstract void visitOtherClass(Class<?> clazz);
+
+ @Override
+ public final FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
+ // First we visit the root directory
+ if (packageName == null) {
+ // And it package is empty string regardless of the directory name
+ packageName = "";
+ } else {
+ packageName += dir.getFileName() + ".";
+ }
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public final FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
+ // Go up one package by jumping back to the second to last '.'
+ packageName = packageName.substring(0, 1 + packageName.lastIndexOf('.', packageName.length() - 2));
+ return FileVisitResult.CONTINUE;
+ }
+
+ @Override
+ public final FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
+ String filename = file.getFileName().toString();
+ if (filename.endsWith(".class")) {
+ String className = filename.substring(0, filename.length() - ".class".length());
+ Class<?> clazz = loadClassWithoutInitializing(packageName + className);
+ if (clazz.getName().endsWith("Tests")) {
+ visitTestClass(clazz);
+ } else if (clazz.getName().endsWith("IT")) {
+ visitIntegrationTestClass(clazz);
+ } else {
+ visitOtherClass(clazz);
+ }
+ }
+ return FileVisitResult.CONTINUE;
+ }
+
+ /**
+ * Is this class a test case?
+ */
+ protected boolean isTestCase(Class<?> clazz) {
+ return testClass.isAssignableFrom(clazz);
+ }
+
+ @Override
+ public final FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
+ throw exc;
+ }
+ }
}
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java
new file mode 100644
index 0000000000000..46adc7f065b16
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainIT.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test;
+
+/**
+ * This class should fail the naming conventions self test.
+ */
+public class NamingConventionsCheckInMainIT {
+}
diff --git a/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java
new file mode 100644
index 0000000000000..27c0b41eb3f6a
--- /dev/null
+++ b/buildSrc/src/main/java/org/elasticsearch/test/NamingConventionsCheckInMainTests.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.test;
+
+/**
+ * This class should fail the naming conventions self test.
+ */
+public class NamingConventionsCheckInMainTests {
+}
diff --git a/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.test-with-dependencies.properties b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.test-with-dependencies.properties
new file mode 100644
index 0000000000000..bcb374a85c618
--- /dev/null
+++ b/buildSrc/src/main/resources/META-INF/gradle-plugins/elasticsearch.test-with-dependencies.properties
@@ -0,0 +1,20 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+implementation-class=org.elasticsearch.gradle.test.TestWithDependenciesPlugin
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml
index c8251702484c2..678155c656170 100644
--- a/buildSrc/src/main/resources/checkstyle_suppressions.xml
+++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -1,7 +1,7 @@
+<!DOCTYPE suppressions PUBLIC
+ "-//Puppy Crawl//DTD Suppressions 1.1//EN"
+ "http://www.puppycrawl.com/dtds/suppressions_1_1.dtd">
diff --git a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
index 9bee5e587b03f..48c93f444ba2a 100644
--- a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
+++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,5 @@
eclipse.preferences.version=1
-# previous configuration from maven build
# this is merged with gradle's generated properties during 'gradle eclipse'
# NOTE: null pointer analysis etc is not enabled currently, it seems very unstable
diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt
index 37f03f4c91c28..f1d271d602ce1 100644
--- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt
@@ -26,13 +26,25 @@ java.util.concurrent.ThreadLocalRandom
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests
-@defaultMessage this should not have been added to lucene in the first place
-org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
-
-@defaultMessage Soon to be removed
-org.apache.lucene.document.FieldType#numericType()
-
@defaultMessage Don't use MethodHandles in slow ways, don't be lenient in tests.
java.lang.invoke.MethodHandle#invoke(java.lang.Object[])
java.lang.invoke.MethodHandle#invokeWithArguments(java.lang.Object[])
java.lang.invoke.MethodHandle#invokeWithArguments(java.util.List)
+
+@defaultMessage Don't open socket connections
+java.net.URL#openStream()
+java.net.URLConnection#connect()
+java.net.URLConnection#getInputStream()
+java.net.Socket#connect(java.net.SocketAddress)
+java.net.Socket#connect(java.net.SocketAddress, int)
+java.nio.channels.SocketChannel#open(java.net.SocketAddress)
+java.nio.channels.SocketChannel#connect(java.net.SocketAddress)
+
+# This method is misleading, and uses lenient boolean parsing under the hood. If you intend to parse
+# a system property as a boolean, use
+# org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) on the result of
+# java.lang.System#getProperty(java.lang.String) instead. If you were not intending to parse
+# a system property as a boolean, but instead parse a string to a boolean, use
+# org.elasticsearch.common.Booleans#parseBoolean(java.lang.String) directly on the string.
+@defaultMessage use org.elasticsearch.common.Booleans#parseBoolean(java.lang.String)
+java.lang.Boolean#getBoolean(java.lang.String)
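To make the replacement concrete, here is a minimal sketch of the preferred pattern. The property name `es.example.flag` is hypothetical; the only API taken from the signature entry above is `Booleans#parseBoolean(java.lang.String)`:

```java
import org.elasticsearch.common.Booleans;

public final class ParseBooleanExample {
    public static void main(String[] args) {
        // Forbidden: Boolean.getBoolean("es.example.flag") both reads the
        // system property and parses it leniently (any value other than
        // "true" silently becomes false).

        // Preferred: read the property explicitly, then parse it strictly,
        // so typos in the value fail loudly instead of being swallowed.
        String raw = System.getProperty("es.example.flag", "false");
        boolean flag = Booleans.parseBoolean(raw);
        System.out.println("es.example.flag = " + flag);
    }
}
```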
diff --git a/buildSrc/src/main/resources/forbidden/es-core-signatures.txt b/buildSrc/src/main/resources/forbidden/es-core-signatures.txt
index 059be403a672f..6507f05be5cd3 100644
--- a/buildSrc/src/main/resources/forbidden/es-core-signatures.txt
+++ b/buildSrc/src/main/resources/forbidden/es-core-signatures.txt
@@ -36,16 +36,6 @@ org.apache.lucene.index.IndexReader#decRef()
org.apache.lucene.index.IndexReader#incRef()
org.apache.lucene.index.IndexReader#tryIncRef()
-@defaultMessage Close listeners can only installed via ElasticsearchDirectoryReader#addReaderCloseListener
-org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
-org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
-
-@defaultMessage Pass the precision step from the mappings explicitly instead
-org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
-org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
-org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
-org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
-
@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
java.lang.Object#wait()
java.lang.Object#wait(long)
diff --git a/buildSrc/src/main/resources/forbidden/http-signatures.txt b/buildSrc/src/main/resources/forbidden/http-signatures.txt
new file mode 100644
index 0000000000000..dcf20bbb09387
--- /dev/null
+++ b/buildSrc/src/main/resources/forbidden/http-signatures.txt
@@ -0,0 +1,45 @@
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on
+# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+# either express or implied. See the License for the specific
+# language governing permissions and limitations under the License.
+
+@defaultMessage Explicitly specify the ContentType of HTTP entities when creating
+org.apache.http.entity.StringEntity#<init>(java.lang.String)
+org.apache.http.entity.StringEntity#<init>(java.lang.String,java.lang.String)
+org.apache.http.entity.StringEntity#<init>(java.lang.String,java.nio.charset.Charset)
+org.apache.http.entity.ByteArrayEntity#<init>(byte[])
+org.apache.http.entity.ByteArrayEntity#<init>(byte[],int,int)
+org.apache.http.entity.FileEntity#<init>(java.io.File)
+org.apache.http.entity.InputStreamEntity#<init>(java.io.InputStream)
+org.apache.http.entity.InputStreamEntity#<init>(java.io.InputStream,long)
+org.apache.http.nio.entity.NByteArrayEntity#<init>(byte[])
+org.apache.http.nio.entity.NByteArrayEntity#<init>(byte[],int,int)
+org.apache.http.nio.entity.NFileEntity#<init>(java.io.File)
+org.apache.http.nio.entity.NStringEntity#<init>(java.lang.String)
+org.apache.http.nio.entity.NStringEntity#<init>(java.lang.String,java.lang.String)
+
+@defaultMessage Use non-deprecated constructors
+org.apache.http.nio.entity.NFileEntity#<init>(java.io.File,java.lang.String)
+org.apache.http.nio.entity.NFileEntity#<init>(java.io.File,java.lang.String,boolean)
+org.apache.http.entity.FileEntity#<init>(java.io.File,java.lang.String)
+org.apache.http.entity.StringEntity#<init>(java.lang.String,java.lang.String,java.lang.String)
+
+@defaultMessage BasicEntity is easy to mess up and forget to set content type
+org.apache.http.entity.BasicHttpEntity#<init>()
+
+@defaultMessage EntityTemplate is easy to mess up and forget to set content type
+org.apache.http.entity.EntityTemplate#<init>(org.apache.http.entity.ContentProducer)
+
+@defaultMessage SerializableEntity uses java serialization and makes it easy to forget to set content type
+org.apache.http.entity.SerializableEntity#<init>(java.io.Serializable)
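As a counterpart to the signatures above, a minimal sketch of constructing HTTP entities with an explicit `ContentType`. The JSON payload is made up; the constructors used are the non-forbidden `(String, ContentType)` overloads from httpclient/httpcore:

```java
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.nio.entity.NStringEntity;

public final class ExplicitContentTypeExample {
    public static void main(String[] args) {
        String json = "{\"query\":{\"match_all\":{}}}";

        // new StringEntity(json) is forbidden above: it carries no explicit
        // content type, so the receiving side has to guess what the bytes mean.

        // Stating the content type at construction time removes the guesswork:
        HttpEntity entity = new StringEntity(json, ContentType.APPLICATION_JSON);
        HttpEntity asyncEntity = new NStringEntity(json, ContentType.APPLICATION_JSON);

        // Both print "application/json; charset=UTF-8"
        System.out.println(entity.getContentType().getValue());
        System.out.println(asyncEntity.getContentType().getValue());
    }
}
```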
diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties
index ebde46d326ba9..67c6ee39968cd 100644
--- a/buildSrc/src/main/resources/plugin-descriptor.properties
+++ b/buildSrc/src/main/resources/plugin-descriptor.properties
@@ -30,11 +30,15 @@ name=${name}
# 'classname': the name of the class to load, fully-qualified.
classname=${classname}
#
-# 'java.version' version of java the code is built against
+# 'java.version': version of java the code is built against
# use the system property java.specification.version
# version string must be a sequence of nonnegative decimal integers
# separated by "."'s and may have leading zeros
java.version=${javaVersion}
#
-# 'elasticsearch.version' version of elasticsearch compiled against
+# 'elasticsearch.version': version of elasticsearch compiled against
elasticsearch.version=${elasticsearchVersion}
+### optional elements for plugins:
+#
+# 'has.native.controller': whether or not the plugin has a native controller
+has.native.controller=${hasNativeController}
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 15d2f32096221..e7243b9dad9ee 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,25 +1,31 @@
-elasticsearch = 6.0.0-alpha1
-lucene = 6.4.0-snapshot-084f7a0
+# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+elasticsearch = 6.0.0-alpha3
+lucene = 7.0.0-snapshot-a0aef2f
# optional dependencies
spatial4j = 0.6
jts = 1.13
-jackson = 2.8.1
+jackson = 2.8.6
snakeyaml = 1.15
# When updating log4j, please update also docs/java-api/index.asciidoc
-log4j = 2.7
+log4j = 2.8.2
slf4j = 1.6.2
-jna = 4.2.2
+jna = 4.4.0
# test dependencies
-randomizedrunner = 2.4.0
-junit = 4.11
+randomizedrunner = 2.5.0
+junit = 4.12
httpclient = 4.5.2
+# When updating httpcore, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
httpcore = 4.4.5
+# When updating httpasyncclient, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
+httpasyncclient = 4.1.2
commonslogging = 1.1.3
commonscodec = 1.10
hamcrest = 1.3
securemock = 1.2
+# When updating mocksocket, please also update core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
mocksocket = 1.1
+
# benchmark dependencies
jmh = 1.17.3
diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
index 214a75d12cc01..e9cde26e6c870 100644
--- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
+++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/ops/bulk/BulkBenchmarkTask.java
@@ -95,7 +95,7 @@ private static final class LoadGenerator {
private final BlockingQueue<List<String>> bulkQueue;
private final int bulkSize;
- public LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
+ LoadGenerator(Path bulkDataFile, BlockingQueue<List<String>> bulkQueue, int bulkSize) {
this.bulkDataFile = bulkDataFile;
this.bulkQueue = bulkQueue;
this.bulkSize = bulkSize;
@@ -143,7 +143,7 @@ private static final class BulkIndexer implements Runnable {
private final BulkRequestExecutor bulkRequestExecutor;
private final SampleRecorder sampleRecorder;
- public BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
+ BulkIndexer(BlockingQueue<List<String>> bulkData, int warmupIterations, int measurementIterations,
SampleRecorder sampleRecorder, BulkRequestExecutor bulkRequestExecutor) {
this.bulkData = bulkData;
this.warmupIterations = warmupIterations;
diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java
index b342d93fba5a1..9210526e7c81c 100644
--- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java
+++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/rest/RestClientBenchmark.java
@@ -73,7 +73,7 @@ private static final class RestBulkRequestExecutor implements BulkRequestExecuto
private final RestClient client;
private final String actionMetaData;
- public RestBulkRequestExecutor(RestClient client, String index, String type) {
+ RestBulkRequestExecutor(RestClient client, String index, String type) {
this.client = client;
this.actionMetaData = String.format(Locale.ROOT, "{ \"index\" : { \"_index\" : \"%s\", \"_type\" : \"%s\" } }%n", index, type);
}
diff --git a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
index 6d6e5ade8275a..d2aee2251a67b 100644
--- a/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
+++ b/client/benchmark/src/main/java/org/elasticsearch/client/benchmark/transport/TransportClientBenchmark.java
@@ -28,6 +28,7 @@
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugin.noop.NoopPlugin;
import org.elasticsearch.plugin.noop.action.bulk.NoopBulkAction;
@@ -70,7 +71,7 @@ private static final class TransportBulkRequestExecutor implements BulkRequestEx
private final String indexName;
private final String typeName;
- public TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
+ TransportBulkRequestExecutor(TransportClient client, String indexName, String typeName) {
this.client = client;
this.indexName = indexName;
this.typeName = typeName;
@@ -80,7 +81,7 @@ public TransportBulkRequestExecutor(TransportClient client, String indexName, St
public boolean bulkIndex(List<String> bulkData) {
NoopBulkRequestBuilder builder = NoopBulkAction.INSTANCE.newRequestBuilder(client);
for (String bulkItem : bulkData) {
- builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8)));
+ builder.add(new IndexRequest(indexName, typeName).source(bulkItem.getBytes(StandardCharsets.UTF_8), XContentType.JSON));
}
BulkResponse bulkResponse;
try {
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java
index ac45f20dc2587..e8ed27715c10a 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/NoopPlugin.java
@@ -23,15 +23,23 @@
import org.elasticsearch.plugin.noop.action.bulk.TransportNoopBulkAction;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.IndexScopedSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.plugin.noop.action.search.NoopSearchAction;
import org.elasticsearch.plugin.noop.action.search.RestNoopSearchAction;
import org.elasticsearch.plugin.noop.action.search.TransportNoopSearchAction;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import java.util.Arrays;
import java.util.List;
+import java.util.function.Supplier;
public class NoopPlugin extends Plugin implements ActionPlugin {
@Override
@@ -43,7 +51,11 @@ public class NoopPlugin extends Plugin implements ActionPlugin {
}
@Override
- public List<Class<? extends RestHandler>> getRestHandlers() {
- return Arrays.asList(RestNoopBulkAction.class, RestNoopSearchAction.class);
+ public List<RestHandler> getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings,
+ IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, IndexNameExpressionResolver indexNameExpressionResolver,
+ Supplier<DiscoveryNodes> nodesInCluster) {
+ return Arrays.asList(
+ new RestNoopBulkAction(settings, restController),
+ new RestNoopSearchAction(settings, restController));
}
}
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java
index ceaf9f8cc9d17..1034e722e8789 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkRequestBuilder.java
@@ -33,6 +33,7 @@
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentType;
public class NoopBulkRequestBuilder extends ActionRequestBuilder<BulkRequest, BulkResponse, NoopBulkRequestBuilder>
implements WriteRequestBuilder<NoopBulkRequestBuilder> {
@@ -95,17 +96,17 @@ public NoopBulkRequestBuilder add(UpdateRequestBuilder request) {
/**
* Adds a framed data in binary format
*/
- public NoopBulkRequestBuilder add(byte[] data, int from, int length) throws Exception {
- request.add(data, from, length, null, null);
+ public NoopBulkRequestBuilder add(byte[] data, int from, int length, XContentType xContentType) throws Exception {
+ request.add(data, from, length, null, null, xContentType);
return this;
}
/**
* Adds a framed data in binary format
*/
- public NoopBulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType)
- throws Exception {
- request.add(data, from, length, defaultIndex, defaultType);
+ public NoopBulkRequestBuilder add(byte[] data, int from, int length, @Nullable String defaultIndex, @Nullable String defaultType,
+ XContentType xContentType) throws Exception {
+ request.add(data, from, length, defaultIndex, defaultType, xContentType);
return this;
}
diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
index 06082ed7d294c..ca5f32205674c 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/RestNoopBulkAction.java
@@ -18,8 +18,8 @@
*/
package org.elasticsearch.plugin.noop.action.bulk;
-import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
@@ -28,7 +28,6 @@
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.shard.ShardId;
@@ -47,7 +46,6 @@
import static org.elasticsearch.rest.RestStatus.OK;
public class RestNoopBulkAction extends BaseRestHandler {
- @Inject
public RestNoopBulkAction(Settings settings, RestController controller) {
super(settings);
@@ -59,6 +57,11 @@ public RestNoopBulkAction(Settings settings, RestController controller) {
controller.registerHandler(PUT, "/{index}/{type}/_noop_bulk", this);
}
+ @Override
+ public String getName() {
+ return "noop_bulk_action";
+ }
+
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
BulkRequest bulkRequest = Requests.bulkRequest();
@@ -75,7 +78,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC
}
bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT));
bulkRequest.setRefreshPolicy(request.param("refresh"));
- bulkRequest.add(request.content(), defaultIndex, defaultType, defaultRouting, defaultFields, null, defaultPipeline, null, true);
+ bulkRequest.add(request.requiredContent(), defaultIndex, defaultType, defaultRouting, defaultFields,
+ null, defaultPipeline, null, true, request.getXContentType());
// short circuit the call to the transport layer
return channel -> {
@@ -91,7 +95,7 @@ private static class BulkRestBuilderListener extends RestBuilderListener<BulkResponse> {
[hunk body elided in extraction; the lines below appear to belong to the TransportNoopSearchAction.java diff, whose file header was lost]
listener.onResponse(new SearchResponse(new InternalSearchResponse(
- new InternalSearchHits(
- new InternalSearchHit[0], 0L, 0.0f),
+ new SearchHits(
+ new SearchHit[0], 0L, 0.0f),
new InternalAggregations(Collections.emptyList()),
new Suggest(Collections.emptyList()),
- new SearchProfileShardResults(Collections.emptyMap()), false, false), "", 1, 1, 0, new ShardSearchFailure[0]));
+ new SearchProfileShardResults(Collections.emptyMap()), false, false, 1), "", 1, 1, 0, new ShardSearchFailure[0]));
}
}
diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index 162e8608d4431..9203b8978fd05 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -1,3 +1,5 @@
+import org.elasticsearch.gradle.precommit.PrecommitTasks
+
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
@@ -24,6 +26,8 @@ group = 'org.elasticsearch.client'
dependencies {
compile "org.elasticsearch:elasticsearch:${version}"
compile "org.elasticsearch.client:rest:${version}"
+ compile "org.elasticsearch.plugin:parent-join-client:${version}"
+ compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
@@ -39,3 +43,9 @@ dependencyLicenses {
it.group.startsWith('org.elasticsearch') == false
}
}
+
+forbiddenApisMain {
+ // core does not depend on the httpclient for compile so we add the signatures here. We don't add them for test as they are already
+ // specified
+ signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')]
+}
\ No newline at end of file
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java
new file mode 100644
index 0000000000000..9e881cf7b9add
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java
@@ -0,0 +1,548 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpHead;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.support.ActiveShardCount;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.rest.action.search.RestSearchAction;
+import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.StringJoiner;
+
+final class Request {
+
+ static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON;
+
+ final String method;
+ final String endpoint;
+ final Map<String, String> params;
+ final HttpEntity entity;
+
+ Request(String method, String endpoint, Map<String, String> params, HttpEntity entity) {
+ this.method = method;
+ this.endpoint = endpoint;
+ this.params = params;
+ this.entity = entity;
+ }
+
+ @Override
+ public String toString() {
+ return "Request{" +
+ "method='" + method + '\'' +
+ ", endpoint='" + endpoint + '\'' +
+ ", params=" + params +
+ ", hasBody=" + (entity != null) +
+ '}';
+ }
+
+ static Request delete(DeleteRequest deleteRequest) {
+ String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id());
+
+ Params parameters = Params.builder();
+ parameters.withRouting(deleteRequest.routing());
+ parameters.withParent(deleteRequest.parent());
+ parameters.withTimeout(deleteRequest.timeout());
+ parameters.withVersion(deleteRequest.version());
+ parameters.withVersionType(deleteRequest.versionType());
+ parameters.withRefreshPolicy(deleteRequest.getRefreshPolicy());
+ parameters.withWaitForActiveShards(deleteRequest.waitForActiveShards());
+
+ return new Request(HttpDelete.METHOD_NAME, endpoint, parameters.getParams(), null);
+ }
+
+ static Request info() {
+ return new Request(HttpGet.METHOD_NAME, "/", Collections.emptyMap(), null);
+ }
+
+ static Request bulk(BulkRequest bulkRequest) throws IOException {
+ Params parameters = Params.builder();
+ parameters.withTimeout(bulkRequest.timeout());
+ parameters.withRefreshPolicy(bulkRequest.getRefreshPolicy());
+
+ // Bulk API only supports newline delimited JSON or Smile. Before executing
+ // the bulk, we need to check that all requests have the same content-type
+ // and this content-type is supported by the Bulk API.
+ XContentType bulkContentType = null;
+ for (int i = 0; i < bulkRequest.numberOfActions(); i++) {
+ DocWriteRequest<?> request = bulkRequest.requests().get(i);
+
+ DocWriteRequest.OpType opType = request.opType();
+ if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
+ bulkContentType = enforceSameContentType((IndexRequest) request, bulkContentType);
+
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ UpdateRequest updateRequest = (UpdateRequest) request;
+ if (updateRequest.doc() != null) {
+ bulkContentType = enforceSameContentType(updateRequest.doc(), bulkContentType);
+ }
+ if (updateRequest.upsertRequest() != null) {
+ bulkContentType = enforceSameContentType(updateRequest.upsertRequest(), bulkContentType);
+ }
+ }
+ }
+
+ if (bulkContentType == null) {
+ bulkContentType = XContentType.JSON;
+ }
+
+ byte separator = bulkContentType.xContent().streamSeparator();
+ ContentType requestContentType = ContentType.create(bulkContentType.mediaType());
+
+ ByteArrayOutputStream content = new ByteArrayOutputStream();
+ for (DocWriteRequest<?> request : bulkRequest.requests()) {
+ DocWriteRequest.OpType opType = request.opType();
+
+ try (XContentBuilder metadata = XContentBuilder.builder(bulkContentType.xContent())) {
+ metadata.startObject();
+ {
+ metadata.startObject(opType.getLowercase());
+ if (Strings.hasLength(request.index())) {
+ metadata.field("_index", request.index());
+ }
+ if (Strings.hasLength(request.type())) {
+ metadata.field("_type", request.type());
+ }
+ if (Strings.hasLength(request.id())) {
+ metadata.field("_id", request.id());
+ }
+ if (Strings.hasLength(request.routing())) {
+ metadata.field("_routing", request.routing());
+ }
+ if (Strings.hasLength(request.parent())) {
+ metadata.field("_parent", request.parent());
+ }
+ if (request.version() != Versions.MATCH_ANY) {
+ metadata.field("_version", request.version());
+ }
+
+ VersionType versionType = request.versionType();
+ if (versionType != VersionType.INTERNAL) {
+ if (versionType == VersionType.EXTERNAL) {
+ metadata.field("_version_type", "external");
+ } else if (versionType == VersionType.EXTERNAL_GTE) {
+ metadata.field("_version_type", "external_gte");
+ } else if (versionType == VersionType.FORCE) {
+ metadata.field("_version_type", "force");
+ }
+ }
+
+ if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
+ IndexRequest indexRequest = (IndexRequest) request;
+ if (Strings.hasLength(indexRequest.getPipeline())) {
+ metadata.field("pipeline", indexRequest.getPipeline());
+ }
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ UpdateRequest updateRequest = (UpdateRequest) request;
+ if (updateRequest.retryOnConflict() > 0) {
+ metadata.field("_retry_on_conflict", updateRequest.retryOnConflict());
+ }
+ if (updateRequest.fetchSource() != null) {
+ metadata.field("_source", updateRequest.fetchSource());
+ }
+ }
+ metadata.endObject();
+ }
+ metadata.endObject();
+
+ BytesRef metadataSource = metadata.bytes().toBytesRef();
+ content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
+ content.write(separator);
+ }
+
+ BytesRef source = null;
+ if (opType == DocWriteRequest.OpType.INDEX || opType == DocWriteRequest.OpType.CREATE) {
+ IndexRequest indexRequest = (IndexRequest) request;
+ BytesReference indexSource = indexRequest.source();
+ XContentType indexXContentType = indexRequest.getContentType();
+
+ try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, indexSource, indexXContentType)) {
+ try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
+ builder.copyCurrentStructure(parser);
+ source = builder.bytes().toBytesRef();
+ }
+ }
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ source = XContentHelper.toXContent((UpdateRequest) request, bulkContentType, false).toBytesRef();
+ }
+
+ if (source != null) {
+ content.write(source.bytes, source.offset, source.length);
+ content.write(separator);
+ }
+ }
+
+ HttpEntity entity = new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType);
+ return new Request(HttpPost.METHOD_NAME, "/_bulk", parameters.getParams(), entity);
+ }
+
+ static Request exists(GetRequest getRequest) {
+ Request request = get(getRequest);
+ return new Request(HttpHead.METHOD_NAME, request.endpoint, request.params, null);
+ }
+
+ static Request get(GetRequest getRequest) {
+ String endpoint = endpoint(getRequest.index(), getRequest.type(), getRequest.id());
+
+ Params parameters = Params.builder();
+ parameters.withPreference(getRequest.preference());
+ parameters.withRouting(getRequest.routing());
+ parameters.withParent(getRequest.parent());
+ parameters.withRefresh(getRequest.refresh());
+ parameters.withRealtime(getRequest.realtime());
+ parameters.withStoredFields(getRequest.storedFields());
+ parameters.withVersion(getRequest.version());
+ parameters.withVersionType(getRequest.versionType());
+ parameters.withFetchSourceContext(getRequest.fetchSourceContext());
+
+ return new Request(HttpGet.METHOD_NAME, endpoint, parameters.getParams(), null);
+ }
+
+ static Request index(IndexRequest indexRequest) {
+ String method = Strings.hasLength(indexRequest.id()) ? HttpPut.METHOD_NAME : HttpPost.METHOD_NAME;
+
+ boolean isCreate = (indexRequest.opType() == DocWriteRequest.OpType.CREATE);
+ String endpoint = endpoint(indexRequest.index(), indexRequest.type(), indexRequest.id(), isCreate ? "_create" : null);
+
+ Params parameters = Params.builder();
+ parameters.withRouting(indexRequest.routing());
+ parameters.withParent(indexRequest.parent());
+ parameters.withTimeout(indexRequest.timeout());
+ parameters.withVersion(indexRequest.version());
+ parameters.withVersionType(indexRequest.versionType());
+ parameters.withPipeline(indexRequest.getPipeline());
+ parameters.withRefreshPolicy(indexRequest.getRefreshPolicy());
+ parameters.withWaitForActiveShards(indexRequest.waitForActiveShards());
+
+ BytesRef source = indexRequest.source().toBytesRef();
+ ContentType contentType = ContentType.create(indexRequest.getContentType().mediaType());
+ HttpEntity entity = new ByteArrayEntity(source.bytes, source.offset, source.length, contentType);
+
+ return new Request(method, endpoint, parameters.getParams(), entity);
+ }
+
+ static Request ping() {
+ return new Request(HttpHead.METHOD_NAME, "/", Collections.emptyMap(), null);
+ }
+
+ static Request update(UpdateRequest updateRequest) throws IOException {
+ String endpoint = endpoint(updateRequest.index(), updateRequest.type(), updateRequest.id(), "_update");
+
+ Params parameters = Params.builder();
+ parameters.withRouting(updateRequest.routing());
+ parameters.withParent(updateRequest.parent());
+ parameters.withTimeout(updateRequest.timeout());
+ parameters.withRefreshPolicy(updateRequest.getRefreshPolicy());
+ parameters.withWaitForActiveShards(updateRequest.waitForActiveShards());
+ parameters.withDocAsUpsert(updateRequest.docAsUpsert());
+ parameters.withFetchSourceContext(updateRequest.fetchSource());
+ parameters.withRetryOnConflict(updateRequest.retryOnConflict());
+ parameters.withVersion(updateRequest.version());
+ parameters.withVersionType(updateRequest.versionType());
+
+ // The Java API allows update requests with different content types
+ // set for the partial document and the upsert document. This client
+ // only accepts update requests that have the same content types set
+ // for both doc and upsert.
+ XContentType xContentType = null;
+ if (updateRequest.doc() != null) {
+ xContentType = updateRequest.doc().getContentType();
+ }
+ if (updateRequest.upsertRequest() != null) {
+ XContentType upsertContentType = updateRequest.upsertRequest().getContentType();
+ if ((xContentType != null) && (xContentType != upsertContentType)) {
+ throw new IllegalStateException("Update request cannot have different content types for doc [" + xContentType + "]" +
+ " and upsert [" + upsertContentType + "] documents");
+ } else {
+ xContentType = upsertContentType;
+ }
+ }
+ if (xContentType == null) {
+ xContentType = Requests.INDEX_CONTENT_TYPE;
+ }
+
+ HttpEntity entity = createEntity(updateRequest, xContentType);
+ return new Request(HttpPost.METHOD_NAME, endpoint, parameters.getParams(), entity);
+ }
+
+ static Request search(SearchRequest searchRequest) throws IOException {
+ String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search");
+ Params params = Params.builder();
+ params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
+ params.withRouting(searchRequest.routing());
+ params.withPreference(searchRequest.preference());
+ params.withIndicesOptions(searchRequest.indicesOptions());
+ params.putParam("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
+ if (searchRequest.requestCache() != null) {
+ params.putParam("request_cache", Boolean.toString(searchRequest.requestCache()));
+ }
+ params.putParam("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
+ if (searchRequest.scroll() != null) {
+ params.putParam("scroll", searchRequest.scroll().keepAlive());
+ }
+ HttpEntity entity = null;
+ if (searchRequest.source() != null) {
+ entity = createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE);
+ }
+ return new Request(HttpGet.METHOD_NAME, endpoint, params.getParams(), entity);
+ }
+
+ static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException {
+ HttpEntity entity = createEntity(searchScrollRequest, REQUEST_BODY_CONTENT_TYPE);
+ return new Request("GET", "/_search/scroll", Collections.emptyMap(), entity);
+ }
+
+ static Request clearScroll(ClearScrollRequest clearScrollRequest) throws IOException {
+ HttpEntity entity = createEntity(clearScrollRequest, REQUEST_BODY_CONTENT_TYPE);
+ return new Request("DELETE", "/_search/scroll", Collections.emptyMap(), entity);
+ }
+
+ private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
+ BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
+ return new ByteArrayEntity(source.bytes, source.offset, source.length, ContentType.create(xContentType.mediaType()));
+ }
+
+ static String endpoint(String[] indices, String[] types, String endpoint) {
+ return endpoint(String.join(",", indices), String.join(",", types), endpoint);
+ }
+
+ /**
+ * Utility method to build request's endpoint.
+ */
+ static String endpoint(String... parts) {
+ StringJoiner joiner = new StringJoiner("/", "/", "");
+ for (String part : parts) {
+ if (Strings.hasLength(part)) {
+ joiner.add(part);
+ }
+ }
+ return joiner.toString();
+ }
+
+ /**
+ * Utility class to build request's parameters map and centralize all parameter names.
+ */
+ static class Params {
+ private final Map<String, String> params = new HashMap<>();
+
+ private Params() {
+ }
+
+ Params putParam(String key, String value) {
+ if (Strings.hasLength(value)) {
+ if (params.putIfAbsent(key, value) != null) {
+ throw new IllegalArgumentException("Request parameter [" + key + "] is already registered");
+ }
+ }
+ return this;
+ }
+
+ Params putParam(String key, TimeValue value) {
+ if (value != null) {
+ return putParam(key, value.getStringRep());
+ }
+ return this;
+ }
+
+ Params withDocAsUpsert(boolean docAsUpsert) {
+ if (docAsUpsert) {
+ return putParam("doc_as_upsert", Boolean.TRUE.toString());
+ }
+ return this;
+ }
+
+ Params withFetchSourceContext(FetchSourceContext fetchSourceContext) {
+ if (fetchSourceContext != null) {
+ if (fetchSourceContext.fetchSource() == false) {
+ putParam("_source", Boolean.FALSE.toString());
+ }
+ if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) {
+ putParam("_source_include", String.join(",", fetchSourceContext.includes()));
+ }
+ if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) {
+ putParam("_source_exclude", String.join(",", fetchSourceContext.excludes()));
+ }
+ }
+ return this;
+ }
+
+ Params withParent(String parent) {
+ return putParam("parent", parent);
+ }
+
+ Params withPipeline(String pipeline) {
+ return putParam("pipeline", pipeline);
+ }
+
+ Params withPreference(String preference) {
+ return putParam("preference", preference);
+ }
+
+ Params withRealtime(boolean realtime) {
+ if (realtime == false) {
+ return putParam("realtime", Boolean.FALSE.toString());
+ }
+ return this;
+ }
+
+ Params withRefresh(boolean refresh) {
+ if (refresh) {
+ return withRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
+ }
+ return this;
+ }
+
+ Params withRefreshPolicy(WriteRequest.RefreshPolicy refreshPolicy) {
+ if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
+ return putParam("refresh", refreshPolicy.getValue());
+ }
+ return this;
+ }
+
+ Params withRetryOnConflict(int retryOnConflict) {
+ if (retryOnConflict > 0) {
+ return putParam("retry_on_conflict", String.valueOf(retryOnConflict));
+ }
+ return this;
+ }
+
+ Params withRouting(String routing) {
+ return putParam("routing", routing);
+ }
+
+ Params withStoredFields(String[] storedFields) {
+ if (storedFields != null && storedFields.length > 0) {
+ return putParam("stored_fields", String.join(",", storedFields));
+ }
+ return this;
+ }
+
+ Params withTimeout(TimeValue timeout) {
+ return putParam("timeout", timeout);
+ }
+
+ Params withVersion(long version) {
+ if (version != Versions.MATCH_ANY) {
+ return putParam("version", Long.toString(version));
+ }
+ return this;
+ }
+
+ Params withVersionType(VersionType versionType) {
+ if (versionType != VersionType.INTERNAL) {
+ return putParam("version_type", versionType.name().toLowerCase(Locale.ROOT));
+ }
+ return this;
+ }
+
+ Params withWaitForActiveShards(ActiveShardCount activeShardCount) {
+ if (activeShardCount != null && activeShardCount != ActiveShardCount.DEFAULT) {
+ return putParam("wait_for_active_shards", activeShardCount.toString().toLowerCase(Locale.ROOT));
+ }
+ return this;
+ }
+
+ Params withIndicesOptions(IndicesOptions indicesOptions) {
+ putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable()));
+ putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices()));
+ String expandWildcards;
+ if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) {
+ expandWildcards = "none";
+ } else {
+ StringJoiner joiner = new StringJoiner(",");
+ if (indicesOptions.expandWildcardsOpen()) {
+ joiner.add("open");
+ }
+ if (indicesOptions.expandWildcardsClosed()) {
+ joiner.add("closed");
+ }
+ expandWildcards = joiner.toString();
+ }
+ putParam("expand_wildcards", expandWildcards);
+ return this;
+ }
+
+ Map<String, String> getParams() {
+ return Collections.unmodifiableMap(params);
+ }
+
+ static Params builder() {
+ return new Params();
+ }
+ }
+
+ /**
+ * Ensure that the {@link IndexRequest}'s content type is supported by the Bulk API and that it conforms
+ * to the current {@link BulkRequest}'s content type (if it's known at the time this method is called).
+ *
+ * @return the {@link IndexRequest}'s content type
+ */
+ static XContentType enforceSameContentType(IndexRequest indexRequest, @Nullable XContentType xContentType) {
+ XContentType requestContentType = indexRequest.getContentType();
+ if (requestContentType != XContentType.JSON && requestContentType != XContentType.SMILE) {
+ throw new IllegalArgumentException("Unsupported content-type found for request with content-type [" + requestContentType
+ + "], only JSON and SMILE are supported");
+ }
+ if (xContentType == null) {
+ return requestContentType;
+ }
+ if (requestContentType != xContentType) {
+ throw new IllegalArgumentException("Mismatching content-type found for request with content-type [" + requestContentType
+ + "], previous requests have content-type [" + xContentType + "]");
+ }
+ return xContentType;
+ }
+}
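`Request` and its helpers are package-private, so client code never calls them directly, but the behavior of the `endpoint(String...)` helper above is easy to pin down: null or empty parts are skipped and the rest are joined with slashes. The following standalone sketch mirrors that logic (substituting a plain null/empty check for `Strings.hasLength`) to show the paths it produces:

```java
import java.util.StringJoiner;

public final class EndpointExample {
    // Mirrors Request#endpoint(String...) above: skip null/empty parts,
    // join the remainder with '/' and prefix the result with '/'.
    static String endpoint(String... parts) {
        StringJoiner joiner = new StringJoiner("/", "/", "");
        for (String part : parts) {
            if (part != null && part.isEmpty() == false) {
                joiner.add(part);
            }
        }
        return joiner.toString();
    }

    public static void main(String[] args) {
        System.out.println(endpoint("index", "type", "id"));            // /index/type/id
        System.out.println(endpoint("index", "type", "id", "_update")); // /index/type/id/_update
        System.out.println(endpoint("index", null, "_search"));         // /index/_search
    }
}
```

Skipping empty parts is what lets the same helper serve `/index/type/id`, `/index/type/id/_update`, and index-less endpoints alike.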
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index 58ecc5f9c2d96..a354bdfb7ba5a 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -19,35 +19,564 @@
package org.elasticsearch.client;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.main.MainRequest;
+import org.elasticsearch.action.main.MainResponse;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.ClearScrollResponse;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.action.update.UpdateResponse;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ContextParser;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
+import org.elasticsearch.join.aggregations.ParsedChildren;
+import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.aggregations.Aggregation;
+import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.adjacency.ParsedAdjacencyMatrix;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.ParsedFilter;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.ParsedFilters;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram;
+import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram;
+import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.missing.ParsedMissing;
+import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.ParsedNested;
+import org.elasticsearch.search.aggregations.bucket.nested.ParsedReverseNested;
+import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ParsedRange;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.ParsedDateRange;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.ParsedGeoDistance;
+import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
+import org.elasticsearch.search.aggregations.bucket.sampler.ParsedSampler;
+import org.elasticsearch.search.aggregations.bucket.significant.ParsedSignificantLongTerms;
+import org.elasticsearch.search.aggregations.bucket.significant.ParsedSignificantStringTerms;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms;
+import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
+import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder;
+import org.elasticsearch.search.aggregations.matrix.stats.ParsedMatrixStats;
+import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.avg.ParsedAvg;
+import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality;
+import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.geobounds.ParsedGeoBounds;
+import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.geocentroid.ParsedGeoCentroid;
+import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.max.ParsedMax;
+import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.min.ParsedMin;
+import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
+import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentiles;
+import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
+import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentiles;
+import org.elasticsearch.search.aggregations.metrics.scripted.ParsedScriptedMetric;
+import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
+import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats;
+import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum;
+import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.valuecount.ParsedValueCount;
+import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
+import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.ParsedBucketMetricValue;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.ParsedPercentilesBucket;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ParsedExtendedStatsBucket;
+import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder;
+import org.elasticsearch.search.aggregations.pipeline.derivative.ParsedDerivative;
+import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
+import org.elasticsearch.search.suggest.term.TermSuggestion;
import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static java.util.Collections.emptySet;
+import static java.util.Collections.singleton;
+import static java.util.stream.Collectors.toList;
/**
* High level REST client that wraps an instance of the low level {@link RestClient} and allows to build requests and read responses.
* The provided {@link RestClient} is externally built and closed.
+ * Can be sub-classed to expose additional client methods that make use of endpoints added to Elasticsearch through plugins, or to
+ * add support for custom response sections, again added to Elasticsearch through plugins.
*/
-public final class RestHighLevelClient {
-
- private static final Log logger = LogFactory.getLog(RestHighLevelClient.class);
+public class RestHighLevelClient {
private final RestClient client;
+ private final NamedXContentRegistry registry;
+
+ /**
+ * Creates a {@link RestHighLevelClient} given the low level {@link RestClient} that it should use to perform requests.
+ */
+ public RestHighLevelClient(RestClient restClient) {
+ this(restClient, Collections.emptyList());
+ }
+
+ /**
+ * Creates a {@link RestHighLevelClient} given the low level {@link RestClient} that it should use to perform requests and
+ * a list of entries that allow to parse custom response sections added to Elasticsearch through plugins.
+ */
+ protected RestHighLevelClient(RestClient restClient, List<NamedXContentRegistry.Entry> namedXContentEntries) {
+ this.client = Objects.requireNonNull(restClient);
+ this.registry = new NamedXContentRegistry(Stream.of(getDefaultNamedXContents().stream(), namedXContentEntries.stream())
+ .flatMap(Function.identity()).collect(toList()));
+ }
+
+ /**
+ * Executes a bulk request using the Bulk API
+ *
+ * See Bulk API on elastic.co
+ */
+ public BulkResponse bulk(BulkRequest bulkRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(bulkRequest, Request::bulk, BulkResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously executes a bulk request using the Bulk API
+ *
+ * See Bulk API on elastic.co
+ */
+ public void bulkAsync(BulkRequest bulkRequest, ActionListener<BulkResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(bulkRequest, Request::bulk, BulkResponse::fromXContent, listener, emptySet(), headers);
+ }
+
+ /**
+ * Pings the remote Elasticsearch cluster and returns true if the ping succeeded, false otherwise
+ */
+ public boolean ping(Header... headers) throws IOException {
+ return performRequest(new MainRequest(), (request) -> Request.ping(), RestHighLevelClient::convertExistsResponse,
+ emptySet(), headers);
+ }
+
+ /**
+ * Get the cluster info otherwise provided when sending an HTTP request to port 9200
+ */
+ public MainResponse info(Header... headers) throws IOException {
+ return performRequestAndParseEntity(new MainRequest(), (request) -> Request.info(), MainResponse::fromXContent, emptySet(),
+ headers);
+ }
+
+ /**
+ * Retrieves a document by id using the Get API
+ *
+ * See Get API on elastic.co
+ */
+ public GetResponse get(GetRequest getRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(getRequest, Request::get, GetResponse::fromXContent, singleton(404), headers);
+ }
+
+ /**
+ * Asynchronously retrieves a document by id using the Get API
+ *
+ * See Get API on elastic.co
+ */
+ public void getAsync(GetRequest getRequest, ActionListener<GetResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(getRequest, Request::get, GetResponse::fromXContent, listener, singleton(404), headers);
+ }
+
+ /**
+ * Checks for the existence of a document. Returns true if it exists, false otherwise
+ *
+ * See Get API on elastic.co
+ */
+ public boolean exists(GetRequest getRequest, Header... headers) throws IOException {
+ return performRequest(getRequest, Request::exists, RestHighLevelClient::convertExistsResponse, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously checks for the existence of a document. Returns true if it exists, false otherwise
+ *
+ * See Get API on elastic.co
+ */
+    public void existsAsync(GetRequest getRequest, ActionListener<Boolean> listener, Header... headers) {
+ performRequestAsync(getRequest, Request::exists, RestHighLevelClient::convertExistsResponse, listener, emptySet(), headers);
+ }
+
+ /**
+     * Indexes a document using the Index API
+ *
+ * See Index API on elastic.co
+ */
+ public IndexResponse index(IndexRequest indexRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(indexRequest, Request::index, IndexResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+     * Asynchronously indexes a document using the Index API
+ *
+ * See Index API on elastic.co
+ */
+    public void indexAsync(IndexRequest indexRequest, ActionListener<IndexResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(indexRequest, Request::index, IndexResponse::fromXContent, listener, emptySet(), headers);
+ }
+
+ /**
+ * Updates a document using the Update API
+ *
+ * See Update API on elastic.co
+ */
+ public UpdateResponse update(UpdateRequest updateRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(updateRequest, Request::update, UpdateResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously updates a document using the Update API
+ *
+ * See Update API on elastic.co
+ */
+    public void updateAsync(UpdateRequest updateRequest, ActionListener<UpdateResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(updateRequest, Request::update, UpdateResponse::fromXContent, listener, emptySet(), headers);
+ }
+
+ /**
+     * Deletes a document by id using the Delete API
+ *
+ * See Delete API on elastic.co
+ */
+ public DeleteResponse delete(DeleteRequest deleteRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(deleteRequest, Request::delete, DeleteResponse::fromXContent, Collections.singleton(404),
+ headers);
+ }
+
+ /**
+     * Asynchronously deletes a document by id using the Delete API
+ *
+ * See Delete API on elastic.co
+ */
+    public void deleteAsync(DeleteRequest deleteRequest, ActionListener<DeleteResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(deleteRequest, Request::delete, DeleteResponse::fromXContent, listener,
+ Collections.singleton(404), headers);
+ }
+
+ /**
+     * Executes a search using the Search API
+ *
+ * See Search API on elastic.co
+ */
+ public SearchResponse search(SearchRequest searchRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, emptySet(), headers);
+ }
- public RestHighLevelClient(RestClient client) {
- this.client = Objects.requireNonNull(client);
+ /**
+     * Asynchronously executes a search using the Search API
+ *
+ * See Search API on elastic.co
+ */
+    public void searchAsync(SearchRequest searchRequest, ActionListener<SearchResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, listener, emptySet(), headers);
}
- public boolean ping(Header... headers) {
+ /**
+     * Executes a search using the Search Scroll API
+ *
+ * See Search Scroll
+ * API on elastic.co
+ */
+ public SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+     * Asynchronously executes a search using the Search Scroll API
+ *
+ * See Search Scroll
+ * API on elastic.co
+ */
+    public void searchScrollAsync(SearchScrollRequest searchScrollRequest, ActionListener<SearchResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(searchScrollRequest, Request::searchScroll, SearchResponse::fromXContent,
+ listener, emptySet(), headers);
+ }
+
+ /**
+     * Clears one or more scroll ids using the Clear Scroll API
+ *
+ * See
+ * Clear Scroll API on elastic.co
+ */
+ public ClearScrollResponse clearScroll(ClearScrollRequest clearScrollRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent,
+ emptySet(), headers);
+ }
+
+ /**
+     * Asynchronously clears one or more scroll ids using the Clear Scroll API
+ *
+ * See
+ * Clear Scroll API on elastic.co
+ */
+    public void clearScrollAsync(ClearScrollRequest clearScrollRequest, ActionListener<ClearScrollResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(clearScrollRequest, Request::clearScroll, ClearScrollResponse::fromXContent,
+ listener, emptySet(), headers);
+ }
+
+    protected <Req extends ActionRequest, Resp> Resp performRequestAndParseEntity(Req request,
+            CheckedFunction<Req, Request, IOException> requestConverter,
+            CheckedFunction<XContentParser, Resp, IOException> entityParser,
+            Set<Integer> ignores, Header... headers) throws IOException {
+ return performRequest(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser), ignores, headers);
+ }
+
+    protected <Req extends ActionRequest, Resp> Resp performRequest(Req request,
+            CheckedFunction<Req, Request, IOException> requestConverter,
+            CheckedFunction<Response, Resp, IOException> responseConverter,
+            Set<Integer> ignores, Header... headers) throws IOException {
+ ActionRequestValidationException validationException = request.validate();
+ if (validationException != null) {
+ throw validationException;
+ }
+ Request req = requestConverter.apply(request);
+ Response response;
+ try {
+ response = client.performRequest(req.method, req.endpoint, req.params, req.entity, headers);
+ } catch (ResponseException e) {
+ if (ignores.contains(e.getResponse().getStatusLine().getStatusCode())) {
+ try {
+ return responseConverter.apply(e.getResponse());
+ } catch (Exception innerException) {
+ throw parseResponseException(e);
+ }
+ }
+ throw parseResponseException(e);
+ }
+
+ try {
+ return responseConverter.apply(response);
+ } catch(Exception e) {
+ throw new IOException("Unable to parse response body for " + response, e);
+ }
+ }
+
+    protected <Req extends ActionRequest, Resp> void performRequestAsyncAndParseEntity(Req request,
+            CheckedFunction<Req, Request, IOException> requestConverter,
+            CheckedFunction<XContentParser, Resp, IOException> entityParser,
+            ActionListener<Resp> listener, Set<Integer> ignores, Header... headers) {
+ performRequestAsync(request, requestConverter, (response) -> parseEntity(response.getEntity(), entityParser),
+ listener, ignores, headers);
+ }
+
+    protected <Req extends ActionRequest, Resp> void performRequestAsync(Req request,
+            CheckedFunction<Req, Request, IOException> requestConverter,
+            CheckedFunction<Response, Resp, IOException> responseConverter,
+            ActionListener<Resp> listener, Set<Integer> ignores, Header... headers) {
+ ActionRequestValidationException validationException = request.validate();
+ if (validationException != null) {
+ listener.onFailure(validationException);
+ return;
+ }
+ Request req;
try {
- client.performRequest("HEAD", "/", headers);
- return true;
- } catch(IOException exception) {
- return false;
+ req = requestConverter.apply(request);
+ } catch (Exception e) {
+ listener.onFailure(e);
+ return;
+ }
+
+ ResponseListener responseListener = wrapResponseListener(responseConverter, listener, ignores);
+ client.performRequestAsync(req.method, req.endpoint, req.params, req.entity, responseListener, headers);
+ }
+
+    <Resp> ResponseListener wrapResponseListener(CheckedFunction<Response, Resp, IOException> responseConverter,
+                                                 ActionListener<Resp> actionListener, Set<Integer> ignores) {
+ return new ResponseListener() {
+ @Override
+ public void onSuccess(Response response) {
+ try {
+ actionListener.onResponse(responseConverter.apply(response));
+ } catch(Exception e) {
+ IOException ioe = new IOException("Unable to parse response body for " + response, e);
+ onFailure(ioe);
+ }
+ }
+
+ @Override
+ public void onFailure(Exception exception) {
+ if (exception instanceof ResponseException) {
+ ResponseException responseException = (ResponseException) exception;
+ Response response = responseException.getResponse();
+ if (ignores.contains(response.getStatusLine().getStatusCode())) {
+ try {
+ actionListener.onResponse(responseConverter.apply(response));
+ } catch (Exception innerException) {
+ //the exception is ignored as we now try to parse the response as an error.
+ //this covers cases like get where 404 can either be a valid document not found response,
+ //or an error for which parsing is completely different. We try to consider the 404 response as a valid one
+ //first. If parsing of the response breaks, we fall back to parsing it as an error.
+ actionListener.onFailure(parseResponseException(responseException));
+ }
+ } else {
+ actionListener.onFailure(parseResponseException(responseException));
+ }
+ } else {
+ actionListener.onFailure(exception);
+ }
+ }
+ };
+ }
+
+ /**
+ * Converts a {@link ResponseException} obtained from the low level REST client into an {@link ElasticsearchException}.
+ * If a response body was returned, tries to parse it as an error returned from Elasticsearch.
+ * If no response body was returned or anything goes wrong while parsing the error, returns a new {@link ElasticsearchStatusException}
+ * that wraps the original {@link ResponseException}. The potential exception obtained while parsing is added to the returned
+     * exception as a suppressed exception. This method is guaranteed not to throw any exception raised while parsing.
+ */
+ ElasticsearchStatusException parseResponseException(ResponseException responseException) {
+ Response response = responseException.getResponse();
+ HttpEntity entity = response.getEntity();
+ ElasticsearchStatusException elasticsearchException;
+ if (entity == null) {
+ elasticsearchException = new ElasticsearchStatusException(
+ responseException.getMessage(), RestStatus.fromCode(response.getStatusLine().getStatusCode()), responseException);
+ } else {
+ try {
+ elasticsearchException = parseEntity(entity, BytesRestResponse::errorFromXContent);
+ elasticsearchException.addSuppressed(responseException);
+ } catch (Exception e) {
+ RestStatus restStatus = RestStatus.fromCode(response.getStatusLine().getStatusCode());
+ elasticsearchException = new ElasticsearchStatusException("Unable to parse response body", restStatus, responseException);
+ elasticsearchException.addSuppressed(e);
+ }
}
+ return elasticsearchException;
}
+    <Resp> Resp parseEntity(
+            HttpEntity entity, CheckedFunction<XContentParser, Resp, IOException> entityParser) throws IOException {
+ if (entity == null) {
+ throw new IllegalStateException("Response body expected but not returned");
+ }
+ if (entity.getContentType() == null) {
+ throw new IllegalStateException("Elasticsearch didn't return the [Content-Type] header, unable to parse response body");
+ }
+ XContentType xContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue());
+ if (xContentType == null) {
+ throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue());
+ }
+ try (XContentParser parser = xContentType.xContent().createParser(registry, entity.getContent())) {
+ return entityParser.apply(parser);
+ }
+ }
+
+ static boolean convertExistsResponse(Response response) {
+ return response.getStatusLine().getStatusCode() == 200;
+ }
+    static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
+        Map<String, ContextParser<Object, ? extends Aggregation>> map = new HashMap<>();
+ map.put(CardinalityAggregationBuilder.NAME, (p, c) -> ParsedCardinality.fromXContent(p, (String) c));
+ map.put(InternalHDRPercentiles.NAME, (p, c) -> ParsedHDRPercentiles.fromXContent(p, (String) c));
+ map.put(InternalHDRPercentileRanks.NAME, (p, c) -> ParsedHDRPercentileRanks.fromXContent(p, (String) c));
+ map.put(InternalTDigestPercentiles.NAME, (p, c) -> ParsedTDigestPercentiles.fromXContent(p, (String) c));
+ map.put(InternalTDigestPercentileRanks.NAME, (p, c) -> ParsedTDigestPercentileRanks.fromXContent(p, (String) c));
+ map.put(PercentilesBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedPercentilesBucket.fromXContent(p, (String) c));
+ map.put(MinAggregationBuilder.NAME, (p, c) -> ParsedMin.fromXContent(p, (String) c));
+ map.put(MaxAggregationBuilder.NAME, (p, c) -> ParsedMax.fromXContent(p, (String) c));
+ map.put(SumAggregationBuilder.NAME, (p, c) -> ParsedSum.fromXContent(p, (String) c));
+ map.put(AvgAggregationBuilder.NAME, (p, c) -> ParsedAvg.fromXContent(p, (String) c));
+ map.put(ValueCountAggregationBuilder.NAME, (p, c) -> ParsedValueCount.fromXContent(p, (String) c));
+ map.put(InternalSimpleValue.NAME, (p, c) -> ParsedSimpleValue.fromXContent(p, (String) c));
+ map.put(DerivativePipelineAggregationBuilder.NAME, (p, c) -> ParsedDerivative.fromXContent(p, (String) c));
+ map.put(InternalBucketMetricValue.NAME, (p, c) -> ParsedBucketMetricValue.fromXContent(p, (String) c));
+ map.put(StatsAggregationBuilder.NAME, (p, c) -> ParsedStats.fromXContent(p, (String) c));
+ map.put(StatsBucketPipelineAggregationBuilder.NAME, (p, c) -> ParsedStatsBucket.fromXContent(p, (String) c));
+ map.put(ExtendedStatsAggregationBuilder.NAME, (p, c) -> ParsedExtendedStats.fromXContent(p, (String) c));
+ map.put(ExtendedStatsBucketPipelineAggregationBuilder.NAME,
+ (p, c) -> ParsedExtendedStatsBucket.fromXContent(p, (String) c));
+ map.put(GeoBoundsAggregationBuilder.NAME, (p, c) -> ParsedGeoBounds.fromXContent(p, (String) c));
+ map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
+ map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
+ map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
+ map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
+ map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
+ map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
+ map.put(MissingAggregationBuilder.NAME, (p, c) -> ParsedMissing.fromXContent(p, (String) c));
+ map.put(NestedAggregationBuilder.NAME, (p, c) -> ParsedNested.fromXContent(p, (String) c));
+ map.put(ReverseNestedAggregationBuilder.NAME, (p, c) -> ParsedReverseNested.fromXContent(p, (String) c));
+ map.put(GlobalAggregationBuilder.NAME, (p, c) -> ParsedGlobal.fromXContent(p, (String) c));
+ map.put(FilterAggregationBuilder.NAME, (p, c) -> ParsedFilter.fromXContent(p, (String) c));
+ map.put(InternalSampler.PARSER_NAME, (p, c) -> ParsedSampler.fromXContent(p, (String) c));
+ map.put(GeoGridAggregationBuilder.NAME, (p, c) -> ParsedGeoHashGrid.fromXContent(p, (String) c));
+ map.put(RangeAggregationBuilder.NAME, (p, c) -> ParsedRange.fromXContent(p, (String) c));
+ map.put(DateRangeAggregationBuilder.NAME, (p, c) -> ParsedDateRange.fromXContent(p, (String) c));
+ map.put(GeoDistanceAggregationBuilder.NAME, (p, c) -> ParsedGeoDistance.fromXContent(p, (String) c));
+ map.put(FiltersAggregationBuilder.NAME, (p, c) -> ParsedFilters.fromXContent(p, (String) c));
+ map.put(AdjacencyMatrixAggregationBuilder.NAME, (p, c) -> ParsedAdjacencyMatrix.fromXContent(p, (String) c));
+ map.put(SignificantLongTerms.NAME, (p, c) -> ParsedSignificantLongTerms.fromXContent(p, (String) c));
+ map.put(SignificantStringTerms.NAME, (p, c) -> ParsedSignificantStringTerms.fromXContent(p, (String) c));
+ map.put(ScriptedMetricAggregationBuilder.NAME, (p, c) -> ParsedScriptedMetric.fromXContent(p, (String) c));
+ map.put(ChildrenAggregationBuilder.NAME, (p, c) -> ParsedChildren.fromXContent(p, (String) c));
+ map.put(MatrixStatsAggregationBuilder.NAME, (p, c) -> ParsedMatrixStats.fromXContent(p, (String) c));
+        List<NamedXContentRegistry.Entry> entries = map.entrySet().stream()
+ .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
+ .collect(Collectors.toList());
+ entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(TermSuggestion.NAME),
+ (parser, context) -> TermSuggestion.fromXContent(parser, (String)context)));
+ entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(PhraseSuggestion.NAME),
+ (parser, context) -> PhraseSuggestion.fromXContent(parser, (String)context)));
+ entries.add(new NamedXContentRegistry.Entry(Suggest.Suggestion.class, new ParseField(CompletionSuggestion.NAME),
+ (parser, context) -> CompletionSuggestion.fromXContent(parser, (String)context)));
+ return entries;
+ }
}
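
Reviewer note: for anyone new to this API surface, here is a minimal, self-contained usage sketch (not part of this change). The host, index, type, and id values are illustrative assumptions; per the class Javadoc, the low level client is built and closed by the caller.

```java
// Sketch only: host, index, type, and id are illustrative assumptions.
import org.apache.http.HttpHost;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;

public class HighLevelClientUsage {
    public static void main(String[] args) throws Exception {
        // The low level client is built (and later closed) by the caller.
        RestClient lowLevelClient = RestClient.builder(new HttpHost("localhost", 9200, "http")).build();
        try {
            // The high level client simply wraps the externally managed low level client.
            RestHighLevelClient client = new RestHighLevelClient(lowLevelClient);
            GetResponse response = client.get(new GetRequest("index", "type", "id"));
            if (response.isExists()) {
                System.out.println(response.getSourceAsString());
            }
        } finally {
            lowLevelClient.close();
        }
    }
}
```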
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
new file mode 100644
index 0000000000000..b078a983357fc
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java
@@ -0,0 +1,705 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.bulk.BulkItemResponse;
+import org.elasticsearch.action.bulk.BulkProcessor;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkResponse;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.action.update.UpdateResponse;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.get.GetResult;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static java.util.Collections.singletonMap;
+
+public class CrudIT extends ESRestHighLevelClientTestCase {
+
+ public void testDelete() throws IOException {
+ {
+ // Testing deletion
+ String docId = "id";
+ highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")));
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
+ if (randomBoolean()) {
+ deleteRequest.version(1L);
+ }
+ DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
+ assertEquals("index", deleteResponse.getIndex());
+ assertEquals("type", deleteResponse.getType());
+ assertEquals(docId, deleteResponse.getId());
+ assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
+ }
+ {
+ // Testing non existing document
+ String docId = "does_not_exist";
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId);
+ DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
+ assertEquals("index", deleteResponse.getIndex());
+ assertEquals("type", deleteResponse.getType());
+ assertEquals(docId, deleteResponse.getId());
+ assertEquals(DocWriteResponse.Result.NOT_FOUND, deleteResponse.getResult());
+ }
+ {
+ // Testing version conflict
+ String docId = "version_conflict";
+ highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")));
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).version(2);
+ ElasticsearchException exception = expectThrows(ElasticsearchException.class,
+ () -> execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync));
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" + docId + "]: " +
+ "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage());
+ assertEquals("index", exception.getMetadata("es.index").get(0));
+ }
+ {
+ // Testing version type
+ String docId = "version_type";
+ highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar"))
+ .versionType(VersionType.EXTERNAL).version(12));
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(13);
+ DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
+ assertEquals("index", deleteResponse.getIndex());
+ assertEquals("type", deleteResponse.getType());
+ assertEquals(docId, deleteResponse.getId());
+ assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
+ }
+ {
+ // Testing version type with a wrong version
+ String docId = "wrong_version";
+ highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar"))
+ .versionType(VersionType.EXTERNAL).version(12));
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).versionType(VersionType.EXTERNAL).version(10);
+ execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
+ });
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][" +
+ docId + "]: version conflict, current version [12] is higher or equal to the one provided [10]]", exception.getMessage());
+ assertEquals("index", exception.getMetadata("es.index").get(0));
+ }
+ {
+ // Testing routing
+ String docId = "routing";
+ highLevelClient().index(new IndexRequest("index", "type", docId).source(Collections.singletonMap("foo", "bar")).routing("foo"));
+ DeleteRequest deleteRequest = new DeleteRequest("index", "type", docId).routing("foo");
+ DeleteResponse deleteResponse = execute(deleteRequest, highLevelClient()::delete, highLevelClient()::deleteAsync);
+ assertEquals("index", deleteResponse.getIndex());
+ assertEquals("type", deleteResponse.getType());
+ assertEquals(docId, deleteResponse.getId());
+ assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
+ }
+ }
+
+ public void testExists() throws IOException {
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
+ }
+ String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
+ StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON);
+ Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity);
+ assertEquals(201, response.getStatusLine().getStatusCode());
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ assertTrue(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "does_not_exist");
+ assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "does_not_exist").version(1);
+ assertFalse(execute(getRequest, highLevelClient()::exists, highLevelClient()::existsAsync));
+ }
+ }
+
+ public void testGet() throws IOException {
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ ElasticsearchException exception = expectThrows(ElasticsearchException.class,
+ () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync));
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ assertEquals("Elasticsearch exception [type=index_not_found_exception, reason=no such index]", exception.getMessage());
+ assertEquals("index", exception.getMetadata("es.index").get(0));
+ }
+
+ String document = "{\"field1\":\"value1\",\"field2\":\"value2\"}";
+ StringEntity stringEntity = new StringEntity(document, ContentType.APPLICATION_JSON);
+ Response response = client().performRequest("PUT", "/index/type/id", Collections.singletonMap("refresh", "wait_for"), stringEntity);
+ assertEquals(201, response.getStatusLine().getStatusCode());
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id").version(2);
+ ElasticsearchException exception = expectThrows(ElasticsearchException.class,
+ () -> execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync));
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, " + "reason=[type][id]: " +
+ "version conflict, current version [1] is different than the one provided [2]]", exception.getMessage());
+ assertEquals("index", exception.getMetadata("es.index").get(0));
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ if (randomBoolean()) {
+ getRequest.version(1L);
+ }
+ GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
+ assertEquals("index", getResponse.getIndex());
+ assertEquals("type", getResponse.getType());
+ assertEquals("id", getResponse.getId());
+ assertTrue(getResponse.isExists());
+ assertFalse(getResponse.isSourceEmpty());
+ assertEquals(1L, getResponse.getVersion());
+ assertEquals(document, getResponse.getSourceAsString());
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "does_not_exist");
+ GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
+ assertEquals("index", getResponse.getIndex());
+ assertEquals("type", getResponse.getType());
+ assertEquals("does_not_exist", getResponse.getId());
+ assertFalse(getResponse.isExists());
+ assertEquals(-1, getResponse.getVersion());
+ assertTrue(getResponse.isSourceEmpty());
+ assertNull(getResponse.getSourceAsString());
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ getRequest.fetchSourceContext(new FetchSourceContext(false, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY));
+ GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
+ assertEquals("index", getResponse.getIndex());
+ assertEquals("type", getResponse.getType());
+ assertEquals("id", getResponse.getId());
+ assertTrue(getResponse.isExists());
+ assertTrue(getResponse.isSourceEmpty());
+ assertEquals(1L, getResponse.getVersion());
+ assertNull(getResponse.getSourceAsString());
+ }
+ {
+ GetRequest getRequest = new GetRequest("index", "type", "id");
+ if (randomBoolean()) {
+ getRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, Strings.EMPTY_ARRAY));
+ } else {
+ getRequest.fetchSourceContext(new FetchSourceContext(true, Strings.EMPTY_ARRAY, new String[]{"field2"}));
+ }
+ GetResponse getResponse = execute(getRequest, highLevelClient()::get, highLevelClient()::getAsync);
+ assertEquals("index", getResponse.getIndex());
+ assertEquals("type", getResponse.getType());
+ assertEquals("id", getResponse.getId());
+ assertTrue(getResponse.isExists());
+ assertFalse(getResponse.isSourceEmpty());
+ assertEquals(1L, getResponse.getVersion());
+            Map<String, Object> sourceAsMap = getResponse.getSourceAsMap();
+ assertEquals(1, sourceAsMap.size());
+ assertEquals("value1", sourceAsMap.get("field1"));
+ }
+ }
+
+ public void testIndex() throws IOException {
+ final XContentType xContentType = randomFrom(XContentType.values());
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("test", "test").endObject());
+
+ IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult());
+ assertEquals("index", indexResponse.getIndex());
+ assertEquals("type", indexResponse.getType());
+ assertTrue(Strings.hasLength(indexResponse.getId()));
+ assertEquals(1L, indexResponse.getVersion());
+ assertNotNull(indexResponse.getShardId());
+ assertEquals(-1, indexResponse.getShardId().getId());
+ assertEquals("index", indexResponse.getShardId().getIndexName());
+ assertEquals("index", indexResponse.getShardId().getIndex().getName());
+ assertEquals("_na_", indexResponse.getShardId().getIndex().getUUID());
+ assertNotNull(indexResponse.getShardInfo());
+ assertEquals(0, indexResponse.getShardInfo().getFailed());
+ assertTrue(indexResponse.getShardInfo().getSuccessful() > 0);
+ assertTrue(indexResponse.getShardInfo().getTotal() > 0);
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "id");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 1).endObject());
+
+ IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals("index", indexResponse.getIndex());
+ assertEquals("type", indexResponse.getType());
+ assertEquals("id", indexResponse.getId());
+ assertEquals(1L, indexResponse.getVersion());
+
+ indexRequest = new IndexRequest("index", "type", "id");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("version", 2).endObject());
+
+ indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ assertEquals(RestStatus.OK, indexResponse.status());
+ assertEquals("index", indexResponse.getIndex());
+ assertEquals("type", indexResponse.getType());
+ assertEquals("id", indexResponse.getId());
+ assertEquals(2L, indexResponse.getVersion());
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ IndexRequest wrongRequest = new IndexRequest("index", "type", "id");
+ wrongRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
+ wrongRequest.version(5L);
+
+ execute(wrongRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ });
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: " +
+ "version conflict, current version [2] is different than the one provided [5]]", exception.getMessage());
+ assertEquals("index", exception.getMetadata("es.index").get(0));
+ }
+ {
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "missing_parent");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
+ indexRequest.parent("missing");
+
+ execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ });
+
+ assertEquals(RestStatus.BAD_REQUEST, exception.status());
+ assertEquals("Elasticsearch exception [type=illegal_argument_exception, " +
+ "reason=can't specify parent if no parent field has been configured]", exception.getMessage());
+ }
+ {
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "missing_pipeline");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
+ indexRequest.setPipeline("missing");
+
+ execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ });
+
+ assertEquals(RestStatus.BAD_REQUEST, exception.status());
+ assertEquals("Elasticsearch exception [type=illegal_argument_exception, " +
+ "reason=pipeline with id [missing] does not exist]", exception.getMessage());
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "external_version_type");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
+ indexRequest.version(12L);
+ indexRequest.versionType(VersionType.EXTERNAL);
+
+ IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals("index", indexResponse.getIndex());
+ assertEquals("type", indexResponse.getType());
+ assertEquals("external_version_type", indexResponse.getId());
+ assertEquals(12L, indexResponse.getVersion());
+ }
+ {
+ final IndexRequest indexRequest = new IndexRequest("index", "type", "with_create_op_type");
+ indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
+ indexRequest.opType(DocWriteRequest.OpType.CREATE);
+
+ IndexResponse indexResponse = execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals("index", indexResponse.getIndex());
+ assertEquals("type", indexResponse.getType());
+ assertEquals("with_create_op_type", indexResponse.getId());
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
+ });
+
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][with_create_op_type]: " +
+ "version conflict, document already exists (current version [1])]", exception.getMessage());
+ }
+ }
+
+ public void testUpdate() throws IOException {
+ {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "does_not_exist");
+ updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values()));
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
+ execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync));
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][does_not_exist]: document missing]",
+ exception.getMessage());
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "id");
+ indexRequest.source(singletonMap("field", "value"));
+ IndexResponse indexResponse = highLevelClient().index(indexRequest);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
+ updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.OK, updateResponse.status());
+ assertEquals(indexResponse.getVersion() + 1, updateResponse.getVersion());
+
+ UpdateRequest updateRequestConflict = new UpdateRequest("index", "type", "id");
+ updateRequestConflict.doc(singletonMap("field", "with_version_conflict"), randomFrom(XContentType.values()));
+ updateRequestConflict.version(indexResponse.getVersion());
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () ->
+ execute(updateRequestConflict, highLevelClient()::update, highLevelClient()::updateAsync));
+ assertEquals(RestStatus.CONFLICT, exception.status());
+ assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: version conflict, " +
+ "current version [2] is different than the one provided [1]]", exception.getMessage());
+ }
+ {
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
+ updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
+ if (randomBoolean()) {
+ updateRequest.parent("missing");
+ } else {
+ updateRequest.routing("missing");
+ }
+ execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ });
+
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][id]: document missing]",
+ exception.getMessage());
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "with_script");
+ indexRequest.source(singletonMap("counter", 12));
+ IndexResponse indexResponse = highLevelClient().index(indexRequest);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_script");
+ Script script = new Script(ScriptType.INLINE, "painless", "ctx._source.counter += params.count", singletonMap("count", 8));
+ updateRequest.script(script);
+ updateRequest.fetchSource(true);
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.OK, updateResponse.status());
+ assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
+ assertEquals(2L, updateResponse.getVersion());
+ assertEquals(20, updateResponse.getGetResult().sourceAsMap().get("counter"));
+
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "with_doc");
+ indexRequest.source("field_1", "one", "field_3", "three");
+ indexRequest.version(12L);
+ indexRequest.versionType(VersionType.EXTERNAL);
+ IndexResponse indexResponse = highLevelClient().index(indexRequest);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals(12L, indexResponse.getVersion());
+
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc");
+ updateRequest.doc(singletonMap("field_2", "two"), randomFrom(XContentType.values()));
+ updateRequest.fetchSource("field_*", "field_3");
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.OK, updateResponse.status());
+ assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
+ assertEquals(13L, updateResponse.getVersion());
+ GetResult getResult = updateResponse.getGetResult();
+ assertEquals(13L, updateResponse.getVersion());
+            Map<String, Object> sourceAsMap = getResult.sourceAsMap();
+ assertEquals("one", sourceAsMap.get("field_1"));
+ assertEquals("two", sourceAsMap.get("field_2"));
+ assertFalse(sourceAsMap.containsKey("field_3"));
+ }
+ {
+ IndexRequest indexRequest = new IndexRequest("index", "type", "noop");
+ indexRequest.source("field", "value");
+ IndexResponse indexResponse = highLevelClient().index(indexRequest);
+ assertEquals(RestStatus.CREATED, indexResponse.status());
+ assertEquals(1L, indexResponse.getVersion());
+
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "noop");
+ updateRequest.doc(singletonMap("field", "value"), randomFrom(XContentType.values()));
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.OK, updateResponse.status());
+ assertEquals(DocWriteResponse.Result.NOOP, updateResponse.getResult());
+ assertEquals(1L, updateResponse.getVersion());
+
+ updateRequest.detectNoop(false);
+
+ updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.OK, updateResponse.status());
+ assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());
+ assertEquals(2L, updateResponse.getVersion());
+ }
+ {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_upsert");
+ updateRequest.upsert(singletonMap("doc_status", "created"));
+ updateRequest.doc(singletonMap("doc_status", "updated"));
+ updateRequest.fetchSource(true);
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.CREATED, updateResponse.status());
+ assertEquals("index", updateResponse.getIndex());
+ assertEquals("type", updateResponse.getType());
+ assertEquals("with_upsert", updateResponse.getId());
+ GetResult getResult = updateResponse.getGetResult();
+ assertEquals(1L, updateResponse.getVersion());
+ assertEquals("created", getResult.sourceAsMap().get("doc_status"));
+ }
+ {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_doc_as_upsert");
+ updateRequest.doc(singletonMap("field", "initialized"));
+ updateRequest.fetchSource(true);
+ updateRequest.docAsUpsert(true);
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.CREATED, updateResponse.status());
+ assertEquals("index", updateResponse.getIndex());
+ assertEquals("type", updateResponse.getType());
+ assertEquals("with_doc_as_upsert", updateResponse.getId());
+ GetResult getResult = updateResponse.getGetResult();
+ assertEquals(1L, updateResponse.getVersion());
+ assertEquals("initialized", getResult.sourceAsMap().get("field"));
+ }
+ {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "with_scripted_upsert");
+ updateRequest.fetchSource(true);
+ updateRequest.script(new Script(ScriptType.INLINE, "painless", "ctx._source.level = params.test", singletonMap("test", "C")));
+ updateRequest.scriptedUpsert(true);
+ updateRequest.upsert(singletonMap("level", "A"));
+
+ UpdateResponse updateResponse = execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ assertEquals(RestStatus.CREATED, updateResponse.status());
+ assertEquals("index", updateResponse.getIndex());
+ assertEquals("type", updateResponse.getType());
+ assertEquals("with_scripted_upsert", updateResponse.getId());
+
+ GetResult getResult = updateResponse.getGetResult();
+ assertEquals(1L, updateResponse.getVersion());
+ assertEquals("C", getResult.sourceAsMap().get("level"));
+ }
+ {
+ IllegalStateException exception = expectThrows(IllegalStateException.class, () -> {
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
+ updateRequest.doc(new IndexRequest().source(Collections.singletonMap("field", "doc"), XContentType.JSON));
+ updateRequest.upsert(new IndexRequest().source(Collections.singletonMap("field", "upsert"), XContentType.YAML));
+ execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
+ });
+ assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents",
+ exception.getMessage());
+ }
+ }
+
+ public void testBulk() throws IOException {
+ int nbItems = randomIntBetween(10, 100);
+ boolean[] errors = new boolean[nbItems];
+
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+
+ BulkRequest bulkRequest = new BulkRequest();
+ for (int i = 0; i < nbItems; i++) {
+ String id = String.valueOf(i);
+ boolean erroneous = randomBoolean();
+ errors[i] = erroneous;
+
+ DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
+ if (opType == DocWriteRequest.OpType.DELETE) {
+ if (erroneous == false) {
+ assertEquals(RestStatus.CREATED,
+ highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status());
+ }
+ DeleteRequest deleteRequest = new DeleteRequest("index", "test", id);
+ bulkRequest.add(deleteRequest);
+
+ } else {
+ BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes();
+ if (opType == DocWriteRequest.OpType.INDEX) {
+ IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType);
+ if (erroneous) {
+ indexRequest.version(12L);
+ }
+ bulkRequest.add(indexRequest);
+
+ } else if (opType == DocWriteRequest.OpType.CREATE) {
+ IndexRequest createRequest = new IndexRequest("index", "test", id).source(source, xContentType).create(true);
+ if (erroneous) {
+ assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status());
+ }
+ bulkRequest.add(createRequest);
+
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ UpdateRequest updateRequest = new UpdateRequest("index", "test", id)
+ .doc(new IndexRequest().source(source, xContentType));
+ if (erroneous == false) {
+ assertEquals(RestStatus.CREATED,
+ highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status());
+ }
+ bulkRequest.add(updateRequest);
+ }
+ }
+ }
+
+ BulkResponse bulkResponse = execute(bulkRequest, highLevelClient()::bulk, highLevelClient()::bulkAsync);
+ assertEquals(RestStatus.OK, bulkResponse.status());
+ assertTrue(bulkResponse.getTook().getMillis() > 0);
+ assertEquals(nbItems, bulkResponse.getItems().length);
+
+ validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest);
+ }
+
+ public void testBulkProcessorIntegration() throws IOException, InterruptedException {
+ int nbItems = randomIntBetween(10, 100);
+ boolean[] errors = new boolean[nbItems];
+
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+
+        AtomicReference<BulkResponse> responseRef = new AtomicReference<>();
+        AtomicReference<BulkRequest> requestRef = new AtomicReference<>();
+        AtomicReference<Throwable> error = new AtomicReference<>();
+
+ BulkProcessor.Listener listener = new BulkProcessor.Listener() {
+ @Override
+ public void beforeBulk(long executionId, BulkRequest request) {
+
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
+ responseRef.set(response);
+ requestRef.set(request);
+ }
+
+ @Override
+ public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
+ error.set(failure);
+ }
+ };
+
+ ThreadPool threadPool = new ThreadPool(Settings.builder().put("node.name", getClass().getName()).build());
+ // Pull the client to a variable to work around https://bugs.eclipse.org/bugs/show_bug.cgi?id=514884
+ RestHighLevelClient hlClient = highLevelClient();
+ try(BulkProcessor processor = new BulkProcessor.Builder(hlClient::bulkAsync, listener, threadPool)
+ .setConcurrentRequests(0)
+ .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.GB))
+ .setBulkActions(nbItems + 1)
+ .build()) {
+ for (int i = 0; i < nbItems; i++) {
+ String id = String.valueOf(i);
+ boolean erroneous = randomBoolean();
+ errors[i] = erroneous;
+
+ DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
+ if (opType == DocWriteRequest.OpType.DELETE) {
+ if (erroneous == false) {
+ assertEquals(RestStatus.CREATED,
+ highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status());
+ }
+ DeleteRequest deleteRequest = new DeleteRequest("index", "test", id);
+ processor.add(deleteRequest);
+
+ } else {
+ if (opType == DocWriteRequest.OpType.INDEX) {
+ IndexRequest indexRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i);
+ if (erroneous) {
+ indexRequest.version(12L);
+ }
+ processor.add(indexRequest);
+
+ } else if (opType == DocWriteRequest.OpType.CREATE) {
+ IndexRequest createRequest = new IndexRequest("index", "test", id).source(xContentType, "id", i).create(true);
+ if (erroneous) {
+ assertEquals(RestStatus.CREATED, highLevelClient().index(createRequest).status());
+ }
+ processor.add(createRequest);
+
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ UpdateRequest updateRequest = new UpdateRequest("index", "test", id)
+ .doc(new IndexRequest().source(xContentType, "id", i));
+ if (erroneous == false) {
+ assertEquals(RestStatus.CREATED,
+ highLevelClient().index(new IndexRequest("index", "test", id).source("field", -1)).status());
+ }
+ processor.add(updateRequest);
+ }
+ }
+ }
+ assertNull(responseRef.get());
+ assertNull(requestRef.get());
+ }
+
+ BulkResponse bulkResponse = responseRef.get();
+ BulkRequest bulkRequest = requestRef.get();
+
+ assertEquals(RestStatus.OK, bulkResponse.status());
+ assertTrue(bulkResponse.getTook().getMillis() > 0);
+ assertEquals(nbItems, bulkResponse.getItems().length);
+ assertNull(error.get());
+
+ validateBulkResponses(nbItems, errors, bulkResponse, bulkRequest);
+
+ terminate(threadPool);
+ }
+
+ private void validateBulkResponses(int nbItems, boolean[] errors, BulkResponse bulkResponse, BulkRequest bulkRequest) {
+ for (int i = 0; i < nbItems; i++) {
+ BulkItemResponse bulkItemResponse = bulkResponse.getItems()[i];
+
+ assertEquals(i, bulkItemResponse.getItemId());
+ assertEquals("index", bulkItemResponse.getIndex());
+ assertEquals("test", bulkItemResponse.getType());
+ assertEquals(String.valueOf(i), bulkItemResponse.getId());
+
+ DocWriteRequest.OpType requestOpType = bulkRequest.requests().get(i).opType();
+ if (requestOpType == DocWriteRequest.OpType.INDEX || requestOpType == DocWriteRequest.OpType.CREATE) {
+ assertEquals(errors[i], bulkItemResponse.isFailed());
+ assertEquals(errors[i] ? RestStatus.CONFLICT : RestStatus.CREATED, bulkItemResponse.status());
+ } else if (requestOpType == DocWriteRequest.OpType.UPDATE) {
+ assertEquals(errors[i], bulkItemResponse.isFailed());
+ assertEquals(errors[i] ? RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status());
+ } else if (requestOpType == DocWriteRequest.OpType.DELETE) {
+ assertFalse(bulkItemResponse.isFailed());
+ assertEquals(errors[i] ? RestStatus.NOT_FOUND : RestStatus.OK, bulkItemResponse.status());
+ }
+ }
+ }
+}
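
As a companion to `testBulkProcessorIntegration` above, this is a sketch of driving `BulkProcessor` through the high level client's `bulkAsync` outside a test. Index and type names and flush settings are illustrative assumptions, and the client and thread pool are assumed to be created and terminated by the caller.

```java
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.threadpool.ThreadPool;

public class BulkProcessorSketch {

    // Buffers sample index operations and flushes them through bulkAsync.
    static void indexSamples(RestHighLevelClient client, ThreadPool threadPool) throws Exception {
        BulkProcessor.Listener listener = new BulkProcessor.Listener() {
            @Override
            public void beforeBulk(long executionId, BulkRequest request) {
                // Called just before each flush; no-op here.
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                // Inspect response.hasFailures() in real code.
            }

            @Override
            public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                // The whole bulk request failed, e.g. the node was unreachable.
            }
        };
        try (BulkProcessor processor = new BulkProcessor.Builder(client::bulkAsync, listener, threadPool)
                .setConcurrentRequests(0) // flush on the thread that calls add()
                .setBulkActions(100)      // flush once 100 actions are buffered
                .build()) {
            for (int i = 0; i < 10; i++) {
                processor.add(new IndexRequest("index", "type", String.valueOf(i)).source("field", i));
            }
        } // close() flushes whatever is still buffered
    }
}
```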
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java
new file mode 100644
index 0000000000000..8ad42c2232020
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.ProtocolVersion;
+import org.apache.http.RequestLine;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.message.BasicHeader;
+import org.apache.http.message.BasicHttpResponse;
+import org.apache.http.message.BasicRequestLine;
+import org.apache.http.message.BasicStatusLine;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Build;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.main.MainRequest;
+import org.elasticsearch.action.main.MainResponse;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.emptySet;
+import static org.elasticsearch.client.ESRestHighLevelClientTestCase.execute;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyMapOf;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Matchers.anyVararg;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+
+/**
+ * Tests and demonstrates how {@link RestHighLevelClient} can be extended to support custom endpoints.
+ */
+public class CustomRestHighLevelClientTests extends ESTestCase {
+
+ private static final String ENDPOINT = "/_custom";
+
+ private CustomRestClient restHighLevelClient;
+
+ @Before
+ @SuppressWarnings("unchecked")
+ public void initClients() throws IOException {
+ if (restHighLevelClient == null) {
+ final RestClient restClient = mock(RestClient.class);
+ restHighLevelClient = new CustomRestClient(restClient);
+
+ doAnswer(mock -> mockPerformRequest((Header) mock.getArguments()[4]))
+ .when(restClient)
+ .performRequest(eq(HttpGet.METHOD_NAME), eq(ENDPOINT), anyMapOf(String.class, String.class), anyObject(), anyVararg());
+
+ doAnswer(mock -> mockPerformRequestAsync((Header) mock.getArguments()[5], (ResponseListener) mock.getArguments()[4]))
+ .when(restClient)
+ .performRequestAsync(eq(HttpGet.METHOD_NAME), eq(ENDPOINT), anyMapOf(String.class, String.class),
+ any(HttpEntity.class), any(ResponseListener.class), anyVararg());
+ }
+ }
+
+ public void testCustomEndpoint() throws IOException {
+ final MainRequest request = new MainRequest();
+ final Header header = new BasicHeader("node_name", randomAlphaOfLengthBetween(1, 10));
+
+ MainResponse response = execute(request, restHighLevelClient::custom, restHighLevelClient::customAsync, header);
+ assertEquals(header.getValue(), response.getNodeName());
+
+ response = execute(request, restHighLevelClient::customAndParse, restHighLevelClient::customAndParseAsync, header);
+ assertEquals(header.getValue(), response.getNodeName());
+ }
+
+ /**
+     * The {@link RestHighLevelClient} must declare the following execution methods using the protected modifier
+     * so that they can be used by subclasses to implement custom logic.
+ */
+ @SuppressForbidden(reason = "We're forced to uses Class#getDeclaredMethods() here because this test checks protected methods")
+ public void testMethodsVisibility() throws ClassNotFoundException {
+ String[] methodNames = new String[]{"performRequest", "performRequestAndParseEntity", "performRequestAsync",
+ "performRequestAsyncAndParseEntity"};
+ for (String methodName : methodNames) {
+ boolean found = false;
+ for (Method method : RestHighLevelClient.class.getDeclaredMethods()) {
+ if (method.getName().equals(methodName)) {
+ assertTrue("Method " + methodName + " must be protected", Modifier.isProtected(method.getModifiers()));
+ found = true;
+ }
+ }
+ assertTrue("Failed to find method " + methodName, found);
+ }
+ }
+
+ /**
+ * Mocks the asynchronous request execution by calling the {@link #mockPerformRequest(Header)} method.
+ */
+ private Void mockPerformRequestAsync(Header httpHeader, ResponseListener responseListener) {
+ try {
+ responseListener.onSuccess(mockPerformRequest(httpHeader));
+ } catch (IOException e) {
+ responseListener.onFailure(e);
+ }
+ return null;
+ }
+
+ /**
+ * Mocks the synchronous request execution like if it was executed by Elasticsearch.
+ */
+ private Response mockPerformRequest(Header httpHeader) throws IOException {
+ ProtocolVersion protocol = new ProtocolVersion("HTTP", 1, 1);
+ HttpResponse httpResponse = new BasicHttpResponse(new BasicStatusLine(protocol, 200, "OK"));
+
+ MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT, true);
+ BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef();
+ httpResponse.setEntity(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON));
+
+ RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol);
+ return new Response(requestLine, new HttpHost("localhost", 9200), httpResponse);
+ }
+
+ /**
+     * A custom high level client that provides custom methods to execute a request and get its associated response back.
+ */
+ static class CustomRestClient extends RestHighLevelClient {
+
+ private CustomRestClient(RestClient restClient) {
+ super(restClient);
+ }
+
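+        // The "custom" variants convert the raw low-level Response themselves, while the
+        // "customAndParse" variants let the client parse only the response entity via fromXContent.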
+ MainResponse custom(MainRequest mainRequest, Header... headers) throws IOException {
+ return performRequest(mainRequest, this::toRequest, this::toResponse, emptySet(), headers);
+ }
+
+ MainResponse customAndParse(MainRequest mainRequest, Header... headers) throws IOException {
+ return performRequestAndParseEntity(mainRequest, this::toRequest, MainResponse::fromXContent, emptySet(), headers);
+ }
+
+        void customAsync(MainRequest mainRequest, ActionListener<MainResponse> listener, Header... headers) {
+ performRequestAsync(mainRequest, this::toRequest, this::toResponse, listener, emptySet(), headers);
+ }
+
+        void customAndParseAsync(MainRequest mainRequest, ActionListener<MainResponse> listener, Header... headers) {
+ performRequestAsyncAndParseEntity(mainRequest, this::toRequest, MainResponse::fromXContent, listener, emptySet(), headers);
+ }
+
+ Request toRequest(MainRequest mainRequest) throws IOException {
+ return new Request(HttpGet.METHOD_NAME, ENDPOINT, emptyMap(), null);
+ }
+
+ MainResponse toResponse(Response response) throws IOException {
+ return parseEntity(response.getEntity(), MainResponse::fromXContent);
+ }
+ }
+}
\ No newline at end of file
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
index bc12b1433d7e4..cdd8317830909 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java
@@ -19,6 +19,9 @@
package org.elasticsearch.client;
+import org.apache.http.Header;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.AfterClass;
import org.junit.Before;
@@ -38,11 +41,35 @@ public void initHighLevelClient() throws IOException {
}
@AfterClass
- public static void cleanupClient() throws IOException {
+ public static void cleanupClient() {
restHighLevelClient = null;
}
protected static RestHighLevelClient highLevelClient() {
return restHighLevelClient;
}
+
+ /**
+ * Executes the provided request using either the sync method or its async variant, both provided as functions
+ */
+    protected static <Req, Resp> Resp execute(Req request, SyncMethod<Req, Resp> syncMethod,
+                                              AsyncMethod<Req, Resp> asyncMethod, Header... headers) throws IOException {
+ if (randomBoolean()) {
+ return syncMethod.execute(request, headers);
+ } else {
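+            // PlainActionFuture is itself an ActionListener, so it captures the async outcome and
+            // actionGet() blocks until completion, rethrowing any failure.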
+            PlainActionFuture<Resp> future = PlainActionFuture.newFuture();
+ asyncMethod.execute(request, future, headers);
+ return future.actionGet();
+ }
+ }
+
+ @FunctionalInterface
+    protected interface SyncMethod<Request, Response> {
+ Response execute(Request request, Header... headers) throws IOException;
+ }
+
+ @FunctionalInterface
+    protected interface AsyncMethod<Request, Response> {
+        void execute(Request request, ActionListener<Response> listener, Header... headers);
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MainActionIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MainActionIT.java
deleted file mode 100644
index 717ab7a44f3fd..0000000000000
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MainActionIT.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.client;
-
-public class MainActionIT extends ESRestHighLevelClientTestCase {
-
- public void testPing() {
- assertTrue(highLevelClient().ping());
- }
-}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java
new file mode 100644
index 0000000000000..b22ded52655df
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.elasticsearch.action.main.MainResponse;
+
+import java.io.IOException;
+import java.util.Map;
+
+public class PingAndInfoIT extends ESRestHighLevelClientTestCase {
+
+ public void testPing() throws IOException {
+ assertTrue(highLevelClient().ping());
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testInfo() throws IOException {
+ MainResponse info = highLevelClient().info();
+ // compare with what the low level client outputs
+        Map<String, Object> infoAsMap = entityAsMap(adminClient().performRequest("GET", "/"));
+ assertEquals(infoAsMap.get("cluster_name"), info.getClusterName().value());
+ assertEquals(infoAsMap.get("cluster_uuid"), info.getClusterUuid());
+
+ // only check node name existence, might be a different one from what was hit by low level client in multi-node cluster
+ assertNotNull(info.getNodeName());
+        Map<String, Object> versionMap = (Map<String, Object>) infoAsMap.get("version");
+ assertEquals(versionMap.get("build_hash"), info.getBuild().shortHash());
+ assertEquals(versionMap.get("build_date"), info.getBuild().date());
+ assertEquals(versionMap.get("build_snapshot"), info.getBuild().isSnapshot());
+ assertEquals(versionMap.get("number"), info.getVersion().toString());
+ assertEquals(versionMap.get("lucene_version"), info.getVersion().luceneVersion.toString());
+ }
+
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java
new file mode 100644
index 0000000000000..f18e348adce5e
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java
@@ -0,0 +1,906 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.bulk.BulkRequest;
+import org.elasticsearch.action.bulk.BulkShardRequest;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.action.support.replication.ReplicatedWriteRequest;
+import org.elasticsearch.action.support.replication.ReplicationRequest;
+import org.elasticsearch.action.update.UpdateRequest;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.Streams;
+import org.elasticsearch.common.lucene.uid.Versions;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.rest.action.search.RestSearchAction;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.collapse.CollapseBuilder;
+import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
+import org.elasticsearch.search.rescore.QueryRescorerBuilder;
+import org.elasticsearch.search.suggest.SuggestBuilder;
+import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.RandomObjects;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.StringJoiner;
+import java.util.function.Consumer;
+import java.util.function.Function;
+
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.client.Request.enforceSameContentType;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
+
+public class RequestTests extends ESTestCase {
+
+ public void testPing() {
+ Request request = Request.ping();
+ assertEquals("/", request.endpoint);
+ assertEquals(0, request.params.size());
+ assertNull(request.entity);
+ assertEquals("HEAD", request.method);
+ }
+
+ public void testInfo() {
+ Request request = Request.info();
+ assertEquals("/", request.endpoint);
+ assertEquals(0, request.params.size());
+ assertNull(request.entity);
+ assertEquals("GET", request.method);
+ }
+
+ public void testGet() {
+ getAndExistsTest(Request::get, "GET");
+ }
+
+ public void testDelete() throws IOException {
+ String index = randomAlphaOfLengthBetween(3, 10);
+ String type = randomAlphaOfLengthBetween(3, 10);
+ String id = randomAlphaOfLengthBetween(3, 10);
+ DeleteRequest deleteRequest = new DeleteRequest(index, type, id);
+
+        Map<String, String> expectedParams = new HashMap<>();
+
+ setRandomTimeout(deleteRequest, expectedParams);
+ setRandomRefreshPolicy(deleteRequest, expectedParams);
+ setRandomVersion(deleteRequest, expectedParams);
+ setRandomVersionType(deleteRequest, expectedParams);
+
+ if (frequently()) {
+ if (randomBoolean()) {
+ String routing = randomAlphaOfLengthBetween(3, 10);
+ deleteRequest.routing(routing);
+ expectedParams.put("routing", routing);
+ }
+ if (randomBoolean()) {
+ String parent = randomAlphaOfLengthBetween(3, 10);
+ deleteRequest.parent(parent);
+ expectedParams.put("parent", parent);
+ }
+ }
+
+ Request request = Request.delete(deleteRequest);
+ assertEquals("/" + index + "/" + type + "/" + id, request.endpoint);
+ assertEquals(expectedParams, request.params);
+ assertEquals("DELETE", request.method);
+ assertNull(request.entity);
+ }
+
+ public void testExists() {
+ getAndExistsTest(Request::exists, "HEAD");
+ }
+
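+    // Document GET and HEAD requests share the same endpoint and parameters, so one helper covers
+    // both converters; only the expected HTTP method differs.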
+    private static void getAndExistsTest(Function<GetRequest, Request> requestConverter, String method) {
+ String index = randomAlphaOfLengthBetween(3, 10);
+ String type = randomAlphaOfLengthBetween(3, 10);
+ String id = randomAlphaOfLengthBetween(3, 10);
+ GetRequest getRequest = new GetRequest(index, type, id);
+
+        Map<String, String> expectedParams = new HashMap<>();
+ if (randomBoolean()) {
+ if (randomBoolean()) {
+ String preference = randomAlphaOfLengthBetween(3, 10);
+ getRequest.preference(preference);
+ expectedParams.put("preference", preference);
+ }
+ if (randomBoolean()) {
+ String routing = randomAlphaOfLengthBetween(3, 10);
+ getRequest.routing(routing);
+ expectedParams.put("routing", routing);
+ }
+ if (randomBoolean()) {
+ boolean realtime = randomBoolean();
+ getRequest.realtime(realtime);
+ if (realtime == false) {
+ expectedParams.put("realtime", "false");
+ }
+ }
+ if (randomBoolean()) {
+ boolean refresh = randomBoolean();
+ getRequest.refresh(refresh);
+ if (refresh) {
+ expectedParams.put("refresh", "true");
+ }
+ }
+ if (randomBoolean()) {
+ long version = randomLong();
+ getRequest.version(version);
+ if (version != Versions.MATCH_ANY) {
+ expectedParams.put("version", Long.toString(version));
+ }
+ }
+ if (randomBoolean()) {
+ VersionType versionType = randomFrom(VersionType.values());
+ getRequest.versionType(versionType);
+ if (versionType != VersionType.INTERNAL) {
+ expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT));
+ }
+ }
+ if (randomBoolean()) {
+ int numStoredFields = randomIntBetween(1, 10);
+ String[] storedFields = new String[numStoredFields];
+ StringBuilder storedFieldsParam = new StringBuilder();
+ for (int i = 0; i < numStoredFields; i++) {
+ String storedField = randomAlphaOfLengthBetween(3, 10);
+ storedFields[i] = storedField;
+ storedFieldsParam.append(storedField);
+ if (i < numStoredFields - 1) {
+ storedFieldsParam.append(",");
+ }
+ }
+ getRequest.storedFields(storedFields);
+ expectedParams.put("stored_fields", storedFieldsParam.toString());
+ }
+ if (randomBoolean()) {
+ randomizeFetchSourceContextParams(getRequest::fetchSourceContext, expectedParams);
+ }
+ }
+ Request request = requestConverter.apply(getRequest);
+ assertEquals("/" + index + "/" + type + "/" + id, request.endpoint);
+ assertEquals(expectedParams, request.params);
+ assertNull(request.entity);
+ assertEquals(method, request.method);
+ }
+
+ public void testIndex() throws IOException {
+ String index = randomAlphaOfLengthBetween(3, 10);
+ String type = randomAlphaOfLengthBetween(3, 10);
+ IndexRequest indexRequest = new IndexRequest(index, type);
+
+ String id = randomBoolean() ? randomAlphaOfLengthBetween(3, 10) : null;
+ indexRequest.id(id);
+
+        Map<String, String> expectedParams = new HashMap<>();
+
+ String method = "POST";
+ if (id != null) {
+ method = "PUT";
+ if (randomBoolean()) {
+ indexRequest.opType(DocWriteRequest.OpType.CREATE);
+ }
+ }
+
+ setRandomTimeout(indexRequest, expectedParams);
+ setRandomRefreshPolicy(indexRequest, expectedParams);
+
+        // The _create endpoint has dedicated version handling: any version allowed for creation
+        // resolves to MATCH_DELETED, so that is the only value expected as a parameter
+ if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
+ indexRequest.version(randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED));
+ expectedParams.put("version", Long.toString(Versions.MATCH_DELETED));
+ } else {
+ setRandomVersion(indexRequest, expectedParams);
+ setRandomVersionType(indexRequest, expectedParams);
+ }
+
+ if (frequently()) {
+ if (randomBoolean()) {
+ String routing = randomAlphaOfLengthBetween(3, 10);
+ indexRequest.routing(routing);
+ expectedParams.put("routing", routing);
+ }
+ if (randomBoolean()) {
+ String parent = randomAlphaOfLengthBetween(3, 10);
+ indexRequest.parent(parent);
+ expectedParams.put("parent", parent);
+ }
+ if (randomBoolean()) {
+ String pipeline = randomAlphaOfLengthBetween(3, 10);
+ indexRequest.setPipeline(pipeline);
+ expectedParams.put("pipeline", pipeline);
+ }
+ }
+
+ XContentType xContentType = randomFrom(XContentType.values());
+ int nbFields = randomIntBetween(0, 10);
+ try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+ builder.startObject();
+ for (int i = 0; i < nbFields; i++) {
+ builder.field("field_" + i, i);
+ }
+ builder.endObject();
+ indexRequest.source(builder);
+ }
+
+ Request request = Request.index(indexRequest);
+ if (indexRequest.opType() == DocWriteRequest.OpType.CREATE) {
+ assertEquals("/" + index + "/" + type + "/" + id + "/_create", request.endpoint);
+ } else if (id != null) {
+ assertEquals("/" + index + "/" + type + "/" + id, request.endpoint);
+ } else {
+ assertEquals("/" + index + "/" + type, request.endpoint);
+ }
+ assertEquals(expectedParams, request.params);
+ assertEquals(method, request.method);
+
+ HttpEntity entity = request.entity;
+ assertTrue(entity instanceof ByteArrayEntity);
+ assertEquals(indexRequest.getContentType().mediaType(), entity.getContentType().getValue());
+ try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
+ assertEquals(nbFields, parser.map().size());
+ }
+ }
+
+ public void testUpdate() throws IOException {
+ XContentType xContentType = randomFrom(XContentType.values());
+
+        Map<String, String> expectedParams = new HashMap<>();
+ String index = randomAlphaOfLengthBetween(3, 10);
+ String type = randomAlphaOfLengthBetween(3, 10);
+ String id = randomAlphaOfLengthBetween(3, 10);
+
+ UpdateRequest updateRequest = new UpdateRequest(index, type, id);
+ updateRequest.detectNoop(randomBoolean());
+
+ if (randomBoolean()) {
+ BytesReference source = RandomObjects.randomSource(random(), xContentType);
+ updateRequest.doc(new IndexRequest().source(source, xContentType));
+
+ boolean docAsUpsert = randomBoolean();
+ updateRequest.docAsUpsert(docAsUpsert);
+ if (docAsUpsert) {
+ expectedParams.put("doc_as_upsert", "true");
+ }
+ } else {
+ updateRequest.script(mockScript("_value + 1"));
+ updateRequest.scriptedUpsert(randomBoolean());
+ }
+ if (randomBoolean()) {
+ BytesReference source = RandomObjects.randomSource(random(), xContentType);
+ updateRequest.upsert(new IndexRequest().source(source, xContentType));
+ }
+ if (randomBoolean()) {
+ String routing = randomAlphaOfLengthBetween(3, 10);
+ updateRequest.routing(routing);
+ expectedParams.put("routing", routing);
+ }
+ if (randomBoolean()) {
+ String parent = randomAlphaOfLengthBetween(3, 10);
+ updateRequest.parent(parent);
+ expectedParams.put("parent", parent);
+ }
+ if (randomBoolean()) {
+ String timeout = randomTimeValue();
+ updateRequest.timeout(timeout);
+ expectedParams.put("timeout", timeout);
+ } else {
+ expectedParams.put("timeout", ReplicationRequest.DEFAULT_TIMEOUT.getStringRep());
+ }
+ if (randomBoolean()) {
+ WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values());
+ updateRequest.setRefreshPolicy(refreshPolicy);
+ if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
+ expectedParams.put("refresh", refreshPolicy.getValue());
+ }
+ }
+ if (randomBoolean()) {
+ int waitForActiveShards = randomIntBetween(0, 10);
+ updateRequest.waitForActiveShards(waitForActiveShards);
+ expectedParams.put("wait_for_active_shards", String.valueOf(waitForActiveShards));
+ }
+ if (randomBoolean()) {
+ long version = randomLong();
+ updateRequest.version(version);
+ if (version != Versions.MATCH_ANY) {
+ expectedParams.put("version", Long.toString(version));
+ }
+ }
+ if (randomBoolean()) {
+ VersionType versionType = randomFrom(VersionType.values());
+ updateRequest.versionType(versionType);
+ if (versionType != VersionType.INTERNAL) {
+ expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT));
+ }
+ }
+ if (randomBoolean()) {
+ int retryOnConflict = randomIntBetween(0, 5);
+ updateRequest.retryOnConflict(retryOnConflict);
+ if (retryOnConflict > 0) {
+ expectedParams.put("retry_on_conflict", String.valueOf(retryOnConflict));
+ }
+ }
+ if (randomBoolean()) {
+ randomizeFetchSourceContextParams(updateRequest::fetchSource, expectedParams);
+ }
+
+ Request request = Request.update(updateRequest);
+ assertEquals("/" + index + "/" + type + "/" + id + "/_update", request.endpoint);
+ assertEquals(expectedParams, request.params);
+ assertEquals("POST", request.method);
+
+ HttpEntity entity = request.entity;
+ assertTrue(entity instanceof ByteArrayEntity);
+
+ UpdateRequest parsedUpdateRequest = new UpdateRequest();
+
+ XContentType entityContentType = XContentType.fromMediaTypeOrFormat(entity.getContentType().getValue());
+ try (XContentParser parser = createParser(entityContentType.xContent(), entity.getContent())) {
+ parsedUpdateRequest.fromXContent(parser);
+ }
+
+ assertEquals(updateRequest.scriptedUpsert(), parsedUpdateRequest.scriptedUpsert());
+ assertEquals(updateRequest.docAsUpsert(), parsedUpdateRequest.docAsUpsert());
+ assertEquals(updateRequest.detectNoop(), parsedUpdateRequest.detectNoop());
+ assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource());
+ assertEquals(updateRequest.script(), parsedUpdateRequest.script());
+ if (updateRequest.doc() != null) {
+ assertToXContentEquivalent(updateRequest.doc().source(), parsedUpdateRequest.doc().source(), xContentType);
+ } else {
+ assertNull(parsedUpdateRequest.doc());
+ }
+ if (updateRequest.upsertRequest() != null) {
+ assertToXContentEquivalent(updateRequest.upsertRequest().source(), parsedUpdateRequest.upsertRequest().source(), xContentType);
+ } else {
+ assertNull(parsedUpdateRequest.upsertRequest());
+ }
+ }
+
+ public void testUpdateWithDifferentContentTypes() throws IOException {
+ IllegalStateException exception = expectThrows(IllegalStateException.class, () -> {
+ UpdateRequest updateRequest = new UpdateRequest();
+ updateRequest.doc(new IndexRequest().source(singletonMap("field", "doc"), XContentType.JSON));
+ updateRequest.upsert(new IndexRequest().source(singletonMap("field", "upsert"), XContentType.YAML));
+ Request.update(updateRequest);
+ });
+ assertEquals("Update request cannot have different content types for doc [JSON] and upsert [YAML] documents",
+ exception.getMessage());
+ }
+
+ public void testBulk() throws IOException {
+        Map<String, String> expectedParams = new HashMap<>();
+
+ BulkRequest bulkRequest = new BulkRequest();
+ if (randomBoolean()) {
+ String timeout = randomTimeValue();
+ bulkRequest.timeout(timeout);
+ expectedParams.put("timeout", timeout);
+ } else {
+ expectedParams.put("timeout", BulkShardRequest.DEFAULT_TIMEOUT.getStringRep());
+ }
+
+ if (randomBoolean()) {
+ WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values());
+ bulkRequest.setRefreshPolicy(refreshPolicy);
+ if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
+ expectedParams.put("refresh", refreshPolicy.getValue());
+ }
+ }
+
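+        // Bulk bodies only support JSON and SMILE, hence the restricted random choice here.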
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+
+ int nbItems = randomIntBetween(10, 100);
+ for (int i = 0; i < nbItems; i++) {
+ String index = randomAlphaOfLength(5);
+ String type = randomAlphaOfLength(5);
+ String id = randomAlphaOfLength(5);
+
+ BytesReference source = RandomObjects.randomSource(random(), xContentType);
+ DocWriteRequest.OpType opType = randomFrom(DocWriteRequest.OpType.values());
+
+            DocWriteRequest<?> docWriteRequest = null;
+ if (opType == DocWriteRequest.OpType.INDEX) {
+ IndexRequest indexRequest = new IndexRequest(index, type, id).source(source, xContentType);
+ docWriteRequest = indexRequest;
+ if (randomBoolean()) {
+ indexRequest.setPipeline(randomAlphaOfLength(5));
+ }
+ if (randomBoolean()) {
+ indexRequest.parent(randomAlphaOfLength(5));
+ }
+ } else if (opType == DocWriteRequest.OpType.CREATE) {
+ IndexRequest createRequest = new IndexRequest(index, type, id).source(source, xContentType).create(true);
+ docWriteRequest = createRequest;
+ if (randomBoolean()) {
+ createRequest.parent(randomAlphaOfLength(5));
+ }
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ final UpdateRequest updateRequest = new UpdateRequest(index, type, id).doc(new IndexRequest().source(source, xContentType));
+ docWriteRequest = updateRequest;
+ if (randomBoolean()) {
+ updateRequest.retryOnConflict(randomIntBetween(1, 5));
+ }
+ if (randomBoolean()) {
+ randomizeFetchSourceContextParams(updateRequest::fetchSource, new HashMap<>());
+ }
+ if (randomBoolean()) {
+ updateRequest.parent(randomAlphaOfLength(5));
+ }
+ } else if (opType == DocWriteRequest.OpType.DELETE) {
+ docWriteRequest = new DeleteRequest(index, type, id);
+ }
+
+ if (randomBoolean()) {
+ docWriteRequest.routing(randomAlphaOfLength(10));
+ }
+ if (randomBoolean()) {
+ docWriteRequest.version(randomNonNegativeLong());
+ }
+ if (randomBoolean()) {
+ docWriteRequest.versionType(randomFrom(VersionType.values()));
+ }
+ bulkRequest.add(docWriteRequest);
+ }
+
+ Request request = Request.bulk(bulkRequest);
+ assertEquals("/_bulk", request.endpoint);
+ assertEquals(expectedParams, request.params);
+ assertEquals("POST", request.method);
+ assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue());
+ byte[] content = new byte[(int) request.entity.getContentLength()];
+ try (InputStream inputStream = request.entity.getContent()) {
+ Streams.readFully(inputStream, content);
+ }
+
+ BulkRequest parsedBulkRequest = new BulkRequest();
+ parsedBulkRequest.add(content, 0, content.length, xContentType);
+ assertEquals(bulkRequest.numberOfActions(), parsedBulkRequest.numberOfActions());
+
+ for (int i = 0; i < bulkRequest.numberOfActions(); i++) {
+            DocWriteRequest<?> originalRequest = bulkRequest.requests().get(i);
+            DocWriteRequest<?> parsedRequest = parsedBulkRequest.requests().get(i);
+
+ assertEquals(originalRequest.opType(), parsedRequest.opType());
+ assertEquals(originalRequest.index(), parsedRequest.index());
+ assertEquals(originalRequest.type(), parsedRequest.type());
+ assertEquals(originalRequest.id(), parsedRequest.id());
+ assertEquals(originalRequest.routing(), parsedRequest.routing());
+ assertEquals(originalRequest.parent(), parsedRequest.parent());
+ assertEquals(originalRequest.version(), parsedRequest.version());
+ assertEquals(originalRequest.versionType(), parsedRequest.versionType());
+
+ DocWriteRequest.OpType opType = originalRequest.opType();
+ if (opType == DocWriteRequest.OpType.INDEX) {
+ IndexRequest indexRequest = (IndexRequest) originalRequest;
+ IndexRequest parsedIndexRequest = (IndexRequest) parsedRequest;
+
+ assertEquals(indexRequest.getPipeline(), parsedIndexRequest.getPipeline());
+ assertToXContentEquivalent(indexRequest.source(), parsedIndexRequest.source(), xContentType);
+ } else if (opType == DocWriteRequest.OpType.UPDATE) {
+ UpdateRequest updateRequest = (UpdateRequest) originalRequest;
+ UpdateRequest parsedUpdateRequest = (UpdateRequest) parsedRequest;
+
+ assertEquals(updateRequest.retryOnConflict(), parsedUpdateRequest.retryOnConflict());
+ assertEquals(updateRequest.fetchSource(), parsedUpdateRequest.fetchSource());
+ if (updateRequest.doc() != null) {
+ assertToXContentEquivalent(updateRequest.doc().source(), parsedUpdateRequest.doc().source(), xContentType);
+ } else {
+ assertNull(parsedUpdateRequest.doc());
+ }
+ }
+ }
+ }
+
+ public void testBulkWithDifferentContentTypes() throws IOException {
+ {
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new DeleteRequest("index", "type", "0"));
+ bulkRequest.add(new UpdateRequest("index", "type", "1").script(mockScript("test")));
+ bulkRequest.add(new DeleteRequest("index", "type", "2"));
+
+ Request request = Request.bulk(bulkRequest);
+ assertEquals(XContentType.JSON.mediaType(), request.entity.getContentType().getValue());
+ }
+ {
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new DeleteRequest("index", "type", "0"));
+ bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), xContentType));
+ bulkRequest.add(new DeleteRequest("index", "type", "2"));
+
+ Request request = Request.bulk(bulkRequest);
+ assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue());
+ }
+ {
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+ UpdateRequest updateRequest = new UpdateRequest("index", "type", "0");
+ if (randomBoolean()) {
+ updateRequest.doc(new IndexRequest().source(singletonMap("field", "value"), xContentType));
+ } else {
+ updateRequest.upsert(new IndexRequest().source(singletonMap("field", "value"), xContentType));
+ }
+
+ Request request = Request.bulk(new BulkRequest().add(updateRequest));
+ assertEquals(xContentType.mediaType(), request.entity.getContentType().getValue());
+ }
+ {
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE));
+ bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest));
+ assertEquals("Mismatching content-type found for request with content-type [JSON], " +
+ "previous requests have content-type [SMILE]", exception.getMessage());
+ }
+ {
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new IndexRequest("index", "type", "0")
+ .source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new IndexRequest("index", "type", "1")
+ .source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new UpdateRequest("index", "type", "2")
+ .doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON))
+ .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE))
+ );
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest));
+ assertEquals("Mismatching content-type found for request with content-type [SMILE], " +
+ "previous requests have content-type [JSON]", exception.getMessage());
+ }
+ {
+ XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML);
+ BulkRequest bulkRequest = new BulkRequest();
+ bulkRequest.add(new DeleteRequest("index", "type", "0"));
+ bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new DeleteRequest("index", "type", "2"));
+ bulkRequest.add(new DeleteRequest("index", "type", "3"));
+ bulkRequest.add(new IndexRequest("index", "type", "4").source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), xContentType));
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Request.bulk(bulkRequest));
+ assertEquals("Unsupported content-type found for request with content-type [" + xContentType
+ + "], only JSON and SMILE are supported", exception.getMessage());
+ }
+ }
+
+ public void testSearch() throws Exception {
+ SearchRequest searchRequest = new SearchRequest();
+ int numIndices = randomIntBetween(0, 5);
+ String[] indices = new String[numIndices];
+ for (int i = 0; i < numIndices; i++) {
+ indices[i] = "index-" + randomAlphaOfLengthBetween(2, 5);
+ }
+ searchRequest.indices(indices);
+ int numTypes = randomIntBetween(0, 5);
+ String[] types = new String[numTypes];
+ for (int i = 0; i < numTypes; i++) {
+ types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
+ }
+ searchRequest.types(types);
+
+        Map<String, String> expectedParams = new HashMap<>();
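+        // typed_keys is always sent so that aggregation and suggestion names in the response carry
+        // their type prefix, which the high level client relies on when parsing.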
+ expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
+ if (randomBoolean()) {
+ searchRequest.routing(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("routing", searchRequest.routing());
+ }
+ if (randomBoolean()) {
+ searchRequest.preference(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("preference", searchRequest.preference());
+ }
+ if (randomBoolean()) {
+ searchRequest.searchType(randomFrom(SearchType.values()));
+ }
+ expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
+ if (randomBoolean()) {
+ searchRequest.requestCache(randomBoolean());
+ expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache()));
+ }
+ if (randomBoolean()) {
+ searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE));
+ }
+ expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
+ if (randomBoolean()) {
+ searchRequest.scroll(randomTimeValue());
+ expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep());
+ }
+
+ if (randomBoolean()) {
+ searchRequest.indicesOptions(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
+ }
+ expectedParams.put("ignore_unavailable", Boolean.toString(searchRequest.indicesOptions().ignoreUnavailable()));
+ expectedParams.put("allow_no_indices", Boolean.toString(searchRequest.indicesOptions().allowNoIndices()));
+ if (searchRequest.indicesOptions().expandWildcardsOpen() && searchRequest.indicesOptions().expandWildcardsClosed()) {
+ expectedParams.put("expand_wildcards", "open,closed");
+ } else if (searchRequest.indicesOptions().expandWildcardsOpen()) {
+ expectedParams.put("expand_wildcards", "open");
+ } else if (searchRequest.indicesOptions().expandWildcardsClosed()) {
+ expectedParams.put("expand_wildcards", "closed");
+ } else {
+ expectedParams.put("expand_wildcards", "none");
+ }
+
+ SearchSourceBuilder searchSourceBuilder = null;
+ if (frequently()) {
+ searchSourceBuilder = new SearchSourceBuilder();
+ if (randomBoolean()) {
+ searchSourceBuilder.size(randomIntBetween(0, Integer.MAX_VALUE));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.from(randomIntBetween(0, Integer.MAX_VALUE));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.minScore(randomFloat());
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.explain(randomBoolean());
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.profile(randomBoolean());
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.highlighter(new HighlightBuilder().field(randomAlphaOfLengthBetween(3, 10)));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.query(new TermQueryBuilder(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10)));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.aggregation(new TermsAggregationBuilder(randomAlphaOfLengthBetween(3, 10), ValueType.STRING)
+ .field(randomAlphaOfLengthBetween(3, 10)));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(randomAlphaOfLengthBetween(3, 10),
+ new CompletionSuggestionBuilder(randomAlphaOfLengthBetween(3, 10))));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.addRescorer(new QueryRescorerBuilder(
+ new TermQueryBuilder(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10))));
+ }
+ if (randomBoolean()) {
+ searchSourceBuilder.collapse(new CollapseBuilder(randomAlphaOfLengthBetween(3, 10)));
+ }
+ searchRequest.source(searchSourceBuilder);
+ }
+
+ Request request = Request.search(searchRequest);
+ StringJoiner endpoint = new StringJoiner("/", "/", "");
+ String index = String.join(",", indices);
+ if (Strings.hasLength(index)) {
+ endpoint.add(index);
+ }
+ String type = String.join(",", types);
+ if (Strings.hasLength(type)) {
+ endpoint.add(type);
+ }
+ endpoint.add("_search");
+ assertEquals(endpoint.toString(), request.endpoint);
+ assertEquals(expectedParams, request.params);
+ if (searchSourceBuilder == null) {
+ assertNull(request.entity);
+ } else {
+ assertToXContentBody(searchSourceBuilder, request.entity);
+ }
+ }
+
+ public void testSearchScroll() throws IOException {
+ SearchScrollRequest searchScrollRequest = new SearchScrollRequest();
+ searchScrollRequest.scrollId(randomAlphaOfLengthBetween(5, 10));
+ if (randomBoolean()) {
+ searchScrollRequest.scroll(randomPositiveTimeValue());
+ }
+ Request request = Request.searchScroll(searchScrollRequest);
+ assertEquals("GET", request.method);
+ assertEquals("/_search/scroll", request.endpoint);
+ assertEquals(0, request.params.size());
+ assertToXContentBody(searchScrollRequest, request.entity);
+ assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue());
+ }
+
+ public void testClearScroll() throws IOException {
+ ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
+ int numScrolls = randomIntBetween(1, 10);
+ for (int i = 0; i < numScrolls; i++) {
+ clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10));
+ }
+ Request request = Request.clearScroll(clearScrollRequest);
+ assertEquals("DELETE", request.method);
+ assertEquals("/_search/scroll", request.endpoint);
+ assertEquals(0, request.params.size());
+ assertToXContentBody(clearScrollRequest, request.entity);
+ assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaType(), request.entity.getContentType().getValue());
+ }
+
+ private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException {
+ BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, Request.REQUEST_BODY_CONTENT_TYPE, false);
+ assertEquals(XContentType.JSON.mediaType(), actualEntity.getContentType().getValue());
+ assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(actualEntity)));
+ }
+
+ public void testParams() {
+ final int nbParams = randomIntBetween(0, 10);
+ Request.Params params = Request.Params.builder();
+        Map<String, String> expectedParams = new HashMap<>();
+ for (int i = 0; i < nbParams; i++) {
+ String paramName = "p_" + i;
+ String paramValue = randomAlphaOfLength(5);
+ params.putParam(paramName, paramValue);
+ expectedParams.put(paramName, paramValue);
+ }
+
+        Map<String, String> requestParams = params.getParams();
+ assertEquals(nbParams, requestParams.size());
+ assertEquals(expectedParams, requestParams);
+ }
+
+ public void testParamsNoDuplicates() {
+ Request.Params params = Request.Params.builder();
+ params.putParam("test", "1");
+
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> params.putParam("test", "2"));
+ assertEquals("Request parameter [test] is already registered", e.getMessage());
+
+        Map<String, String> requestParams = params.getParams();
+ assertEquals(1L, requestParams.size());
+ assertEquals("1", requestParams.values().iterator().next());
+ }
+
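+    // Request.endpoint joins the given parts with '/' and skips null parts, as the
+    // "_create" assertions below show.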
+ public void testEndpoint() {
+ assertEquals("/", Request.endpoint());
+ assertEquals("/", Request.endpoint(Strings.EMPTY_ARRAY));
+ assertEquals("/", Request.endpoint(""));
+ assertEquals("/a/b", Request.endpoint("a", "b"));
+ assertEquals("/a/b/_create", Request.endpoint("a", "b", "_create"));
+ assertEquals("/a/b/c/_create", Request.endpoint("a", "b", "c", "_create"));
+ assertEquals("/a/_create", Request.endpoint("a", null, null, "_create"));
+ }
+
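+    // enforceSameContentType adopts the first request's type when none has been chosen yet, then
+    // requires every later request to match it and rejects anything other than JSON or SMILE.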
+ public void testEnforceSameContentType() {
+ XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
+ IndexRequest indexRequest = new IndexRequest().source(singletonMap("field", "value"), xContentType);
+ assertEquals(xContentType, enforceSameContentType(indexRequest, null));
+ assertEquals(xContentType, enforceSameContentType(indexRequest, xContentType));
+
+ XContentType bulkContentType = randomBoolean() ? xContentType : null;
+
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
+ enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), bulkContentType));
+ assertEquals("Unsupported content-type found for request with content-type [CBOR], only JSON and SMILE are supported",
+ exception.getMessage());
+
+ exception = expectThrows(IllegalArgumentException.class, () ->
+ enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), bulkContentType));
+ assertEquals("Unsupported content-type found for request with content-type [YAML], only JSON and SMILE are supported",
+ exception.getMessage());
+
+ XContentType requestContentType = xContentType == XContentType.JSON ? XContentType.SMILE : XContentType.JSON;
+
+ exception = expectThrows(IllegalArgumentException.class, () ->
+ enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType));
+ assertEquals("Mismatching content-type found for request with content-type [" + requestContentType + "], "
+ + "previous requests have content-type [" + xContentType + "]", exception.getMessage());
+ }
+
+ /**
+ * Randomize the {@link FetchSourceContext} request parameters.
+ */
+    private static void randomizeFetchSourceContextParams(Consumer<FetchSourceContext> consumer, Map<String, String> expectedParams) {
+ if (randomBoolean()) {
+ if (randomBoolean()) {
+ boolean fetchSource = randomBoolean();
+ consumer.accept(new FetchSourceContext(fetchSource));
+ if (fetchSource == false) {
+ expectedParams.put("_source", "false");
+ }
+ } else {
+ int numIncludes = randomIntBetween(0, 5);
+ String[] includes = new String[numIncludes];
+ StringBuilder includesParam = new StringBuilder();
+ for (int i = 0; i < numIncludes; i++) {
+ String include = randomAlphaOfLengthBetween(3, 10);
+ includes[i] = include;
+ includesParam.append(include);
+ if (i < numIncludes - 1) {
+ includesParam.append(",");
+ }
+ }
+ if (numIncludes > 0) {
+ expectedParams.put("_source_include", includesParam.toString());
+ }
+ int numExcludes = randomIntBetween(0, 5);
+ String[] excludes = new String[numExcludes];
+ StringBuilder excludesParam = new StringBuilder();
+ for (int i = 0; i < numExcludes; i++) {
+ String exclude = randomAlphaOfLengthBetween(3, 10);
+ excludes[i] = exclude;
+ excludesParam.append(exclude);
+ if (i < numExcludes - 1) {
+ excludesParam.append(",");
+ }
+ }
+ if (numExcludes > 0) {
+ expectedParams.put("_source_exclude", excludesParam.toString());
+ }
+ consumer.accept(new FetchSourceContext(true, includes, excludes));
+ }
+ }
+ }
+
+    private static void setRandomTimeout(ReplicationRequest<?> request, Map<String, String> expectedParams) {
+ if (randomBoolean()) {
+ String timeout = randomTimeValue();
+ request.timeout(timeout);
+ expectedParams.put("timeout", timeout);
+ } else {
+ expectedParams.put("timeout", ReplicationRequest.DEFAULT_TIMEOUT.getStringRep());
+ }
+ }
+
+    private static void setRandomRefreshPolicy(ReplicatedWriteRequest<?> request, Map<String, String> expectedParams) {
+ if (randomBoolean()) {
+ WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values());
+ request.setRefreshPolicy(refreshPolicy);
+ if (refreshPolicy != WriteRequest.RefreshPolicy.NONE) {
+ expectedParams.put("refresh", refreshPolicy.getValue());
+ }
+ }
+ }
+
+    private static void setRandomVersion(DocWriteRequest<?> request, Map<String, String> expectedParams) {
+ if (randomBoolean()) {
+ long version = randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED, Versions.NOT_FOUND, randomNonNegativeLong());
+ request.version(version);
+ if (version != Versions.MATCH_ANY) {
+ expectedParams.put("version", Long.toString(version));
+ }
+ }
+ }
+
+    private static void setRandomVersionType(DocWriteRequest<?> request, Map<String, String> expectedParams) {
+ if (randomBoolean()) {
+ VersionType versionType = randomFrom(VersionType.values());
+ request.versionType(versionType);
+ if (versionType != VersionType.INTERNAL) {
+ expectedParams.put("version_type", versionType.name().toLowerCase(Locale.ROOT));
+ }
+ }
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java
new file mode 100644
index 0000000000000..cb32f9ae9dd93
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientExtTests.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.mockito.Mockito.mock;
+
+/**
+ * This test works against a {@link RestHighLevelClient} subclass that simulates how custom response sections returned by
+ * Elasticsearch plugins can be parsed using the high level client.
+ */
+public class RestHighLevelClientExtTests extends ESTestCase {
+
+ private RestHighLevelClient restHighLevelClient;
+
+ @Before
+ public void initClient() throws IOException {
+ RestClient restClient = mock(RestClient.class);
+ restHighLevelClient = new RestHighLevelClientExt(restClient);
+ }
+
+ public void testParseEntityCustomResponseSection() throws IOException {
+ {
+ HttpEntity jsonEntity = new StringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
+ BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
+ assertThat(customSection, instanceOf(CustomResponseSection1.class));
+ CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection;
+ assertEquals("value", customResponseSection1.value);
+ }
+ {
+ HttpEntity jsonEntity = new StringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
+ BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
+ assertThat(customSection, instanceOf(CustomResponseSection2.class));
+ CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection;
+ assertArrayEquals(new String[]{"item1", "item2"}, customResponseSection2.values);
+ }
+ }
+
+ private static class RestHighLevelClientExt extends RestHighLevelClient {
+
+ private RestHighLevelClientExt(RestClient restClient) {
+ super(restClient, getNamedXContentsExt());
+ }
+
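+        // Additional NamedXContentRegistry entries let parseEntity dispatch on the section name
+        // ("custom1" or "custom2") to the matching plugin-specific parser.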
+        private static List<NamedXContentRegistry.Entry> getNamedXContentsExt() {
+            List<NamedXContentRegistry.Entry> entries = new ArrayList<>();
+ entries.add(new NamedXContentRegistry.Entry(BaseCustomResponseSection.class, new ParseField("custom1"),
+ CustomResponseSection1::fromXContent));
+ entries.add(new NamedXContentRegistry.Entry(BaseCustomResponseSection.class, new ParseField("custom2"),
+ CustomResponseSection2::fromXContent));
+ return entries;
+ }
+ }
+
+ private abstract static class BaseCustomResponseSection {
+
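+        // parser.namedObject looks up the parser registered under the current field name, routing
+        // plugin-defined response sections to their own fromXContent implementations.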
+ static BaseCustomResponseSection fromXContent(XContentParser parser) throws IOException {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ BaseCustomResponseSection custom = parser.namedObject(BaseCustomResponseSection.class, parser.currentName(), null);
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ return custom;
+ }
+ }
+
+ private static class CustomResponseSection1 extends BaseCustomResponseSection {
+
+ private final String value;
+
+ private CustomResponseSection1(String value) {
+ this.value = value;
+ }
+
+ static CustomResponseSection1 fromXContent(XContentParser parser) throws IOException {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals("field", parser.currentName());
+ assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken());
+ CustomResponseSection1 responseSection1 = new CustomResponseSection1(parser.text());
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ return responseSection1;
+ }
+ }
+
+ private static class CustomResponseSection2 extends BaseCustomResponseSection {
+
+ private final String[] values;
+
+ private CustomResponseSection2(String[] values) {
+ this.values = values;
+ }
+
+ static CustomResponseSection2 fromXContent(XContentParser parser) throws IOException {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertEquals("array", parser.currentName());
+ assertEquals(XContentParser.Token.START_ARRAY, parser.nextToken());
+            List<String> values = new ArrayList<>();
+            while (parser.nextToken().isValue()) {
+ values.add(parser.text());
+ }
+ assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
+ CustomResponseSection2 responseSection2 = new CustomResponseSection2(values.toArray(new String[values.size()]));
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ return responseSection2;
+ }
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
index 7d513e489982c..7fc0733a7f0c7 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java
@@ -19,7 +19,48 @@
package org.elasticsearch.client;
+import com.fasterxml.jackson.core.JsonParseException;
import org.apache.http.Header;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpHost;
+import org.apache.http.HttpResponse;
+import org.apache.http.ProtocolVersion;
+import org.apache.http.RequestLine;
+import org.apache.http.StatusLine;
+import org.apache.http.entity.ByteArrayEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.message.BasicHttpResponse;
+import org.apache.http.message.BasicRequestLine;
+import org.apache.http.message.BasicStatusLine;
+import org.apache.http.nio.entity.NStringEntity;
+import org.elasticsearch.Build;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.main.MainRequest;
+import org.elasticsearch.action.main.MainResponse;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.ClearScrollResponse;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchResponseSections;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.cbor.CborXContent;
+import org.elasticsearch.common.xcontent.smile.SmileXContent;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.aggregations.Aggregation;
+import org.elasticsearch.search.aggregations.InternalAggregations;
+import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
import org.mockito.ArgumentMatcher;
@@ -28,47 +69,581 @@
import java.io.IOException;
import java.net.SocketTimeoutException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
-import static org.mockito.Matchers.any;
+import static org.elasticsearch.client.RestClientTestUtil.randomHeaders;
+import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
+import static org.hamcrest.CoreMatchers.instanceOf;
+import static org.mockito.Matchers.anyMapOf;
+import static org.mockito.Matchers.anyObject;
+import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.anyVararg;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
+import static org.mockito.Matchers.isNotNull;
+import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class RestHighLevelClientTests extends ESTestCase {
+ private static final ProtocolVersion HTTP_PROTOCOL = new ProtocolVersion("http", 1, 1);
+ private static final RequestLine REQUEST_LINE = new BasicRequestLine("GET", "/", HTTP_PROTOCOL);
+
private RestClient restClient;
private RestHighLevelClient restHighLevelClient;
@Before
- public void initClient() throws IOException {
+ public void initClient() {
restClient = mock(RestClient.class);
restHighLevelClient = new RestHighLevelClient(restClient);
}
- public void testPing() throws IOException {
- assertTrue(restHighLevelClient.ping());
- verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher()));
+ public void testPingSuccessful() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ Response response = mock(Response.class);
+ when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.OK));
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenReturn(response);
+ assertTrue(restHighLevelClient.ping(headers));
+ verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()),
+ isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
}
- public void testPingFailure() throws IOException {
- when(restClient.performRequest(any(), any())).thenThrow(new IllegalStateException());
- expectThrows(IllegalStateException.class, () -> restHighLevelClient.ping());
+ public void testPing404NotFound() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ Response response = mock(Response.class);
+ when(response.getStatusLine()).thenReturn(newStatusLine(RestStatus.NOT_FOUND));
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenReturn(response);
+ assertFalse(restHighLevelClient.ping(headers));
+ verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()),
+ isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
}
- public void testPingFailed() throws IOException {
- when(restClient.performRequest(any(), any())).thenThrow(new SocketTimeoutException());
- assertFalse(restHighLevelClient.ping());
+ public void testPingSocketTimeout() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(new SocketTimeoutException());
+ expectThrows(SocketTimeoutException.class, () -> restHighLevelClient.ping(headers));
+ verify(restClient).performRequest(eq("HEAD"), eq("/"), eq(Collections.emptyMap()),
+ isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
}
- public void testPingWithHeaders() throws IOException {
- Header[] headers = RestClientTestUtil.randomHeaders(random(), "Header");
- assertTrue(restHighLevelClient.ping(headers));
- verify(restClient).performRequest(eq("HEAD"), eq("/"), argThat(new HeadersVarargMatcher(headers)));
+ public void testInfo() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid",
+ Build.CURRENT, true);
+ mockResponse(testInfo);
+ MainResponse receivedInfo = restHighLevelClient.info(headers);
+ assertEquals(testInfo, receivedInfo);
+ verify(restClient).performRequest(eq("GET"), eq("/"), eq(Collections.emptyMap()),
+ isNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
+ }
+
+ public void testSearchScroll() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ SearchResponse mockSearchResponse = new SearchResponse(new SearchResponseSections(SearchHits.empty(), InternalAggregations.EMPTY,
+ null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 100, new ShardSearchFailure[0]);
+ mockResponse(mockSearchResponse);
+ SearchResponse searchResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)),
+ headers);
+ assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId());
+ assertEquals(0, searchResponse.getHits().totalHits);
+ assertEquals(5, searchResponse.getTotalShards());
+ assertEquals(5, searchResponse.getSuccessfulShards());
+ assertEquals(100, searchResponse.getTook().getMillis());
+ verify(restClient).performRequest(eq("GET"), eq("/_search/scroll"), eq(Collections.emptyMap()),
+ isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
+ }
+
+ public void testClearScroll() throws IOException {
+ Header[] headers = randomHeaders(random(), "Header");
+ ClearScrollResponse mockClearScrollResponse = new ClearScrollResponse(randomBoolean(), randomIntBetween(0, Integer.MAX_VALUE));
+ mockResponse(mockClearScrollResponse);
+ ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
+ clearScrollRequest.addScrollId(randomAlphaOfLengthBetween(5, 10));
+ ClearScrollResponse clearScrollResponse = restHighLevelClient.clearScroll(clearScrollRequest, headers);
+ assertEquals(mockClearScrollResponse.isSucceeded(), clearScrollResponse.isSucceeded());
+ assertEquals(mockClearScrollResponse.getNumFreed(), clearScrollResponse.getNumFreed());
+ verify(restClient).performRequest(eq("DELETE"), eq("/_search/scroll"), eq(Collections.emptyMap()),
+ isNotNull(HttpEntity.class), argThat(new HeadersVarargMatcher(headers)));
+ }
+
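+    // serializes the given object with the request body content type and stubs the low-level client
+    // to return it as the response entity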
+ private void mockResponse(ToXContent toXContent) throws IOException {
+ Response response = mock(Response.class);
+ ContentType contentType = ContentType.parse(Request.REQUEST_BODY_CONTENT_TYPE.mediaType());
+ String requestBody = toXContent(toXContent, Request.REQUEST_BODY_CONTENT_TYPE, false).utf8ToString();
+ when(response.getEntity()).thenReturn(new NStringEntity(requestBody, contentType));
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenReturn(response);
+ }
+
+ public void testRequestValidation() {
+ ActionRequestValidationException validationException = new ActionRequestValidationException();
+ validationException.addValidationError("validation error");
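+        // a request whose validate() always fails, so performRequest should give up before any HTTP call is made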
+ ActionRequest request = new ActionRequest() {
+ @Override
+ public ActionRequestValidationException validate() {
+ return validationException;
+ }
+ };
+
+ {
+ ActionRequestValidationException actualException = expectThrows(ActionRequestValidationException.class,
+ () -> restHighLevelClient.performRequest(request, null, null, null));
+ assertSame(validationException, actualException);
+ }
+ {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ restHighLevelClient.performRequestAsync(request, null, null, trackingActionListener, null);
+ assertSame(validationException, trackingActionListener.exception.get());
+ }
+ }
+
+ public void testParseEntity() throws IOException {
+ {
+ IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(null, null));
+ assertEquals("Response body expected but not returned", ise.getMessage());
+ }
+ {
+ IllegalStateException ise = expectThrows(IllegalStateException.class,
+ () -> restHighLevelClient.parseEntity(new StringEntity("", (ContentType) null), null));
+ assertEquals("Elasticsearch didn't return the [Content-Type] header, unable to parse response body", ise.getMessage());
+ }
+ {
+ StringEntity entity = new StringEntity("", ContentType.APPLICATION_SVG_XML);
+ IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(entity, null));
+ assertEquals("Unsupported Content-Type: " + entity.getContentType().getValue(), ise.getMessage());
+ }
+ {
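+            // a parser function that reads a single-field object such as {"field":"value"} and returns the field value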
+            CheckedFunction<XContentParser, String, IOException> entityParser = parser -> {
+ assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+ assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
+ assertTrue(parser.nextToken().isValue());
+ String value = parser.text();
+ assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
+ return value;
+ };
+ HttpEntity jsonEntity = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
+ assertEquals("value", restHighLevelClient.parseEntity(jsonEntity, entityParser));
+ HttpEntity yamlEntity = new StringEntity("---\nfield: value\n", ContentType.create("application/yaml"));
+ assertEquals("value", restHighLevelClient.parseEntity(yamlEntity, entityParser));
+ HttpEntity smileEntity = createBinaryEntity(SmileXContent.contentBuilder(), ContentType.create("application/smile"));
+ assertEquals("value", restHighLevelClient.parseEntity(smileEntity, entityParser));
+ HttpEntity cborEntity = createBinaryEntity(CborXContent.contentBuilder(), ContentType.create("application/cbor"));
+ assertEquals("value", restHighLevelClient.parseEntity(cborEntity, entityParser));
+ }
+ }
+
+ private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, ContentType contentType) throws IOException {
+ try (XContentBuilder builder = xContentBuilder) {
+ builder.startObject();
+ builder.field("field", "value");
+ builder.endObject();
+ return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType);
+ }
+ }
+
+ public void testConvertExistsResponse() {
+ RestStatus restStatus = randomBoolean() ? RestStatus.OK : randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ boolean result = RestHighLevelClient.convertExistsResponse(response);
+ assertEquals(restStatus == RestStatus.OK, result);
+ }
+
+ public void testParseResponseException() throws IOException {
+ {
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
+ assertEquals(responseException.getMessage(), elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ }
+ {
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+ ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
+ assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getSuppressed()[0]);
+ }
+ {
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IOException.class));
+ }
+ {
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ ElasticsearchException elasticsearchException = restHighLevelClient.parseResponseException(responseException);
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class));
+ }
+ }
+
+ public void testPerformRequestOnSuccess() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenReturn(mockResponse);
+ {
+ Integer result = restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.emptySet());
+ assertEquals(restStatus.getStatus(), result.intValue());
+ }
+ {
+ IOException ioe = expectThrows(IOException.class, () -> restHighLevelClient.performRequest(mainRequest,
+ requestConverter, response -> {throw new IllegalStateException();}, Collections.emptySet()));
+ assertEquals("Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, " +
+ "response=http/1.1 " + restStatus.getStatus() + " " + restStatus.name() + "}", ioe.getMessage());
+ }
+ }
+
+ public void testPerformRequestOnResponseExceptionWithoutEntity() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.emptySet()));
+ assertEquals(responseException.getMessage(), elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ }
+
+ public void testPerformRequestOnResponseExceptionWithEntity() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+ ContentType.APPLICATION_JSON));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.emptySet()));
+ assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getSuppressed()[0]);
+ }
+
+ public void testPerformRequestOnResponseExceptionWithBrokenEntity() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.emptySet()));
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(JsonParseException.class));
}
-    private class HeadersVarargMatcher extends ArgumentMatcher<Header[]> implements VarargMatcher {
+ public void testPerformRequestOnResponseExceptionWithBrokenEntity2() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.emptySet()));
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class));
+ }
+
+ public void testPerformRequestOnResponseExceptionWithIgnores() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+        // although we got an exception, we turn it into a successful response because the status code was among the ignored ones
+ assertEquals(Integer.valueOf(404), restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> response.getStatusLine().getStatusCode(), Collections.singleton(404)));
+ }
+
+ public void testPerformRequestOnResponseExceptionWithIgnoresErrorNoBody() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> {throw new IllegalStateException();}, Collections.singleton(404)));
+ assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertEquals(responseException.getMessage(), elasticsearchException.getMessage());
+ }
+
+ public void testPerformRequestOnResponseExceptionWithIgnoresErrorValidBody() throws IOException {
+ MainRequest mainRequest = new MainRequest();
+        CheckedFunction<MainRequest, Request, IOException> requestConverter = request ->
+ new Request("GET", "/", Collections.emptyMap(), null);
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}",
+ ContentType.APPLICATION_JSON));
+ Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(mockResponse);
+ when(restClient.performRequest(anyString(), anyString(), anyMapOf(String.class, String.class),
+ anyObject(), anyVararg())).thenThrow(responseException);
+ ElasticsearchException elasticsearchException = expectThrows(ElasticsearchException.class,
+ () -> restHighLevelClient.performRequest(mainRequest, requestConverter,
+ response -> {throw new IllegalStateException();}, Collections.singleton(404)));
+ assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getSuppressed()[0]);
+ assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage());
+ }
+
+ public void testWrapResponseListenerOnSuccess() {
+ {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse));
+ assertNull(trackingActionListener.exception.get());
+ assertEquals(restStatus.getStatus(), trackingActionListener.statusCode.get());
+ }
+ {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> {throw new IllegalStateException();}, trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse));
+ assertThat(trackingActionListener.exception.get(), instanceOf(IOException.class));
+ IOException ioe = (IOException) trackingActionListener.exception.get();
+ assertEquals("Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, " +
+ "response=http/1.1 " + restStatus.getStatus() + " " + restStatus.name() + "}", ioe.getMessage());
+ assertThat(ioe.getCause(), instanceOf(IllegalStateException.class));
+ }
+ }
+
+ public void testWrapResponseListenerOnException() {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ IllegalStateException exception = new IllegalStateException();
+ responseListener.onFailure(exception);
+ assertSame(exception, trackingActionListener.exception.get());
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithoutEntity() throws IOException {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException) trackingActionListener.exception.get();
+ assertEquals(responseException.getMessage(), elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithEntity() throws IOException {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
+ ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get();
+ assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getSuppressed()[0]);
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws IOException {
+ {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get();
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(JsonParseException.class));
+ }
+ {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.emptySet());
+ RestStatus restStatus = randomFrom(RestStatus.values());
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+ httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get();
+ assertEquals("Unable to parse response body", elasticsearchException.getMessage());
+ assertEquals(restStatus, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertThat(elasticsearchException.getSuppressed()[0], instanceOf(IllegalStateException.class));
+ }
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithIgnores() throws IOException {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> response.getStatusLine().getStatusCode(), trackingActionListener, Collections.singleton(404));
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+        // although we got an exception, we turn it into a successful response because the status code was among the ignored ones
+ assertNull(trackingActionListener.exception.get());
+ assertEquals(404, trackingActionListener.statusCode.get());
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorNoBody() throws IOException {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+        // response parsing throws an exception while handling ignores, same as when GetResponse#fromXContent throws an error
+        // when trying to parse a 404 response which contains an error rather than a valid document-not-found response
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404));
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get();
+ assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getCause());
+ assertEquals(responseException.getMessage(), elasticsearchException.getMessage());
+ }
+
+ public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorValidBody() throws IOException {
+ TrackingActionListener trackingActionListener = new TrackingActionListener();
+        // response parsing throws an exception while handling ignores, same as when GetResponse#fromXContent throws an error
+        // when trying to parse a 404 response which contains an error rather than a valid document-not-found response
+ ResponseListener responseListener = restHighLevelClient.wrapResponseListener(
+ response -> { throw new IllegalStateException(); }, trackingActionListener, Collections.singleton(404));
+ HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+ httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}",
+ ContentType.APPLICATION_JSON));
+ Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
+ ResponseException responseException = new ResponseException(response);
+ responseListener.onFailure(responseException);
+ assertThat(trackingActionListener.exception.get(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException elasticsearchException = (ElasticsearchException)trackingActionListener.exception.get();
+ assertEquals(RestStatus.NOT_FOUND, elasticsearchException.status());
+ assertSame(responseException, elasticsearchException.getSuppressed()[0]);
+ assertEquals("Elasticsearch exception [type=exception, reason=test error message]", elasticsearchException.getMessage());
+ }
+
+ public void testNamedXContents() {
+        List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getDefaultNamedXContents();
+ assertEquals(45, namedXContents.size());
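+        // 45 = 42 aggregation parsers + 3 suggestion parsers, as verified per category below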
+        Map<Class<?>, Integer> categories = new HashMap<>();
+ for (NamedXContentRegistry.Entry namedXContent : namedXContents) {
+ Integer counter = categories.putIfAbsent(namedXContent.categoryClass, 1);
+ if (counter != null) {
+ categories.put(namedXContent.categoryClass, counter + 1);
+ }
+ }
+ assertEquals(2, categories.size());
+ assertEquals(Integer.valueOf(42), categories.get(Aggregation.class));
+ assertEquals(Integer.valueOf(3), categories.get(Suggest.Suggestion.class));
+ }
+
+    private static class TrackingActionListener implements ActionListener<Integer> {
+ private final AtomicInteger statusCode = new AtomicInteger(-1);
+        private final AtomicReference<Exception> exception = new AtomicReference<>();
+
+ @Override
+ public void onResponse(Integer statusCode) {
+ assertTrue(this.statusCode.compareAndSet(-1, statusCode));
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ assertTrue(exception.compareAndSet(null, e));
+ }
+ }
+
+    private static class HeadersVarargMatcher extends ArgumentMatcher<Header[]> implements VarargMatcher {
private Header[] expectedHeaders;
HeadersVarargMatcher(Header... expectedHeaders) {
@@ -84,4 +659,8 @@ public boolean matches(Object varargArgument) {
return false;
}
}
+
+ private static StatusLine newStatusLine(RestStatus restStatus) {
+ return new BasicStatusLine(HTTP_PROTOCOL, restStatus.getStatus(), restStatus.name());
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
new file mode 100644
index 0000000000000..328f2ee32f557
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -0,0 +1,464 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.HttpEntity;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.nio.entity.NStringEntity;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.action.search.ClearScrollRequest;
+import org.elasticsearch.action.search.ClearScrollResponse;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.query.MatchQueryBuilder;
+import org.elasticsearch.join.aggregations.Children;
+import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.aggregations.bucket.range.Range;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats;
+import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.suggest.Suggest;
+import org.elasticsearch.search.suggest.SuggestBuilder;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.both;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.lessThan;
+
+public class SearchIT extends ESRestHighLevelClientTestCase {
+
+ @Before
+ public void indexDocuments() throws IOException {
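+        // five small documents: three of "type1" and two of "type2"; the search and aggregation tests below rely on these values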
+ StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/index/type/1", Collections.emptyMap(), doc1);
+ StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/index/type/2", Collections.emptyMap(), doc2);
+ StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/index/type/3", Collections.emptyMap(), doc3);
+ StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/index/type/4", Collections.emptyMap(), doc4);
+ StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/index/type/5", Collections.emptyMap(), doc5);
+ client().performRequest("POST", "/index/_refresh");
+ }
+
+ public void testSearchNoQuery() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getAggregations());
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(5, searchResponse.getHits().totalHits);
+ assertEquals(5, searchResponse.getHits().getHits().length);
+ for (SearchHit searchHit : searchResponse.getHits().getHits()) {
+ assertEquals("index", searchHit.getIndex());
+ assertEquals("type", searchHit.getType());
+ assertThat(Integer.valueOf(searchHit.getId()), both(greaterThan(0)).and(lessThan(6)));
+ assertEquals(1.0f, searchHit.getScore(), 0);
+ assertEquals(-1L, searchHit.getVersion());
+ assertNotNull(searchHit.getSourceAsMap());
+ assertEquals(3, searchHit.getSourceAsMap().size());
+ assertTrue(searchHit.getSourceAsMap().containsKey("type"));
+ assertTrue(searchHit.getSourceAsMap().containsKey("num"));
+ assertTrue(searchHit.getSourceAsMap().containsKey("num2"));
+ }
+ }
+
+ public void testSearchMatchQuery() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ searchRequest.source(new SearchSourceBuilder().query(new MatchQueryBuilder("num", 10)));
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getAggregations());
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(1, searchResponse.getHits().totalHits);
+ assertEquals(1, searchResponse.getHits().getHits().length);
+ assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f));
+ SearchHit searchHit = searchResponse.getHits().getHits()[0];
+ assertEquals("index", searchHit.getIndex());
+ assertEquals("type", searchHit.getType());
+ assertEquals("1", searchHit.getId());
+ assertThat(searchHit.getScore(), greaterThan(0f));
+ assertEquals(-1L, searchHit.getVersion());
+ assertNotNull(searchHit.getSourceAsMap());
+ assertEquals(3, searchHit.getSourceAsMap().size());
+ assertEquals("type1", searchHit.getSourceAsMap().get("type"));
+ assertEquals(50, searchHit.getSourceAsMap().get("num2"));
+ }
+
+ public void testSearchWithTermsAgg() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword"));
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ Terms termsAgg = searchResponse.getAggregations().get("agg1");
+ assertEquals("agg1", termsAgg.getName());
+ assertEquals(2, termsAgg.getBuckets().size());
+ Terms.Bucket type1 = termsAgg.getBucketByKey("type1");
+ assertEquals(3, type1.getDocCount());
+ assertEquals(0, type1.getAggregations().asList().size());
+ Terms.Bucket type2 = termsAgg.getBucketByKey("type2");
+ assertEquals(2, type2.getDocCount());
+ assertEquals(0, type2.getAggregations().asList().size());
+ }
+
+ public void testSearchWithRangeAgg() throws IOException {
+ {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.aggregation(new RangeAggregationBuilder("agg1").field("num"));
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync));
+ assertEquals(RestStatus.BAD_REQUEST, exception.status());
+ }
+
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.aggregation(new RangeAggregationBuilder("agg1").field("num")
+ .addRange("first", 0, 30).addRange("second", 31, 200));
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(5, searchResponse.getHits().totalHits);
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ Range rangeAgg = searchResponse.getAggregations().get("agg1");
+ assertEquals("agg1", rangeAgg.getName());
+ assertEquals(2, rangeAgg.getBuckets().size());
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(0);
+ assertEquals("first", bucket.getKeyAsString());
+ assertEquals(2, bucket.getDocCount());
+ }
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(1);
+ assertEquals("second", bucket.getKeyAsString());
+ assertEquals(3, bucket.getDocCount());
+ }
+ }
+
+ public void testSearchWithTermsAndRangeAgg() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ TermsAggregationBuilder agg = new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword");
+ agg.subAggregation(new RangeAggregationBuilder("subagg").field("num")
+ .addRange("first", 0, 30).addRange("second", 31, 200));
+ searchSourceBuilder.aggregation(agg);
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ Terms termsAgg = searchResponse.getAggregations().get("agg1");
+ assertEquals("agg1", termsAgg.getName());
+ assertEquals(2, termsAgg.getBuckets().size());
+ Terms.Bucket type1 = termsAgg.getBucketByKey("type1");
+ assertEquals(3, type1.getDocCount());
+ assertEquals(1, type1.getAggregations().asList().size());
+ {
+ Range rangeAgg = type1.getAggregations().get("subagg");
+ assertEquals(2, rangeAgg.getBuckets().size());
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(0);
+ assertEquals("first", bucket.getKeyAsString());
+ assertEquals(2, bucket.getDocCount());
+ }
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(1);
+ assertEquals("second", bucket.getKeyAsString());
+ assertEquals(1, bucket.getDocCount());
+ }
+ }
+ Terms.Bucket type2 = termsAgg.getBucketByKey("type2");
+ assertEquals(2, type2.getDocCount());
+ assertEquals(1, type2.getAggregations().asList().size());
+ {
+ Range rangeAgg = type2.getAggregations().get("subagg");
+ assertEquals(2, rangeAgg.getBuckets().size());
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(0);
+ assertEquals("first", bucket.getKeyAsString());
+ assertEquals(0, bucket.getDocCount());
+ }
+ {
+ Range.Bucket bucket = rangeAgg.getBuckets().get(1);
+ assertEquals("second", bucket.getKeyAsString());
+ assertEquals(2, bucket.getDocCount());
+ }
+ }
+ }
+
+ public void testSearchWithMatrixStats() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.aggregation(new MatrixStatsAggregationBuilder("agg1").fields(Arrays.asList("num", "num2")));
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(5, searchResponse.getHits().totalHits);
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ assertEquals(1, searchResponse.getAggregations().asList().size());
+ MatrixStats matrixStats = searchResponse.getAggregations().get("agg1");
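+        // expected statistics follow from num = {10, 20, 50, 100, 100} and num2 = {50, 40, 35, 10, 10}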
+ assertEquals(5, matrixStats.getFieldCount("num"));
+ assertEquals(56d, matrixStats.getMean("num"), 0d);
+ assertEquals(1830d, matrixStats.getVariance("num"), 0d);
+ assertEquals(0.09340198804973057, matrixStats.getSkewness("num"), 0d);
+ assertEquals(1.2741646510794589, matrixStats.getKurtosis("num"), 0d);
+ assertEquals(5, matrixStats.getFieldCount("num2"));
+ assertEquals(29d, matrixStats.getMean("num2"), 0d);
+ assertEquals(330d, matrixStats.getVariance("num2"), 0d);
+ assertEquals(-0.13568039346585542, matrixStats.getSkewness("num2"), 0d);
+ assertEquals(1.3517561983471074, matrixStats.getKurtosis("num2"), 0d);
+ assertEquals(-767.5, matrixStats.getCovariance("num", "num2"), 0d);
+ assertEquals(-0.9876336291667923, matrixStats.getCorrelation("num", "num2"), 0d);
+ }
+
+ public void testSearchWithParentJoin() throws IOException {
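+        // a _parent mapping requires index.mapping.single_type to be set to false on this index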
+ StringEntity parentMapping = new StringEntity("{\n" +
+ " \"mappings\": {\n" +
+ " \"answer\" : {\n" +
+ " \"_parent\" : {\n" +
+ " \"type\" : \"question\"\n" +
+ " }\n" +
+ " }\n" +
+ " },\n" +
+ " \"settings\": {\n" +
+ " \"index.mapping.single_type\": false" +
+ " }\n" +
+ "}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/child_example", Collections.emptyMap(), parentMapping);
+ StringEntity questionDoc = new StringEntity("{\n" +
+ " \"body\": \"I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" +
+ " \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" +
+ " \"tags\": [\n" +
+ " \"windows-server-2003\",\n" +
+ " \"windows-server-2008\",\n" +
+ " \"file-transfer\"\n" +
+ " ]\n" +
+ "}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/child_example/question/1", Collections.emptyMap(), questionDoc);
+ StringEntity answerDoc1 = new StringEntity("{\n" +
+ " \"owner\": {\n" +
+ " \"location\": \"Norfolk, United Kingdom\",\n" +
+ " \"display_name\": \"Sam\",\n" +
+ " \"id\": 48\n" +
+ " },\n" +
+            "  \"body\": \"<p>Unfortunately you're pretty much limited to FTP...\",\n" +
+ " \"creation_date\": \"2009-05-04T13:45:37.030\"\n" +
+ "}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "child_example/answer/1", Collections.singletonMap("parent", "1"), answerDoc1);
+ StringEntity answerDoc2 = new StringEntity("{\n" +
+ " \"owner\": {\n" +
+ " \"location\": \"Norfolk, United Kingdom\",\n" +
+ " \"display_name\": \"Troll\",\n" +
+ " \"id\": 49\n" +
+ " },\n" +
+            "  \"body\": \"<p>Use Linux...\",\n" +
+ " \"creation_date\": \"2009-05-05T13:45:37.030\"\n" +
+ "}", ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "/child_example/answer/2", Collections.singletonMap("parent", "1"), answerDoc2);
+ client().performRequest("POST", "/_refresh");
+
+ TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING)
+ .field("owner.display_name.keyword").size(10);
+ ChildrenAggregationBuilder childrenAgg = new ChildrenAggregationBuilder("to-answers", "answer").subAggregation(leafTermAgg);
+ TermsAggregationBuilder termsAgg = new TermsAggregationBuilder("top-tags", ValueType.STRING).field("tags.keyword")
+ .size(10).subAggregation(childrenAgg);
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.size(0).aggregation(termsAgg);
+ SearchRequest searchRequest = new SearchRequest("child_example");
+ searchRequest.source(searchSourceBuilder);
+
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getSuggest());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(3, searchResponse.getHits().totalHits);
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ assertEquals(1, searchResponse.getAggregations().asList().size());
+ Terms terms = searchResponse.getAggregations().get("top-tags");
+ assertEquals(0, terms.getDocCountError());
+ assertEquals(0, terms.getSumOfOtherDocCounts());
+ assertEquals(3, terms.getBuckets().size());
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket.getKeyAsString(),
+ either(equalTo("file-transfer")).or(equalTo("windows-server-2003")).or(equalTo("windows-server-2008")));
+ assertEquals(1, bucket.getDocCount());
+ assertEquals(1, bucket.getAggregations().asList().size());
+ Children children = bucket.getAggregations().get("to-answers");
+ assertEquals(2, children.getDocCount());
+ assertEquals(1, children.getAggregations().asList().size());
+ Terms leafTerms = children.getAggregations().get("top-names");
+ assertEquals(0, leafTerms.getDocCountError());
+ assertEquals(0, leafTerms.getSumOfOtherDocCounts());
+ assertEquals(2, leafTerms.getBuckets().size());
+ Terms.Bucket sam = leafTerms.getBucketByKey("Sam");
+ assertEquals(1, sam.getDocCount());
+ Terms.Bucket troll = leafTerms.getBucketByKey("Troll");
+ assertEquals(1, troll.getDocCount());
+ }
+ }
+
+ public void testSearchWithSuggest() throws IOException {
+ SearchRequest searchRequest = new SearchRequest();
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+ searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion("sugg1", new PhraseSuggestionBuilder("type"))
+ .setGlobalText("type"));
+ searchSourceBuilder.size(0);
+ searchRequest.source(searchSourceBuilder);
+
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+ assertSearchHeader(searchResponse);
+ assertNull(searchResponse.getAggregations());
+ assertEquals(Collections.emptyMap(), searchResponse.getProfileResults());
+ assertEquals(0, searchResponse.getHits().totalHits);
+ assertEquals(0f, searchResponse.getHits().getMaxScore(), 0f);
+ assertEquals(0, searchResponse.getHits().getHits().length);
+ assertEquals(1, searchResponse.getSuggest().size());
+
+        Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> sugg = searchResponse
+                .getSuggest().iterator().next();
+ assertEquals("sugg1", sugg.getName());
+        for (Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option> options : sugg) {
+ assertEquals("type", options.getText().string());
+ assertEquals(0, options.getOffset());
+ assertEquals(4, options.getLength());
+            assertEquals(2, options.getOptions().size());
+ for (Suggest.Suggestion.Entry.Option option : options) {
+ assertThat(option.getScore(), greaterThan(0f));
+ assertThat(option.getText().string(), either(equalTo("type1")).or(equalTo("type2")));
+ }
+ }
+ }
+
+ public void testSearchScroll() throws Exception {
+
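+        // index 100 documents so the scrolling search below needs three pages of size 35 (35 + 35 + 30)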
+ for (int i = 0; i < 100; i++) {
+ XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
+ HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON);
+ client().performRequest("PUT", "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
+ }
+ client().performRequest("POST", "/test/_refresh");
+
+ SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC);
+ SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder);
+ SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync);
+
+ try {
+ long counter = 0;
+ assertSearchHeader(searchResponse);
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
+ assertThat(searchResponse.getHits().getHits().length, equalTo(35));
+ for (SearchHit hit : searchResponse.getHits()) {
+ assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++));
+ }
+
+ searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)),
+ highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync);
+
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
+ assertThat(searchResponse.getHits().getHits().length, equalTo(35));
+ for (SearchHit hit : searchResponse.getHits()) {
+ assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue());
+ }
+
+ searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)),
+ highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync);
+
+ assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L));
+ assertThat(searchResponse.getHits().getHits().length, equalTo(30));
+ for (SearchHit hit : searchResponse.getHits()) {
+ assertEquals(counter++, ((Number) hit.getSortValues()[0]).longValue());
+ }
+ } finally {
+ ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
+ clearScrollRequest.addScrollId(searchResponse.getScrollId());
+ ClearScrollResponse clearScrollResponse = execute(clearScrollRequest,
+ // Not using a method reference to work around https://bugs.eclipse.org/bugs/show_bug.cgi?id=517951
+ (request, headers) -> highLevelClient().clearScroll(request, headers),
+ (request, listener, headers) -> highLevelClient().clearScrollAsync(request, listener, headers));
+ assertThat(clearScrollResponse.getNumFreed(), greaterThan(0));
+ assertTrue(clearScrollResponse.isSucceeded());
+
+ SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2));
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest,
+ highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync));
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class));
+ ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause();
+ assertThat(rootCause.getMessage(), containsString("No search context found for"));
+ }
+ }
+
+ private static void assertSearchHeader(SearchResponse searchResponse) {
+ assertThat(searchResponse.getTook().nanos(), greaterThanOrEqualTo(0L));
+ assertEquals(0, searchResponse.getFailedShards());
+ assertThat(searchResponse.getTotalShards(), greaterThan(0));
+ assertEquals(searchResponse.getTotalShards(), searchResponse.getSuccessfulShards());
+ assertEquals(0, searchResponse.getShardFailures().length);
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java
new file mode 100644
index 0000000000000..00c19019f47e7
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.documentation;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.delete.DeleteRequest;
+import org.elasticsearch.action.delete.DeleteResponse;
+import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.index.VersionType;
+import org.elasticsearch.rest.RestStatus;
+
+import java.io.IOException;
+
+/**
+ * This class is used to generate the Java Delete API documentation.
+ * You need to wrap your code between two tags like:
+ * // tag::example[]
+ * // end::example[]
+ *
+ * Where example is your tag name.
+ *
+ * Then in the documentation, you can extract what is between tag and end tags with
+ * ["source","java",subs="attributes,callouts"]
+ * --------------------------------------------------
+ * sys2::[perl -ne 'exit if /end::example/; print if $tag; $tag = $tag || /tag::example/' \
+ * {docdir}/../../client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/DeleteDocumentationIT.java]
+ * --------------------------------------------------
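+ *
+ * Note that everything between the two tags, including the {@code // <1>} callout
+ * markers, is copied verbatim into the generated documentation.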
+ */
+public class DeleteDocumentationIT extends ESRestHighLevelClientTestCase {
+
+ /**
+ * This test documents docs/java-rest/high-level/document/delete.asciidoc
+ */
+ public void testDelete() throws IOException {
+ RestHighLevelClient client = highLevelClient();
+
+ // tag::delete-request
+ DeleteRequest request = new DeleteRequest(
+ "index", // <1>
+ "type", // <2>
+ "id"); // <3>
+ // end::delete-request
+
+ // tag::delete-request-props
+ request.timeout(TimeValue.timeValueSeconds(1)); // <1>
+ request.timeout("1s"); // <2>
+ request.setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL); // <3>
+ request.setRefreshPolicy("wait_for"); // <4>
+ request.version(2); // <5>
+ request.versionType(VersionType.EXTERNAL); // <6>
+ // end::delete-request-props
+
+ // tag::delete-execute
+ DeleteResponse response = client.delete(request);
+ // end::delete-execute
+
+ try {
+ // tag::delete-notfound
+ if (response.getResult().equals(DocWriteResponse.Result.NOT_FOUND)) {
+ throw new Exception("Can't find document to be removed"); // <1>
+ }
+ // end::delete-notfound
+ } catch (Exception ignored) { }
+
+ // tag::delete-execute-async
+ client.deleteAsync(request, new ActionListener<DeleteResponse>() {
+ @Override
+ public void onResponse(DeleteResponse deleteResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ });
+ // end::delete-execute-async
+
+ // tag::delete-conflict
+ try {
+ client.delete(request);
+ } catch (ElasticsearchException exception) {
+ if (exception.status().equals(RestStatus.CONFLICT)) {
+ // <1>
+ }
+ }
+ // end::delete-conflict
+
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java
new file mode 100644
index 0000000000000..01a5eb5dfc12d
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java
@@ -0,0 +1,453 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.documentation;
+
+import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.geo.GeoPoint;
+import org.elasticsearch.common.geo.ShapeRelation;
+import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
+import org.elasticsearch.common.geo.builders.ShapeBuilders;
+import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.index.query.GeoShapeQueryBuilder;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
+import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.boostingQuery;
+import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.disMaxQuery;
+import static org.elasticsearch.index.query.QueryBuilders.existsQuery;
+import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery;
+import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery;
+import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
+import static org.elasticsearch.index.query.QueryBuilders.geoPolygonQuery;
+import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery;
+import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;
+import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
+import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
+import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
+import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
+import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
+import static org.elasticsearch.index.query.QueryBuilders.regexpQuery;
+import static org.elasticsearch.index.query.QueryBuilders.scriptQuery;
+import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanContainingQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanFirstQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanMultiTermQueryBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanOrQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery;
+import static org.elasticsearch.index.query.QueryBuilders.spanWithinQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
+import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
+import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery;
+import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
+import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
+import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
+
+/**
+ * Examples of using the transport client that are imported into the transport client documentation.
+ * There are no assertions here because we're mostly concerned with making sure that the examples
+ * compile and don't throw weird runtime exceptions. Assertions and example data would be nice, but
+ * that is secondary.
+ */
+public class QueryDSLDocumentationTests extends ESTestCase {
+ public void testBool() {
+ // tag::bool
+ boolQuery()
+ .must(termQuery("content", "test1")) // <1>
+ .must(termQuery("content", "test4")) // <1>
+ .mustNot(termQuery("content", "test2")) // <2>
+ .should(termQuery("content", "test3")) // <3>
+ .filter(termQuery("content", "test5")); // <4>
+ // end::bool
+ }
+
+ public void testBoosting() {
+ // tag::boosting
+ boostingQuery(
+ termQuery("name","kimchy"), // <1>
+ termQuery("name","dadoonet")) // <2>
+ .negativeBoost(0.2f); // <3>
+ // end::boosting
+ }
+
+ public void testCommonTerms() {
+ // tag::common_terms
+ commonTermsQuery("name", // <1>
+ "kimchy"); // <2>
+ // end::common_terms
+ }
+
+ public void testConstantScore() {
+ // tag::constant_score
+ constantScoreQuery(
+ termQuery("name","kimchy")) // <1>
+ .boost(2.0f); // <2>
+ // end::constant_score
+ }
+
+ public void testDisMax() {
+ // tag::dis_max
+ disMaxQuery()
+ .add(termQuery("name", "kimchy")) // <1>
+ .add(termQuery("name", "elasticsearch")) // <2>
+ .boost(1.2f) // <3>
+ .tieBreaker(0.7f); // <4>
+ // end::dis_max
+ }
+
+ public void testExists() {
+ // tag::exists
+ existsQuery("name"); // <1>
+ // end::exists
+ }
+
+ public void testFunctionScore() {
+ // tag::function_score
+ FilterFunctionBuilder[] functions = {
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ matchQuery("name", "kimchy"), // <1>
+ randomFunction("ABCDEF")), // <2>
+ new FunctionScoreQueryBuilder.FilterFunctionBuilder(
+ exponentialDecayFunction("age", 0L, 1L)) // <3>
+ };
+ functionScoreQuery(functions);
+ // end::function_score
+ }
+
+ public void testFuzzy() {
+ // tag::fuzzy
+ fuzzyQuery(
+ "name", // <1>
+ "kimchy"); // <2>
+ // end::fuzzy
+ }
+
+ public void testGeoBoundingBox() {
+ // tag::geo_bounding_box
+ geoBoundingBoxQuery("pin.location") // <1>
+ .setCorners(40.73, -74.1, // <2>
+ 40.717, -73.99); // <3>
+ // end::geo_bounding_box
+ }
+
+ public void testGeoDistance() {
+ // tag::geo_distance
+ geoDistanceQuery("pin.location") // <1>
+ .point(40, -70) // <2>
+ .distance(200, DistanceUnit.KILOMETERS); // <3>
+ // end::geo_distance
+ }
+
+ public void testGeoPolygon() {
+ // tag::geo_polygon
+ List<GeoPoint> points = new ArrayList<>(); // <1>
+ points.add(new GeoPoint(40, -70));
+ points.add(new GeoPoint(30, -80));
+ points.add(new GeoPoint(20, -90));
+ geoPolygonQuery("pin.location", points); // <2>
+ // end::geo_polygon
+ }
+
+ public void testGeoShape() throws IOException {
+ {
+ // tag::geo_shape
+ GeoShapeQueryBuilder qb = geoShapeQuery(
+ "pin.location", // <1>
+ ShapeBuilders.newMultiPoint( // <2>
+ new CoordinatesBuilder()
+ .coordinate(0, 0)
+ .coordinate(0, 10)
+ .coordinate(10, 10)
+ .coordinate(10, 0)
+ .coordinate(0, 0)
+ .build()));
+ qb.relation(ShapeRelation.WITHIN); // <3>
+ // end::geo_shape
+ }
+
+ {
+ // tag::indexed_geo_shape
+ // Using pre-indexed shapes
+ GeoShapeQueryBuilder qb = geoShapeQuery(
+ "pin.location", // <1>
+ "DEU", // <2>
+ "countries"); // <3>
+ qb.relation(ShapeRelation.WITHIN) // <4>
+ .indexedShapeIndex("shapes") // <5>
+ .indexedShapePath("location"); // <6>
+ // end::indexed_geo_shape
+ }
+ }
+
+ public void testHasChild() {
+ // tag::has_child
+ hasChildQuery(
+ "blog_tag", // <1>
+ termQuery("tag","something"), // <2>
+ ScoreMode.None); // <3>
+ // end::has_child
+ }
+
+ public void testHasParent() {
+ // tag::has_parent
+ hasParentQuery(
+ "blog", // <1>
+ termQuery("tag","something"), // <2>
+ false); // <3>
+ // end::has_parent
+ }
+
+ public void testIds() {
+ // tag::ids
+ idsQuery("my_type", "type2")
+ .addIds("1", "4", "100");
+
+ idsQuery() // <1>
+ .addIds("1", "4", "100");
+ // end::ids
+ }
+
+ public void testMatchAll() {
+ // tag::match_all
+ matchAllQuery();
+ // end::match_all
+ }
+
+ public void testMatch() {
+ // tag::match
+ matchQuery(
+ "name", // <1>
+ "kimchy elasticsearch"); // <2>
+ // end::match
+ }
+
+ public void testMoreLikeThis() {
+ // tag::more_like_this
+ String[] fields = {"name.first", "name.last"}; // <1>
+ String[] texts = {"text like this one"}; // <2>
+
+ moreLikeThisQuery(fields, texts, null)
+ .minTermFreq(1) // <3>
+ .maxQueryTerms(12); // <4>
+ // end::more_like_this
+ }
+
+ public void testMultiMatch() {
+ // tag::multi_match
+ multiMatchQuery(
+ "kimchy elasticsearch", // <1>
+ "user", "message"); // <2>
+ // end::multi_match
+ }
+
+ public void testNested() {
+ // tag::nested
+ nestedQuery(
+ "obj1", // <1>
+ boolQuery() // <2>
+ .must(matchQuery("obj1.name", "blue"))
+ .must(rangeQuery("obj1.count").gt(5)),
+ ScoreMode.Avg); // <3>
+ // end::nested
+ }
+
+ public void testPrefix() {
+ // tag::prefix
+ prefixQuery(
+ "brand", // <1>
+ "heine"); // <2>
+ // end::prefix
+ }
+
+ public void testQueryString() {
+ // tag::query_string
+ queryStringQuery("+kimchy -elasticsearch");
+ // end::query_string
+ }
+
+ public void testRange() {
+ // tag::range
+ rangeQuery("price") // <1>
+ .from(5) // <2>
+ .to(10) // <3>
+ .includeLower(true) // <4>
+ .includeUpper(false); // <5>
+ // end::range
+
+ // tag::range_simplified
+ // A simplified form using gte, gt, lt or lte
+ rangeQuery("age") // <1>
+ .gte("10") // <2>
+ .lt("20"); // <3>
+ // end::range_simplified
+ }
+
+ public void testRegExp() {
+ // tag::regexp
+ regexpQuery(
+ "name.first", // <1>
+ "s.*y"); // <2>
+ // end::regexp
+ }
+
+ public void testScript() {
+ // tag::script_inline
+ scriptQuery(
+ new Script("doc['num1'].value > 1") // <1>
+ );
+ // end::script_inline
+
+ // tag::script_file
+ Map<String, Object> parameters = new HashMap<>();
+ parameters.put("param1", 5);
+ scriptQuery(new Script(
+ ScriptType.STORED, // <1>
+ "painless", // <2>
+ "myscript", // <3>
+ singletonMap("param1", 5))); // <4>
+ // end::script_file
+ }
+
+ public void testSimpleQueryString() {
+ // tag::simple_query_string
+ simpleQueryStringQuery("+kimchy -elasticsearch");
+ // end::simple_query_string
+ }
+
+ public void testSpanContaining() {
+ // tag::span_containing
+ spanContainingQuery(
+ spanNearQuery(spanTermQuery("field1","bar"), 5) // <1>
+ .addClause(spanTermQuery("field1","baz"))
+ .inOrder(true),
+ spanTermQuery("field1","foo")); // <2>
+ // end::span_containing
+ }
+
+ public void testSpanFirst() {
+ // tag::span_first
+ spanFirstQuery(
+ spanTermQuery("user", "kimchy"), // <1>
+ 3 // <2>
+ );
+ // end::span_first
+ }
+
+ public void testSpanMultiTerm() {
+ // tag::span_multi
+ spanMultiTermQueryBuilder(
+ prefixQuery("user", "ki")); // <1>
+ // end::span_multi
+ }
+
+ public void testSpanNear() {
+ // tag::span_near
+ spanNearQuery(
+ spanTermQuery("field","value1"), // <1>
+ 12) // <2>
+ .addClause(spanTermQuery("field","value2")) // <1>
+ .addClause(spanTermQuery("field","value3")) // <1>
+ .inOrder(false); // <3>
+ // end::span_near
+ }
+
+ public void testSpanNot() {
+ // tag::span_not
+ spanNotQuery(
+ spanTermQuery("field","value1"), // <1>
+ spanTermQuery("field","value2")); // <2>
+ // end::span_not
+ }
+
+ public void testSpanOr() {
+ // tag::span_or
+ spanOrQuery(spanTermQuery("field","value1")) // <1>
+ .addClause(spanTermQuery("field","value2")) // <1>
+ .addClause(spanTermQuery("field","value3")); // <1>
+ // end::span_or
+ }
+
+ public void testSpanTerm() {
+ // tag::span_term
+ spanTermQuery(
+ "user", // <1>
+ "kimchy"); // <2>
+ // end::span_term
+ }
+
+ public void testSpanWithin() {
+ // tag::span_within
+ spanWithinQuery(
+ spanNearQuery(spanTermQuery("field1", "bar"), 5) // <1>
+ .addClause(spanTermQuery("field1", "baz"))
+ .inOrder(true),
+ spanTermQuery("field1", "foo")); // <2>
+ // end::span_within
+ }
+
+ public void testTerm() {
+ // tag::term
+ termQuery(
+ "name", // <1>
+ "kimchy"); // <2>
+ // end::term
+ }
+
+ public void testTerms() {
+ // tag::terms
+ termsQuery("tags", // <1>
+ "blue", "pill"); // <2>
+ // end::terms
+ }
+
+ public void testType() {
+ // tag::type
+ typeQuery("my_type"); // <1>
+ // end::type
+ }
+
+ public void testWildcard() {
+ // tag::wildcard
+ wildcardQuery(
+ "user", // <1>
+ "k?mch*"); // <2>
+ // end::wildcard
+ }
+}
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 67f8426fb5faa..19ec584a1032d 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -33,7 +33,7 @@ group = 'org.elasticsearch.client'
dependencies {
compile "org.apache.httpcomponents:httpclient:${versions.httpclient}"
compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
- compile "org.apache.httpcomponents:httpasyncclient:4.1.2"
+ compile "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}"
compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}"
compile "commons-codec:commons-codec:${versions.commonscodec}"
compile "commons-logging:commons-logging:${versions.commonslogging}"
@@ -49,8 +49,9 @@ dependencies {
}
forbiddenApisMain {
- //client does not depend on core, so only jdk signatures should be checked
- signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
+ //client does not depend on core, so only jdk and http signatures should be checked
+ signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
+ PrecommitTasks.getResource('/forbidden/http-signatures.txt')]
}
forbiddenApisTest {
@@ -58,7 +59,8 @@ forbiddenApisTest {
bundledSignatures -= 'jdk-non-portable'
bundledSignatures += 'jdk-internal'
- //client does not depend on core, so only jdk signatures should be checked
+ //client does not depend on core, so only jdk and http signatures should be checked
- signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')]
+ signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt'),
+ PrecommitTasks.getResource('/forbidden/http-signatures.txt')]
}
dependencyLicenses {
diff --git a/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java b/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java
index a5e5b39bed567..1af9e0dcf0fa4 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/HttpAsyncResponseConsumerFactory.java
@@ -29,7 +29,7 @@
* consumer object. Users can implement this interface and pass their own instance to the specialized
* performRequest methods that accept an {@link HttpAsyncResponseConsumerFactory} instance as argument.
*/
-interface HttpAsyncResponseConsumerFactory {
+public interface HttpAsyncResponseConsumerFactory {
/**
* Creates the default type of {@link HttpAsyncResponseConsumer}, based on heap buffering with a buffer limit of 100MB.
diff --git a/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java b/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java
index ad2348762dd07..07ff89b7e3fb0 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/RequestLogger.java
@@ -139,11 +139,12 @@ static String buildTraceRequest(HttpUriRequest request, HttpHost host) throws IOException {
* Creates curl output for given response
*/
static String buildTraceResponse(HttpResponse httpResponse) throws IOException {
- String responseLine = "# " + httpResponse.getStatusLine().toString();
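+ //a StringBuilder avoids reallocating the accumulated trace on every appended header and body line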
+ StringBuilder responseLine = new StringBuilder();
+ responseLine.append("# ").append(httpResponse.getStatusLine());
for (Header header : httpResponse.getAllHeaders()) {
- responseLine += "\n# " + header.getName() + ": " + header.getValue();
+ responseLine.append("\n# ").append(header.getName()).append(": ").append(header.getValue());
}
- responseLine += "\n#";
+ responseLine.append("\n#");
HttpEntity entity = httpResponse.getEntity();
if (entity != null) {
if (entity.isRepeatable() == false) {
@@ -158,11 +159,11 @@ static String buildTraceResponse(HttpResponse httpResponse) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(entity.getContent(), charset))) {
String line;
while( (line = reader.readLine()) != null) {
- responseLine += "\n# " + line;
+ responseLine.append("\n# ").append(line);
}
}
}
- return responseLine;
+ return responseLine.toString();
}
private static String getUri(RequestLine requestLine) {
diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
index 89c3309dbbdd1..ba3a07454ee48 100644
--- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java
@@ -25,6 +25,7 @@
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
+import org.apache.http.client.AuthCache;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpHead;
@@ -34,8 +35,11 @@
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.methods.HttpTrace;
+import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.concurrent.FutureCallback;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.nio.client.methods.HttpAsyncMethods;
import org.apache.http.nio.protocol.HttpAsyncRequestProducer;
@@ -49,6 +53,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
+import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
@@ -91,7 +96,7 @@ public class RestClient implements Closeable {
private final long maxRetryTimeoutMillis;
private final String pathPrefix;
private final AtomicInteger lastHostIndex = new AtomicInteger(0);
- private volatile Set<HttpHost> hosts;
+ private volatile HostTuple<Set<HttpHost>> hostTuple;
private final ConcurrentMap<HttpHost, DeadHostState> blacklist = new ConcurrentHashMap<>();
private final FailureListener failureListener;
@@ -121,11 +126,13 @@ public synchronized void setHosts(HttpHost... hosts) {
throw new IllegalArgumentException("hosts must not be null nor empty");
}
Set<HttpHost> httpHosts = new HashSet<>();
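+ //priming the AuthCache with a BasicScheme per host enables preemptive basic
+ //authentication: when credentials are configured on the underlying client they are
+ //sent with the first request instead of waiting for a 401 challenge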
+ AuthCache authCache = new BasicAuthCache();
for (HttpHost host : hosts) {
Objects.requireNonNull(host, "host cannot be null");
httpHosts.add(host);
+ authCache.put(host, new BasicScheme());
}
- this.hosts = Collections.unmodifiableSet(httpHosts);
+ this.hostTuple = new HostTuple<>(Collections.unmodifiableSet(httpHosts), authCache);
this.blacklist.clear();
}
@@ -282,29 +289,65 @@ public void performRequestAsync(String method, String endpoint, Map<String, String> params,
HttpEntity entity, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
ResponseListener responseListener, Header... headers) {
- URI uri = buildUri(pathPrefix, endpoint, params);
- HttpRequestBase request = createHttpRequest(method, uri, entity);
- setHeaders(request, headers);
- FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener);
- long startTime = System.nanoTime();
- performRequestAsync(startTime, nextHost().iterator(), request, httpAsyncResponseConsumerFactory, failureTrackingResponseListener);
+ try {
+ Objects.requireNonNull(params, "params must not be null");
+ Map<String, String> requestParams = new HashMap<>(params);
+ //ignore is a special parameter supported by the clients, shouldn't be sent to es
+ String ignoreString = requestParams.remove("ignore");
+ Set<Integer> ignoreErrorCodes;
+ if (ignoreString == null) {
+ if (HttpHead.METHOD_NAME.equals(method)) {
+ //404 never causes error if returned for a HEAD request
+ ignoreErrorCodes = Collections.singleton(404);
+ } else {
+ ignoreErrorCodes = Collections.emptySet();
+ }
+ } else {
+ String[] ignoresArray = ignoreString.split(",");
+ ignoreErrorCodes = new HashSet<>();
+ if (HttpHead.METHOD_NAME.equals(method)) {
+ //404 never causes error if returned for a HEAD request
+ ignoreErrorCodes.add(404);
+ }
+ for (String ignoreCode : ignoresArray) {
+ try {
+ ignoreErrorCodes.add(Integer.valueOf(ignoreCode));
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("ignore value should be a number, found [" + ignoreString + "] instead", e);
+ }
+ }
+ }
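+ //usage sketch: a params map containing ("ignore", "404,409") makes those status
+ //codes come back as regular Responses instead of thrown ResponseExceptions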
+ URI uri = buildUri(pathPrefix, endpoint, requestParams);
+ HttpRequestBase request = createHttpRequest(method, uri, entity);
+ setHeaders(request, headers);
+ FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(responseListener);
+ long startTime = System.nanoTime();
+ performRequestAsync(startTime, nextHost(), request, ignoreErrorCodes, httpAsyncResponseConsumerFactory,
+ failureTrackingResponseListener);
+ } catch (Exception e) {
+ responseListener.onFailure(e);
+ }
}
- private void performRequestAsync(final long startTime, final Iterator<HttpHost> hosts, final HttpRequestBase request,
+ private void performRequestAsync(final long startTime, final HostTuple<Iterator<HttpHost>> hostTuple, final HttpRequestBase request,
+ final Set ignoreErrorCodes,
final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory,
final FailureTrackingResponseListener listener) {
- final HttpHost host = hosts.next();
+ final HttpHost host = hostTuple.hosts.next();
//we stream the request body if the entity allows for it
- HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request);
- HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer = httpAsyncResponseConsumerFactory.createHttpAsyncResponseConsumer();
- client.execute(requestProducer, asyncResponseConsumer, new FutureCallback<HttpResponse>() {
+ final HttpAsyncRequestProducer requestProducer = HttpAsyncMethods.create(host, request);
+ final HttpAsyncResponseConsumer<HttpResponse> asyncResponseConsumer =
+ httpAsyncResponseConsumerFactory.createHttpAsyncResponseConsumer();
+ final HttpClientContext context = HttpClientContext.create();
+ context.setAuthCache(hostTuple.authCache);
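+ //the per-request context carries the shared AuthCache, so retries against other hosts
+ //also authenticate preemptively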
+ client.execute(requestProducer, asyncResponseConsumer, context, new FutureCallback<HttpResponse>() {
@Override
public void completed(HttpResponse httpResponse) {
try {
RequestLogger.logResponse(logger, request, host, httpResponse);
int statusCode = httpResponse.getStatusLine().getStatusCode();
Response response = new Response(request.getRequestLine(), host, httpResponse);
- if (isSuccessfulResponse(request.getMethod(), statusCode)) {
+ if (isSuccessfulResponse(statusCode) || ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) {
onResponse(host);
listener.onSuccess(response);
} else {
@@ -312,7 +355,7 @@ public void completed(HttpResponse httpResponse) {
if (isRetryStatus(statusCode)) {
//mark host dead and retry against next one
onFailure(host);
- retryIfPossible(responseException, hosts, request);
+ retryIfPossible(responseException);
} else {
//mark host alive and don't retry, as the error should be a request problem
onResponse(host);
@@ -329,14 +372,14 @@ public void failed(Exception failure) {
try {
RequestLogger.logFailedRequest(logger, request, host, failure);
onFailure(host);
- retryIfPossible(failure, hosts, request);
+ retryIfPossible(failure);
} catch(Exception e) {
listener.onDefinitiveFailure(e);
}
}
- private void retryIfPossible(Exception exception, Iterator<HttpHost> hosts, HttpRequestBase request) {
- if (hosts.hasNext()) {
+ private void retryIfPossible(Exception exception) {
+ if (hostTuple.hosts.hasNext()) {
//in case we are retrying, check whether maxRetryTimeout has been reached
long timeElapsedMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTime);
long timeout = maxRetryTimeoutMillis - timeElapsedMillis;
@@ -347,7 +390,7 @@ private void retryIfPossible(Exception exception, Iterator<HttpHost> hosts, HttpRequestBase request) {
} else {
listener.trackFailure(exception);
request.reset();
- performRequestAsync(startTime, hosts, request, httpAsyncResponseConsumerFactory, listener);
+ performRequestAsync(startTime, hostTuple, request, ignoreErrorCodes, httpAsyncResponseConsumerFactory, listener);
}
} else {
listener.onDefinitiveFailure(exception);
@@ -385,17 +428,18 @@ private void setHeaders(HttpRequest httpRequest, Header[] requestHeaders) {
* The iterator returned will never be empty. In case there are no healthy hosts available, or dead ones to be retried,
* one dead host gets returned so that it can be retried.
*/
- private Iterable<HttpHost> nextHost() {
+ private HostTuple<Iterator<HttpHost>> nextHost() {
+ final HostTuple<Set<HttpHost>> hostTuple = this.hostTuple;
Collection<HttpHost> nextHosts = Collections.emptySet();
do {
- Set<HttpHost> filteredHosts = new HashSet<>(hosts);
+ Set<HttpHost> filteredHosts = new HashSet<>(hostTuple.hosts);
for (Map.Entry<HttpHost, DeadHostState> entry : blacklist.entrySet()) {
if (System.nanoTime() - entry.getValue().getDeadUntilNanos() < 0) {
filteredHosts.remove(entry.getKey());
}
}
if (filteredHosts.isEmpty()) {
- //last resort: if there are no good hosts to use, return a single dead one, the one that's closest to being retried
+ //last resort: if there are no good hosts to use, return a single dead one, the one that's closest to being retried
List