diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle
index c273e76a92aed..222de9608aeb9 100644
--- a/client/rest-high-level/build.gradle
+++ b/client/rest-high-level/build.gradle
@@ -40,6 +40,7 @@ dependencies {
compile "org.elasticsearch.plugin:parent-join-client:${version}"
compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}"
compile "org.elasticsearch.plugin:rank-eval-client:${version}"
+ compile "org.elasticsearch.plugin:lang-mustache-client:${version}"
testCompile "org.elasticsearch.client:test:${version}"
testCompile "org.elasticsearch.test:framework:${version}"
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
index f3c84db79d65f..e78e4686d6991 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java
@@ -21,6 +21,8 @@
import org.apache.http.Header;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
@@ -63,4 +65,26 @@ public void putSettingsAsync(ClusterUpdateSettingsRequest clusterUpdateSettingsR
restHighLevelClient.performRequestAsyncAndParseEntity(clusterUpdateSettingsRequest, RequestConverters::clusterPutSettings,
ClusterUpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
}
+
+ /**
+ * Get current tasks using the Task Management API
+ * <p>
+ * See
+ * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html">Task Management API on elastic.co</a>
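+ * <p>
+ * A minimal usage sketch (assuming {@code client} is an initialized {@code RestHighLevelClient}):
+ * <pre>{@code
+ * ListTasksRequest request = new ListTasksRequest();
+ * request.setActions("cluster:*");
+ * ListTasksResponse response = client.cluster().listTasks(request);
+ * }</pre>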
+ */
+ public ListTasksResponse listTasks(ListTasksRequest request, Header... headers) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
+ emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously get current tasks using the Task Management API
+ * <p>
+ * See
+ * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html">Task Management API on elastic.co</a>
+ */
+ public void listTasksAsync(ListTasksRequest request, ActionListener<ListTasksResponse> listener, Header... headers) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
+ listener, emptySet(), headers);
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index 2e7b4ba74cc39..a5a6b9f7bd271 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -29,7 +29,9 @@
import org.apache.http.entity.ContentType;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
@@ -44,8 +46,8 @@
import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest;
-import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
+import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
@@ -80,7 +82,9 @@
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.rankeval.RankEvalRequest;
import org.elasticsearch.rest.action.search.RestSearchAction;
+import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
+import org.elasticsearch.tasks.TaskId;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -458,6 +462,15 @@ static Request search(SearchRequest searchRequest) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), "_search"));
Params params = new Params(request);
+ addSearchRequestParams(params, searchRequest);
+
+ if (searchRequest.source() != null) {
+ request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE));
+ }
+ return request;
+ }
+
+ private static void addSearchRequestParams(Params params, SearchRequest searchRequest) {
params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true");
params.withRouting(searchRequest.routing());
params.withPreference(searchRequest.preference());
@@ -473,11 +486,6 @@ static Request search(SearchRequest searchRequest) throws IOException {
if (searchRequest.scroll() != null) {
params.putParam("scroll", searchRequest.scroll().keepAlive());
}
-
- if (searchRequest.source() != null) {
- request.setEntity(createEntity(searchRequest.source(), REQUEST_BODY_CONTENT_TYPE));
- }
- return request;
}
static Request searchScroll(SearchScrollRequest searchScrollRequest) throws IOException {
@@ -507,6 +515,24 @@ static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOExcep
return request;
}
+ static Request searchTemplate(SearchTemplateRequest searchTemplateRequest) throws IOException {
+ Request request;
+
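+ // Simulate requests only render the template, so they are routed to the
+ // _render/template endpoint and skip the search-specific parameters.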
+ if (searchTemplateRequest.isSimulate()) {
+ request = new Request(HttpGet.METHOD_NAME, "_render/template");
+ } else {
+ SearchRequest searchRequest = searchTemplateRequest.getRequest();
+ String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
+ request = new Request(HttpGet.METHOD_NAME, endpoint);
+
+ Params params = new Params(request);
+ addSearchRequestParams(params, searchRequest);
+ }
+
+ request.setEntity(createEntity(searchTemplateRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request existsAlias(GetAliasesRequest getAliasesRequest) {
if ((getAliasesRequest.indices() == null || getAliasesRequest.indices().length == 0) &&
(getAliasesRequest.aliases() == null || getAliasesRequest.aliases().length == 0)) {
@@ -582,6 +608,22 @@ static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSett
return request;
}
+ static Request listTasks(ListTasksRequest listTaskRequest) {
+ if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
+ throw new IllegalArgumentException("TaskId cannot be used for list tasks request");
+ }
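+ // The converter always asks for the flat task list (group_by=none below);
+ // ListTasksResponse rebuilds the per-node and parent/child groupings client-side.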
+ Request request = new Request(HttpGet.METHOD_NAME, "/_tasks");
+ Params params = new Params(request);
+ params.withTimeout(listTaskRequest.getTimeout())
+ .withDetailed(listTaskRequest.getDetailed())
+ .withWaitForCompletion(listTaskRequest.getWaitForCompletion())
+ .withParentTaskId(listTaskRequest.getParentTaskId())
+ .withNodes(listTaskRequest.getNodes())
+ .withActions(listTaskRequest.getActions())
+ .putParam("group_by", "none");
+ return request;
+ }
+
static Request rollover(RolloverRequest rolloverRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPart(rolloverRequest.getAlias()).addPathPartAsIs("_rollover")
.addPathPart(rolloverRequest.getNewIndexName()).build();
@@ -656,6 +698,19 @@ static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
return request;
}
+ static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException {
+ String endpoint = new EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build();
+ Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+
+ Params parameters = new Params(request);
+ parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
+ parameters.withTimeout(putRepositoryRequest.timeout());
+ parameters.withVerify(putRepositoryRequest.verify());
+
+ request.setEntity(createEntity(putRepositoryRequest, REQUEST_BODY_CONTENT_TYPE));
+ return request;
+ }
+
static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException {
String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);
@@ -894,6 +949,48 @@ Params withPreserveExisting(boolean preserveExisting) {
}
return this;
}
+
+ Params withDetailed(boolean detailed) {
+ if (detailed) {
+ return putParam("detailed", Boolean.TRUE.toString());
+ }
+ return this;
+ }
+
+ Params withWaitForCompletion(boolean waitForCompletion) {
+ if (waitForCompletion) {
+ return putParam("wait_for_completion", Boolean.TRUE.toString());
+ }
+ return this;
+ }
+
+ Params withNodes(String[] nodes) {
+ if (nodes != null && nodes.length > 0) {
+ return putParam("nodes", String.join(",", nodes));
+ }
+ return this;
+ }
+
+ Params withActions(String[] actions) {
+ if (actions != null && actions.length > 0) {
+ return putParam("actions", String.join(",", actions));
+ }
+ return this;
+ }
+
+ Params withParentTaskId(TaskId parentTaskId) {
+ if (parentTaskId != null && parentTaskId.isSet()) {
+ return putParam("parent_task_id", parentTaskId.toString());
+ }
+ return this;
+ }
+
+ Params withVerify(boolean verify) {
+ if (verify) {
+ return putParam("verify", Boolean.TRUE.toString());
+ }
+ return this;
+ }
}
/**
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index 1985d6bd06dd4..5dbf2709d9988 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -64,6 +64,8 @@
import org.elasticsearch.plugins.spi.NamedXContentProvider;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.mustache.SearchTemplateRequest;
+import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrixAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.adjacency.ParsedAdjacencyMatrix;
@@ -501,6 +503,32 @@ public final void clearScrollAsync(ClearScrollRequest clearScrollRequest,
listener, emptySet(), headers);
}
+ /**
+ * Executes a request using the Search Template API.
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html">Search Template API
+ * on elastic.co</a>.
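+ * <p>
+ * A minimal usage sketch (assuming {@code client} is an initialized {@code RestHighLevelClient}
+ * and a {@code posts} index exists):
+ * <pre>{@code
+ * SearchTemplateRequest request = new SearchTemplateRequest();
+ * request.setRequest(new SearchRequest("posts"));
+ * request.setScriptType(ScriptType.INLINE);
+ * request.setScript("{\"query\": { \"match\": { \"title\": \"{{value}}\" }}}");
+ * request.setScriptParams(Collections.singletonMap("value", "elasticsearch"));
+ * SearchTemplateResponse response = client.searchTemplate(request);
+ * }</pre>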
+ */
+ public final SearchTemplateResponse searchTemplate(SearchTemplateRequest searchTemplateRequest,
+ Header... headers) throws IOException {
+ return performRequestAndParseEntity(searchTemplateRequest, RequestConverters::searchTemplate,
+ SearchTemplateResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously executes a request using the Search Template API
+ *
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html">Search Template API
+ * on elastic.co</a>.
+ */
+ public final void searchTemplateAsync(SearchTemplateRequest searchTemplateRequest,
+ ActionListener<SearchTemplateResponse> listener,
+ Header... headers) {
+ performRequestAsyncAndParseEntity(searchTemplateRequest, RequestConverters::searchTemplate,
+ SearchTemplateResponse::fromXContent, listener, emptySet(), headers);
+ }
+
/**
* Executes a request using the Ranking Evaluation API.
*
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
index e526fbe7164f9..aec94586bee30 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
@@ -23,8 +23,8 @@
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
-import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
-import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import java.io.IOException;
@@ -67,4 +67,27 @@ public void getRepositoriesAsync(GetRepositoriesRequest getRepositoriesRequest,
restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories,
GetRepositoriesResponse::fromXContent, listener, emptySet(), headers);
}
+
+ /**
+ * Creates a snapshot repository.
+ * <p>
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html">Snapshot and Restore
+ * API on elastic.co</a>
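+ * <p>
+ * A minimal usage sketch (assuming {@code client} is an initialized {@code RestHighLevelClient};
+ * the repository name and location are illustrative only):
+ * <pre>{@code
+ * PutRepositoryRequest request = new PutRepositoryRequest("my_backup");
+ * request.type(FsRepository.TYPE);
+ * request.settings(Settings.builder().put("location", "."));
+ * PutRepositoryResponse response = client.snapshot().createRepository(request);
+ * }</pre>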
+ */
+ public PutRepositoryResponse createRepository(PutRepositoryRequest putRepositoryRequest, Header... headers) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, RequestConverters::createRepository,
+ PutRepositoryResponse::fromXContent, emptySet(), headers);
+ }
+
+ /**
+ * Asynchronously creates a snapshot repository.
+ * <p>
+ * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html">Snapshot and Restore
+ * API on elastic.co</a>
+ */
+ public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest,
+ ActionListener<PutRepositoryResponse> listener, Header... headers) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, RequestConverters::createRepository,
+ PutRepositoryResponse::fromXContent, listener, emptySet(), headers);
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
index 9314bb2e36cea..fa3086442f528 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ClusterClientIT.java
@@ -20,6 +20,9 @@
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
@@ -29,13 +32,16 @@
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
+import static java.util.Collections.emptyList;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@@ -105,4 +111,29 @@ public void testClusterUpdateSettingNonExistent() {
assertThat(exception.getMessage(), equalTo(
"Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
}
+
+ public void testListTasks() throws IOException {
+ ListTasksRequest request = new ListTasksRequest();
+ ListTasksResponse response = execute(request, highLevelClient().cluster()::listTasks, highLevelClient().cluster()::listTasksAsync);
+
+ assertThat(response, notNullValue());
+ assertThat(response.getNodeFailures(), equalTo(emptyList()));
+ assertThat(response.getTaskFailures(), equalTo(emptyList()));
+ // It's possible that other tasks are running besides 'cluster:monitor/tasks/lists' and its 'cluster:monitor/tasks/lists[n]' children
+ assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
+ boolean listTasksFound = false;
+ for (TaskGroup taskGroup : response.getTaskGroups()) {
+ TaskInfo parent = taskGroup.getTaskInfo();
+ if ("cluster:monitor/tasks/lists".equals(parent.getAction())) {
+ assertThat(taskGroup.getChildTasks().size(), equalTo(1));
+ TaskGroup childGroup = taskGroup.getChildTasks().iterator().next();
+ assertThat(childGroup.getChildTasks().isEmpty(), equalTo(true));
+ TaskInfo child = childGroup.getTaskInfo();
+ assertThat(child.getAction(), equalTo("cluster:monitor/tasks/lists[n]"));
+ assertThat(child.getParentTaskId(), equalTo(parent.getTaskId()));
+ listTasksFound = true;
+ }
+ }
+ assertTrue("List tasks were not found", listTasksFound);
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 2d4ef8b6413d9..4a0276e74d228 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -29,7 +29,9 @@
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
@@ -77,9 +79,11 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -94,7 +98,10 @@
import org.elasticsearch.index.rankeval.RankEvalSpec;
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RestRankEvalAction;
+import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.action.search.RestSearchAction;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.script.mustache.SearchTemplateRequest;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;
@@ -105,11 +112,13 @@
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder;
+import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.io.InputStream;
+import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -135,6 +144,7 @@
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class RequestConvertersTests extends ESTestCase {
@@ -181,8 +191,7 @@ public void testMultiGet() throws IOException {
int numberOfRequests = randomIntBetween(0, 32);
for (int i = 0; i < numberOfRequests; i++) {
- MultiGetRequest.Item item =
- new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4));
+ MultiGetRequest.Item item = new MultiGetRequest.Item(randomAlphaOfLength(4), randomAlphaOfLength(4), randomAlphaOfLength(4));
if (randomBoolean()) {
item.routing(randomAlphaOfLength(4));
}
@@ -261,7 +270,7 @@ public void testIndicesExist() {
public void testIndicesExistEmptyIndices() {
expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest()));
- expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[])null)));
+ expectThrows(IllegalArgumentException.class, () -> RequestConverters.indicesExist(new GetIndexRequest().indices((String[]) null)));
}
private static void getAndExistsTest(Function<GetRequest, Request> requestConverter, String method) {
@@ -415,7 +424,8 @@ public void testGetSettings() throws IOException {
setRandomLocal(getSettingsRequest, expectedParams);
if (randomBoolean()) {
- //the request object will not have include_defaults present unless it is set to true
+ // the request object will not have include_defaults present unless it is set to
+ // true
getSettingsRequest.includeDefaults(randomBoolean());
if (getSettingsRequest.includeDefaults()) {
expectedParams.put("include_defaults", Boolean.toString(true));
@@ -959,22 +969,21 @@ public void testBulkWithDifferentContentTypes() throws IOException {
bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.SMILE));
bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest));
- assertEquals("Mismatching content-type found for request with content-type [JSON], " +
- "previous requests have content-type [SMILE]", exception.getMessage());
+ assertEquals(
+ "Mismatching content-type found for request with content-type [JSON], " + "previous requests have content-type [SMILE]",
+ exception.getMessage());
}
{
BulkRequest bulkRequest = new BulkRequest();
- bulkRequest.add(new IndexRequest("index", "type", "0")
- .source(singletonMap("field", "value"), XContentType.JSON));
- bulkRequest.add(new IndexRequest("index", "type", "1")
- .source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new IndexRequest("index", "type", "0").source(singletonMap("field", "value"), XContentType.JSON));
+ bulkRequest.add(new IndexRequest("index", "type", "1").source(singletonMap("field", "value"), XContentType.JSON));
bulkRequest.add(new UpdateRequest("index", "type", "2")
.doc(new IndexRequest().source(singletonMap("field", "value"), XContentType.JSON))
- .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE))
- );
+ .upsert(new IndexRequest().source(singletonMap("field", "value"), XContentType.SMILE)));
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.bulk(bulkRequest));
- assertEquals("Mismatching content-type found for request with content-type [SMILE], " +
- "previous requests have content-type [JSON]", exception.getMessage());
+ assertEquals(
+ "Mismatching content-type found for request with content-type [SMILE], " + "previous requests have content-type [JSON]",
+ exception.getMessage());
}
{
XContentType xContentType = randomFrom(XContentType.CBOR, XContentType.YAML);
@@ -1011,42 +1020,14 @@ public void testSearch() throws Exception {
searchRequest.types(types);
Map<String, String> expectedParams = new HashMap<>();
- expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
- if (randomBoolean()) {
- searchRequest.routing(randomAlphaOfLengthBetween(3, 10));
- expectedParams.put("routing", searchRequest.routing());
- }
- if (randomBoolean()) {
- searchRequest.preference(randomAlphaOfLengthBetween(3, 10));
- expectedParams.put("preference", searchRequest.preference());
- }
- if (randomBoolean()) {
- searchRequest.searchType(randomFrom(SearchType.values()));
- }
- expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
- if (randomBoolean()) {
- searchRequest.requestCache(randomBoolean());
- expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache()));
- }
- if (randomBoolean()) {
- searchRequest.allowPartialSearchResults(randomBoolean());
- expectedParams.put("allow_partial_search_results", Boolean.toString(searchRequest.allowPartialSearchResults()));
- }
- if (randomBoolean()) {
- searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE));
- }
- expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
- if (randomBoolean()) {
- searchRequest.scroll(randomTimeValue());
- expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep());
- }
-
+ setRandomSearchParams(searchRequest, expectedParams);
setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
- //rarely skip setting the search source completely
+ // rarely skip setting the search source completely
if (frequently()) {
- //frequently set the search source to have some content, otherwise leave it empty but still set it
+ // frequently set the search source to have some content, otherwise leave it
+ // empty but still set it
if (frequently()) {
if (randomBoolean()) {
searchSourceBuilder.size(randomIntBetween(0, Integer.MAX_VALUE));
@@ -1116,7 +1097,8 @@ public void testMultiSearch() throws IOException {
MultiSearchRequest multiSearchRequest = new MultiSearchRequest();
for (int i = 0; i < numberOfSearchRequests; i++) {
SearchRequest searchRequest = randomSearchRequest(() -> {
- // No need to return a very complex SearchSourceBuilder here, that is tested elsewhere
+ // No need to return a very complex SearchSourceBuilder here, that is tested
+ // elsewhere
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.from(randomInt(10));
searchSourceBuilder.size(randomIntBetween(20, 100));
@@ -1124,14 +1106,13 @@ public void testMultiSearch() throws IOException {
});
// scroll is not supported in the current msearch api, so unset it:
searchRequest.scroll((Scroll) null);
- // only expand_wildcards, ignore_unavailable and allow_no_indices can be specified from msearch api, so unset other options:
+ // only expand_wildcards, ignore_unavailable and allow_no_indices can be
+ // specified from msearch api, so unset other options:
IndicesOptions randomlyGenerated = searchRequest.indicesOptions();
IndicesOptions msearchDefault = new MultiSearchRequest().indicesOptions();
- searchRequest.indicesOptions(IndicesOptions.fromOptions(
- randomlyGenerated.ignoreUnavailable(), randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(),
- randomlyGenerated.expandWildcardsClosed(), msearchDefault.allowAliasesToMultipleIndices(),
- msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases()
- ));
+ searchRequest.indicesOptions(IndicesOptions.fromOptions(randomlyGenerated.ignoreUnavailable(),
+ randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), randomlyGenerated.expandWildcardsClosed(),
+ msearchDefault.allowAliasesToMultipleIndices(), msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases()));
multiSearchRequest.add(searchRequest);
}
@@ -1156,8 +1137,8 @@ public void testMultiSearch() throws IOException {
requests.add(searchRequest);
};
MultiSearchRequest.readMultiLineFormat(new BytesArray(EntityUtils.toByteArray(request.getEntity())),
- REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null,
- null, xContentRegistry(), true);
+ REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null, null,
+ xContentRegistry(), true);
assertEquals(requests, multiSearchRequest.requests());
}
@@ -1189,11 +1170,70 @@ public void testClearScroll() throws IOException {
assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
}
+ public void testSearchTemplate() throws Exception {
+ // Create a random request.
+ String[] indices = randomIndicesNames(0, 5);
+ SearchRequest searchRequest = new SearchRequest(indices);
+
+ Map<String, String> expectedParams = new HashMap<>();
+ setRandomSearchParams(searchRequest, expectedParams);
+ setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
+
+ SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest(searchRequest);
+
+ searchTemplateRequest.setScript("{\"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" }}}");
+ searchTemplateRequest.setScriptType(ScriptType.INLINE);
+ searchTemplateRequest.setProfile(randomBoolean());
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("field", "name");
+ scriptParams.put("value", "soren");
+ searchTemplateRequest.setScriptParams(scriptParams);
+
+ // Verify that the resulting REST request looks as expected.
+ Request request = RequestConverters.searchTemplate(searchTemplateRequest);
+ StringJoiner endpoint = new StringJoiner("/", "/", "");
+ String index = String.join(",", indices);
+ if (Strings.hasLength(index)) {
+ endpoint.add(index);
+ }
+ endpoint.add("_search/template");
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals(endpoint.toString(), request.getEndpoint());
+ assertEquals(expectedParams, request.getParameters());
+ assertToXContentBody(searchTemplateRequest, request.getEntity());
+ }
+
+ public void testRenderSearchTemplate() throws Exception {
+ // Create a simple request.
+ SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest();
+ searchTemplateRequest.setSimulate(true); // Setting simulate true means the template should only be rendered.
+
+ searchTemplateRequest.setScript("template1");
+ searchTemplateRequest.setScriptType(ScriptType.STORED);
+ searchTemplateRequest.setProfile(randomBoolean());
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("field", "name");
+ scriptParams.put("value", "soren");
+ searchTemplateRequest.setScriptParams(scriptParams);
+
+ // Verify that the resulting REST request looks as expected.
+ Request request = RequestConverters.searchTemplate(searchTemplateRequest);
+ String endpoint = "_render/template";
+
+ assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+ assertEquals(endpoint, request.getEndpoint());
+ assertEquals(Collections.emptyMap(), request.getParameters());
+ assertToXContentBody(searchTemplateRequest, request.getEntity());
+ }
+
public void testExistsAlias() {
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
getAliasesRequest.indices(indices);
- //the HEAD endpoint requires at least an alias or an index
+ // the HEAD endpoint requires at least an alias or an index
boolean hasIndices = indices != null && indices.length > 0;
String[] aliases;
if (hasIndices) {
@@ -1224,15 +1264,15 @@ public void testExistsAlias() {
public void testExistsAliasNoAliasNoIndex() {
{
GetAliasesRequest getAliasesRequest = new GetAliasesRequest();
- IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
- RequestConverters.existsAlias(getAliasesRequest));
+ IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
+ () -> RequestConverters.existsAlias(getAliasesRequest));
assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
}
{
- GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[])null);
- getAliasesRequest.indices((String[])null);
- IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () ->
- RequestConverters.existsAlias(getAliasesRequest));
+ GetAliasesRequest getAliasesRequest = new GetAliasesRequest((String[]) null);
+ getAliasesRequest.indices((String[]) null);
+ IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
+ () -> RequestConverters.existsAlias(getAliasesRequest));
assertEquals("existsAlias requires at least an alias or an index", iae.getMessage());
}
}
@@ -1242,14 +1282,10 @@ public void testFieldCaps() {
String[] indices = randomIndicesNames(0, 5);
String[] fields = generateRandomStringArray(5, 10, false, false);
- FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest()
- .indices(indices)
- .fields(fields);
+ FieldCapabilitiesRequest fieldCapabilitiesRequest = new FieldCapabilitiesRequest().indices(indices).fields(fields);
Map<String, String> indicesOptionsParams = new HashMap<>();
- setRandomIndicesOptions(fieldCapabilitiesRequest::indicesOptions,
- fieldCapabilitiesRequest::indicesOptions,
- indicesOptionsParams);
+ setRandomIndicesOptions(fieldCapabilitiesRequest::indicesOptions, fieldCapabilitiesRequest::indicesOptions, indicesOptionsParams);
Request request = RequestConverters.fieldCaps(fieldCapabilitiesRequest);
@@ -1264,12 +1300,13 @@ public void testFieldCaps() {
assertEquals(endpoint.toString(), request.getEndpoint());
assertEquals(4, request.getParameters().size());
- // Note that we don't check the field param value explicitly, as field names are passed through
- // a hash set before being added to the request, and can appear in a non-deterministic order.
+ // Note that we don't check the field param value explicitly, as field names are
+ // passed through
+ // a hash set before being added to the request, and can appear in a
+ // non-deterministic order.
assertThat(request.getParameters(), hasKey("fields"));
String[] requestFields = Strings.splitStringByCommaToArray(request.getParameters().get("fields"));
- assertEquals(new HashSet<>(Arrays.asList(fields)),
- new HashSet<>(Arrays.asList(requestFields)));
+ assertEquals(new HashSet<>(Arrays.asList(fields)), new HashSet<>(Arrays.asList(requestFields)));
for (Map.Entry<String, String> param : indicesOptionsParams.entrySet()) {
assertThat(request.getParameters(), hasEntry(param.getKey(), param.getValue()));
@@ -1428,6 +1465,66 @@ public void testIndexPutSettings() throws IOException {
assertEquals(expectedParams, request.getParameters());
}
+ public void testListTasks() {
+ {
+ ListTasksRequest request = new ListTasksRequest();
+ Map<String, String> expectedParams = new HashMap<>();
+ if (randomBoolean()) {
+ request.setDetailed(randomBoolean());
+ if (request.getDetailed()) {
+ expectedParams.put("detailed", "true");
+ }
+ }
+ if (randomBoolean()) {
+ request.setWaitForCompletion(randomBoolean());
+ if (request.getWaitForCompletion()) {
+ expectedParams.put("wait_for_completion", "true");
+ }
+ }
+ if (randomBoolean()) {
+ String timeout = randomTimeValue();
+ request.setTimeout(timeout);
+ expectedParams.put("timeout", timeout);
+ }
+ if (randomBoolean()) {
+ if (randomBoolean()) {
+ TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong());
+ request.setParentTaskId(taskId);
+ expectedParams.put("parent_task_id", taskId.toString());
+ } else {
+ request.setParentTask(TaskId.EMPTY_TASK_ID);
+ }
+ }
+ if (randomBoolean()) {
+ String[] nodes = generateRandomStringArray(10, 8, false);
+ request.setNodes(nodes);
+ if (nodes.length > 0) {
+ expectedParams.put("nodes", String.join(",", nodes));
+ }
+ }
+ if (randomBoolean()) {
+ String[] actions = generateRandomStringArray(10, 8, false);
+ request.setActions(actions);
+ if (actions.length > 0) {
+ expectedParams.put("actions", String.join(",", actions));
+ }
+ }
+ expectedParams.put("group_by", "none");
+ Request httpRequest = RequestConverters.listTasks(request);
+ assertThat(httpRequest, notNullValue());
+ assertThat(httpRequest.getMethod(), equalTo(HttpGet.METHOD_NAME));
+ assertThat(httpRequest.getEntity(), nullValue());
+ assertThat(httpRequest.getEndpoint(), equalTo("/_tasks"));
+ assertThat(httpRequest.getParameters(), equalTo(expectedParams));
+ }
+ {
+ ListTasksRequest request = new ListTasksRequest();
+ request.setTaskId(new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()));
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.listTasks(request));
+ assertEquals("TaskId cannot be used for list tasks request", exception.getMessage());
+ }
+ }
+
public void testGetRepositories() {
Map<String, String> expectedParams = new HashMap<>();
StringBuilder endpoint = new StringBuilder("/_snapshot");
@@ -1437,7 +1534,7 @@ public void testGetRepositories() {
setRandomLocal(getRepositoriesRequest, expectedParams);
if (randomBoolean()) {
- String[] entries = new String[] {"a", "b", "c"};
+ String[] entries = new String[] { "a", "b", "c" };
getRepositoriesRequest.repositories(entries);
endpoint.append("/" + String.join(",", entries));
}
@@ -1448,6 +1545,27 @@ public void testGetRepositories() {
assertThat(expectedParams, equalTo(request.getParameters()));
}
+ public void testCreateRepository() throws IOException {
+ String repository = "repo";
+ String endpoint = "/_snapshot/" + repository;
+ Path repositoryLocation = PathUtils.get(".");
+ PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository);
+ putRepositoryRequest.type(FsRepository.TYPE);
+ putRepositoryRequest.verify(randomBoolean());
+
+ putRepositoryRequest.settings(
+ Settings.builder()
+ .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
+ .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
+ .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
+ .build());
+
+ Request request = RequestConverters.createRepository(putRepositoryRequest);
+ assertThat(endpoint, equalTo(request.getEndpoint()));
+ assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
+ assertToXContentBody(putRepositoryRequest, request.getEntity());
+ }
+
public void testPutTemplateRequest() throws Exception {
Map<String, String> names = new HashMap<>();
names.put("log", "log");
@@ -1455,9 +1573,8 @@ public void testPutTemplateRequest() throws Exception {
names.put("-#template", "-%23template");
names.put("foo^bar", "foo%5Ebar");
- PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest()
- .name(randomFrom(names.keySet()))
- .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
+ PutIndexTemplateRequest putTemplateRequest = new PutIndexTemplateRequest().name(randomFrom(names.keySet()))
+ .patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false)));
if (randomBoolean()) {
putTemplateRequest.order(randomInt());
}
@@ -1514,14 +1631,12 @@ public void testEndpointBuilder() {
assertEquals("/a/b", endpointBuilder.build());
}
{
- EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b")
- .addPathPartAsIs("_create");
+ EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a").addPathPart("b").addPathPartAsIs("_create");
assertEquals("/a/b/_create", endpointBuilder.build());
}
{
- EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c")
- .addPathPartAsIs("_create");
+ EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("a", "b", "c").addPathPartAsIs("_create");
assertEquals("/a/b/c/_create", endpointBuilder.build());
}
{
@@ -1580,13 +1695,12 @@ public void testEndpointBuilderEncodeParts() {
assertEquals("/foo%5Ebar", endpointBuilder.build());
}
{
- EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2")
- .addPathPartAsIs("_search");
+ EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPart("cluster1:index1,index2").addPathPartAsIs("_search");
assertEquals("/cluster1:index1,index2/_search", endpointBuilder.build());
}
{
- EndpointBuilder endpointBuilder = new EndpointBuilder()
- .addCommaSeparatedPathParts(new String[]{"index1", "index2"}).addPathPartAsIs("cache/clear");
+ EndpointBuilder endpointBuilder = new EndpointBuilder().addCommaSeparatedPathParts(new String[] { "index1", "index2" })
+ .addPathPartAsIs("cache/clear");
assertEquals("/index1,index2/cache/clear", endpointBuilder.build());
}
}
@@ -1594,12 +1708,12 @@ public void testEndpointBuilderEncodeParts() {
public void testEndpoint() {
assertEquals("/index/type/id", RequestConverters.endpoint("index", "type", "id"));
assertEquals("/index/type/id/_endpoint", RequestConverters.endpoint("index", "type", "id", "_endpoint"));
- assertEquals("/index1,index2", RequestConverters.endpoint(new String[]{"index1", "index2"}));
- assertEquals("/index1,index2/_endpoint", RequestConverters.endpoint(new String[]{"index1", "index2"}, "_endpoint"));
- assertEquals("/index1,index2/type1,type2/_endpoint", RequestConverters.endpoint(new String[]{"index1", "index2"},
- new String[]{"type1", "type2"}, "_endpoint"));
- assertEquals("/index1,index2/_endpoint/suffix1,suffix2", RequestConverters.endpoint(new String[]{"index1", "index2"},
- "_endpoint", new String[]{"suffix1", "suffix2"}));
+ assertEquals("/index1,index2", RequestConverters.endpoint(new String[] { "index1", "index2" }));
+ assertEquals("/index1,index2/_endpoint", RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint"));
+ assertEquals("/index1,index2/type1,type2/_endpoint",
+ RequestConverters.endpoint(new String[] { "index1", "index2" }, new String[] { "type1", "type2" }, "_endpoint"));
+ assertEquals("/index1,index2/_endpoint/suffix1,suffix2",
+ RequestConverters.endpoint(new String[] { "index1", "index2" }, "_endpoint", new String[] { "suffix1", "suffix2" }));
}
public void testCreateContentType() {
@@ -1615,20 +1729,22 @@ public void testEnforceSameContentType() {
XContentType bulkContentType = randomBoolean() ? xContentType : null;
- IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () ->
- enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR), bulkContentType));
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+ () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.CBOR),
+ bulkContentType));
assertEquals("Unsupported content-type found for request with content-type [CBOR], only JSON and SMILE are supported",
exception.getMessage());
- exception = expectThrows(IllegalArgumentException.class, () ->
- enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML), bulkContentType));
+ exception = expectThrows(IllegalArgumentException.class,
+ () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), XContentType.YAML),
+ bulkContentType));
assertEquals("Unsupported content-type found for request with content-type [YAML], only JSON and SMILE are supported",
exception.getMessage());
XContentType requestContentType = xContentType == XContentType.JSON ? XContentType.SMILE : XContentType.JSON;
- exception = expectThrows(IllegalArgumentException.class, () ->
- enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType));
+ exception = expectThrows(IllegalArgumentException.class,
+ () -> enforceSameContentType(new IndexRequest().source(singletonMap("field", "value"), requestContentType), xContentType));
assertEquals("Mismatching content-type found for request with content-type [" + requestContentType + "], "
+ "previous requests have content-type [" + xContentType + "]", exception.getMessage());
}
@@ -1662,12 +1778,44 @@ private static void randomizeFetchSourceContextParams(Consumer<FetchSourceContext> consumer,
+ private static void setRandomSearchParams(SearchRequest searchRequest, Map<String, String> expectedParams) {
+ expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true");
+ if (randomBoolean()) {
+ searchRequest.routing(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("routing", searchRequest.routing());
+ }
+ if (randomBoolean()) {
+ searchRequest.preference(randomAlphaOfLengthBetween(3, 10));
+ expectedParams.put("preference", searchRequest.preference());
+ }
+ if (randomBoolean()) {
+ searchRequest.searchType(randomFrom(SearchType.values()));
+ }
+ expectedParams.put("search_type", searchRequest.searchType().name().toLowerCase(Locale.ROOT));
+ if (randomBoolean()) {
+ searchRequest.requestCache(randomBoolean());
+ expectedParams.put("request_cache", Boolean.toString(searchRequest.requestCache()));
+ }
+ if (randomBoolean()) {
+ searchRequest.allowPartialSearchResults(randomBoolean());
+ expectedParams.put("allow_partial_search_results", Boolean.toString(searchRequest.allowPartialSearchResults()));
+ }
+ if (randomBoolean()) {
+ searchRequest.setBatchedReduceSize(randomIntBetween(2, Integer.MAX_VALUE));
+ }
+ expectedParams.put("batched_reduce_size", Integer.toString(searchRequest.getBatchedReduceSize()));
+ if (randomBoolean()) {
+ searchRequest.scroll(randomTimeValue());
+ expectedParams.put("scroll", searchRequest.scroll().keepAlive().getStringRep());
+ }
+ }
+
private static void setRandomIndicesOptions(Consumer<IndicesOptions> setter, Supplier<IndicesOptions> getter,
- Map<String, String> expectedParams) {
+ Map<String, String> expectedParams) {
if (randomBoolean()) {
- setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(),
- randomBoolean()));
+ setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()));
}
expectedParams.put("ignore_unavailable", Boolean.toString(getter.get().ignoreUnavailable()));
expectedParams.put("allow_no_indices", Boolean.toString(getter.get().allowNoIndices()));
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
index 549b4ce0a85c5..e147642fc73bd 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -38,8 +38,11 @@
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.index.query.TermsQueryBuilder;
@@ -48,6 +51,8 @@
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.script.mustache.SearchTemplateRequest;
+import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
@@ -69,10 +74,12 @@
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
import static org.hamcrest.Matchers.both;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.either;
@@ -733,6 +740,103 @@ public void testMultiSearch_failure() throws Exception {
assertThat(multiSearchResponse.getResponses()[1].getResponse(), nullValue());
}
+ public void testSearchTemplate() throws IOException {
+ SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest();
+ searchTemplateRequest.setRequest(new SearchRequest("index"));
+
+ searchTemplateRequest.setScriptType(ScriptType.INLINE);
+ searchTemplateRequest.setScript(
+ "{" +
+ " \"query\": {" +
+ " \"match\": {" +
+ " \"num\": {{number}}" +
+ " }" +
+ " }" +
+ "}");
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("number", 10);
+ searchTemplateRequest.setScriptParams(scriptParams);
+
+ searchTemplateRequest.setExplain(true);
+ searchTemplateRequest.setProfile(true);
+
+ SearchTemplateResponse searchTemplateResponse = execute(searchTemplateRequest,
+ highLevelClient()::searchTemplate,
+ highLevelClient()::searchTemplateAsync);
+
+ assertNull(searchTemplateResponse.getSource());
+
+ SearchResponse searchResponse = searchTemplateResponse.getResponse();
+ assertNotNull(searchResponse);
+
+ assertEquals(1, searchResponse.getHits().totalHits);
+ assertEquals(1, searchResponse.getHits().getHits().length);
+ assertThat(searchResponse.getHits().getMaxScore(), greaterThan(0f));
+
+ SearchHit hit = searchResponse.getHits().getHits()[0];
+ assertNotNull(hit.getExplanation());
+
+ assertFalse(searchResponse.getProfileResults().isEmpty());
+ }
+
+ public void testNonExistentSearchTemplate() {
+ SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest();
+ searchTemplateRequest.setRequest(new SearchRequest("index"));
+
+ searchTemplateRequest.setScriptType(ScriptType.STORED);
+ searchTemplateRequest.setScript("non-existent");
+ searchTemplateRequest.setScriptParams(Collections.emptyMap());
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> execute(searchTemplateRequest,
+ highLevelClient()::searchTemplate,
+ highLevelClient()::searchTemplateAsync));
+
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ }
+
+ public void testRenderSearchTemplate() throws IOException {
+ SearchTemplateRequest searchTemplateRequest = new SearchTemplateRequest();
+
+ searchTemplateRequest.setScriptType(ScriptType.INLINE);
+ searchTemplateRequest.setScript(
+ "{" +
+ " \"query\": {" +
+ " \"match\": {" +
+ " \"num\": {{number}}" +
+ " }" +
+ " }" +
+ "}");
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("number", 10);
+ searchTemplateRequest.setScriptParams(scriptParams);
+
+ // Setting simulate true causes the template to only be rendered.
+ searchTemplateRequest.setSimulate(true);
+
+ SearchTemplateResponse searchTemplateResponse = execute(searchTemplateRequest,
+ highLevelClient()::searchTemplate,
+ highLevelClient()::searchTemplateAsync);
+ assertNull(searchTemplateResponse.getResponse());
+
+ BytesReference expectedSource = BytesReference.bytes(
+ XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("query")
+ .startObject("match")
+ .field("num", 10)
+ .endObject()
+ .endObject()
+ .endObject());
+
+ BytesReference actualSource = searchTemplateResponse.getSource();
+ assertNotNull(actualSource);
+
+ assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON);
+ }
+
public void testFieldCaps() throws IOException {
FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
.indices("index1", "index2")
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
index ab2c632bfeb58..1d0ea953cd5c1 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
@@ -19,56 +19,56 @@
package org.elasticsearch.client;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.repositories.fs.FsRepository;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
-import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
public class SnapshotIT extends ESRestHighLevelClientTestCase {
- public void testModulesGetRepositoriesUsingParams() throws IOException {
- String repository = "test";
- String repositorySettings = "{\"type\":\"fs\", \"settings\":{\"location\": \".\"}}";
- highLevelClient().getLowLevelClient().performRequest("put", "_snapshot/" + repository, Collections.emptyMap(),
- new StringEntity(repositorySettings, ContentType.APPLICATION_JSON));
-
- highLevelClient().getLowLevelClient().performRequest("put", "_snapshot/" + repository + "_other", Collections.emptyMap(),
- new StringEntity(repositorySettings, ContentType.APPLICATION_JSON));
+ private PutRepositoryResponse createTestRepository(String repository, String type, String settings) throws IOException {
+ PutRepositoryRequest request = new PutRepositoryRequest(repository);
+ request.settings(settings, XContentType.JSON);
+ request.type(type);
+ return execute(request, highLevelClient().snapshot()::createRepository,
+ highLevelClient().snapshot()::createRepositoryAsync);
- {
- GetRepositoriesRequest request = new GetRepositoriesRequest();
- request.repositories(new String[]{repository});
- GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories,
- highLevelClient().snapshot()::getRepositoriesAsync);
- assertThat(1, equalTo(response.repositories().size()));
- }
- {
- GetRepositoriesRequest request = new GetRepositoriesRequest();
- GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories,
- highLevelClient().snapshot()::getRepositoriesAsync);
- assertThat(2, equalTo(response.repositories().size()));
- }
}
- public void testModulesGetDefaultRepositories() throws IOException {
- String repositorySettings = "{\"type\":\"fs\", \"settings\":{\"location\": \".\"}}";
- GetRepositoriesRequest request = new GetRepositoriesRequest();
+ public void testCreateRepository() throws IOException {
+ PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}");
+ assertTrue(response.isAcknowledged());
+ }
- highLevelClient().getLowLevelClient().performRequest("put", "_snapshot/test", Collections.emptyMap(),
- new StringEntity(repositorySettings, ContentType.APPLICATION_JSON));
+ public void testModulesGetRepositoriesUsingParams() throws IOException {
+ String testRepository = "test";
+ assertTrue(createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
+ assertTrue(createTestRepository("other", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
+ GetRepositoriesRequest request = new GetRepositoriesRequest();
+ request.repositories(new String[]{testRepository});
GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories,
highLevelClient().snapshot()::getRepositoriesAsync);
assertThat(1, equalTo(response.repositories().size()));
}
+ public void testModulesGetDefaultRepositories() throws IOException {
+ assertTrue(createTestRepository("other", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
+ assertTrue(createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged());
+
+ GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepositories,
+ highLevelClient().snapshot()::getRepositoriesAsync);
+ assertThat(2, equalTo(response.repositories().size()));
+ }
+
public void testModulesGetRepositoriesNonExistent() throws IOException {
String repository = "doesnotexist";
GetRepositoriesRequest request = new GetRepositoriesRequest(new String[]{repository});
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
index 2e7ea1650f424..d41b11c68fe44 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/ClusterClientDocumentationIT.java
@@ -19,8 +19,14 @@
package org.elasticsearch.client.documentation;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.LatchedActionListener;
+import org.elasticsearch.action.TaskOperationFailure;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
+import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
@@ -31,14 +37,20 @@
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.indices.recovery.RecoverySettings;
+import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
+import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.hamcrest.Matchers.notNullValue;
/**
* This class is used to generate the Java Cluster API documentation.
@@ -177,4 +189,87 @@ public void onFailure(Exception e) {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
+
+ public void testListTasks() throws IOException {
+ RestHighLevelClient client = highLevelClient();
+ {
+ // tag::list-tasks-request
+ ListTasksRequest request = new ListTasksRequest();
+ // end::list-tasks-request
+
+ // tag::list-tasks-request-filter
+ request.setActions("cluster:*"); // <1>
+ request.setNodes("nodeId1", "nodeId2"); // <2>
+ request.setParentTaskId(new TaskId("parentTaskId", 42)); // <3>
+ // end::list-tasks-request-filter
+
+ // tag::list-tasks-request-detailed
+ request.setDetailed(true); // <1>
+ // end::list-tasks-request-detailed
+
+ // tag::list-tasks-request-wait-completion
+ request.setWaitForCompletion(true); // <1>
+ request.setTimeout(TimeValue.timeValueSeconds(50)); // <2>
+ request.setTimeout("50s"); // <3>
+ // end::list-tasks-request-wait-completion
+ }
+
+ ListTasksRequest request = new ListTasksRequest();
+
+ // tag::list-tasks-execute
+ ListTasksResponse response = client.cluster().listTasks(request);
+ // end::list-tasks-execute
+
+ assertThat(response, notNullValue());
+
+ // tag::list-tasks-response-tasks
+ List<TaskInfo> tasks = response.getTasks(); // <1>
+ // end::list-tasks-response-tasks
+
+ // tag::list-tasks-response-calc
+ Map<String, List<TaskInfo>> perNodeTasks = response.getPerNodeTasks(); // <1>
+ List<TaskGroup> groups = response.getTaskGroups(); // <2>
+ // end::list-tasks-response-calc
+
+ // tag::list-tasks-response-failures
+ List<ElasticsearchException> nodeFailures = response.getNodeFailures(); // <1>
+ List<TaskOperationFailure> taskFailures = response.getTaskFailures(); // <2>
+ // end::list-tasks-response-failures
+
+ assertThat(response.getNodeFailures(), equalTo(emptyList()));
+ assertThat(response.getTaskFailures(), equalTo(emptyList()));
+ assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
+ }
+
+ public void testListTasksAsync() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+ {
+ ListTasksRequest request = new ListTasksRequest();
+
+ // tag::list-tasks-execute-listener
+ ActionListener<ListTasksResponse> listener =
+ new ActionListener<ListTasksResponse>() {
+ @Override
+ public void onResponse(ListTasksResponse response) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::list-tasks-execute-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::list-tasks-execute-async
+ client.cluster().listTasksAsync(request, listener); // <1>
+ // end::list-tasks-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
index 8a12016025c3e..463c5f7d12f5e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
@@ -41,7 +41,11 @@
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.unit.TimeValue;
@@ -60,6 +64,9 @@
import org.elasticsearch.index.rankeval.RatedRequest;
import org.elasticsearch.index.rankeval.RatedSearchHit;
import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.script.mustache.SearchTemplateRequest;
+import org.elasticsearch.script.mustache.SearchTemplateResponse;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
@@ -92,6 +99,7 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
@@ -706,9 +714,130 @@ public void onFailure(Exception e) {
}
}
+ public void testSearchTemplateWithInlineScript() throws Exception {
+ indexSearchTestData();
+ RestHighLevelClient client = highLevelClient();
+
+ // tag::search-template-request-inline
+ SearchTemplateRequest request = new SearchTemplateRequest();
+ request.setRequest(new SearchRequest("posts")); // <1>
+
+ request.setScriptType(ScriptType.INLINE);
+ request.setScript( // <2>
+ "{" +
+ " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
+ " \"size\" : \"{{size}}\"" +
+ "}");
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("field", "title");
+ scriptParams.put("value", "elasticsearch");
+ scriptParams.put("size", 5);
+ request.setScriptParams(scriptParams); // <3>
+ // end::search-template-request-inline
+
+ // tag::search-template-response
+ SearchTemplateResponse response = client.searchTemplate(request);
+ SearchResponse searchResponse = response.getResponse();
+ // end::search-template-response
+
+ assertNotNull(searchResponse);
+ assertTrue(searchResponse.getHits().totalHits > 0);
+
+ // tag::render-search-template-request
+ request.setSimulate(true); // <1>
+ // end::render-search-template-request
+
+ // tag::render-search-template-response
+ SearchTemplateResponse renderResponse = client.searchTemplate(request);
+ BytesReference source = renderResponse.getSource(); // <1>
+ // end::render-search-template-response
+
+ assertNotNull(source);
+ assertEquals((
+ "{" +
+ " \"size\" : \"5\"," +
+ " \"query\": { \"match\" : { \"title\" : \"elasticsearch\" } }" +
+ "}").replaceAll("\\s+", ""), source.utf8ToString());
+ }
+
+ public void testSearchTemplateWithStoredScript() throws Exception {
+ indexSearchTestData();
+ RestHighLevelClient client = highLevelClient();
+ RestClient restClient = client();
+
+ // tag::register-script
+ Request scriptRequest = new Request("POST", "_scripts/title_search");
+ scriptRequest.setJsonEntity(
+ "{" +
+ " \"script\": {" +
+ " \"lang\": \"mustache\"," +
+ " \"source\": {" +
+ " \"query\": { \"match\" : { \"{{field}}\" : \"{{value}}\" } }," +
+ " \"size\" : \"{{size}}\"" +
+ " }" +
+ " }" +
+ "}");
+ Response scriptResponse = restClient.performRequest(scriptRequest);
+ // end::register-script
+ assertEquals(RestStatus.OK.getStatus(), scriptResponse.getStatusLine().getStatusCode());
+
+ // tag::search-template-request-stored
+ SearchTemplateRequest request = new SearchTemplateRequest();
+ request.setRequest(new SearchRequest("posts"));
+
+ request.setScriptType(ScriptType.STORED);
+ request.setScript("title_search");
+
+ Map<String, Object> params = new HashMap<>();
+ params.put("field", "title");
+ params.put("value", "elasticsearch");
+ params.put("size", 5);
+ request.setScriptParams(params);
+ // end::search-template-request-stored
+
+ // tag::search-template-request-options
+ request.setExplain(true);
+ request.setProfile(true);
+ // end::search-template-request-options
+
+ // tag::search-template-execute
+ SearchTemplateResponse response = client.searchTemplate(request);
+ // end::search-template-execute
+
+ SearchResponse searchResponse = response.getResponse();
+ assertNotNull(searchResponse);
+ assertTrue(searchResponse.getHits().totalHits > 0);
+
+ // tag::search-template-execute-listener
+ ActionListener<SearchTemplateResponse> listener = new ActionListener<SearchTemplateResponse>() {
+ @Override
+ public void onResponse(SearchTemplateResponse response) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::search-template-execute-listener
+
+ // Replace the empty listener by a blocking listener for tests.
+ CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::search-template-execute-async
+ client.searchTemplateAsync(request, listener); // <1>
+ // end::search-template-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+
public void testFieldCaps() throws Exception {
indexSearchTestData();
RestHighLevelClient client = highLevelClient();
+
// tag::field-caps-request
FieldCapabilitiesRequest request = new FieldCapabilitiesRequest()
.fields("user")
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
index 1044cc9da3332..c57f8e2a2fbd5 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -19,20 +19,24 @@
package org.elasticsearch.client.documentation;
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
+import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.repositories.fs.FsRepository;
import java.io.IOException;
-import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -58,7 +62,114 @@
*/
public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase {
- private static final String testRepository = "test_repository";
+ private static final String repositoryName = "test_repository";
+
+ public void testSnapshotCreateRepository() throws IOException {
+ RestHighLevelClient client = highLevelClient();
+
+ // tag::create-repository-request
+ PutRepositoryRequest request = new PutRepositoryRequest();
+ // end::create-repository-request
+
+ // tag::create-repository-create-settings
+ String locationKey = FsRepository.LOCATION_SETTING.getKey();
+ String locationValue = ".";
+ String compressKey = FsRepository.COMPRESS_SETTING.getKey();
+ boolean compressValue = true;
+
+ Settings settings = Settings.builder()
+ .put(locationKey, locationValue)
+ .put(compressKey, compressValue)
+ .build(); // <1>
+ // end::create-repository-create-settings
+
+ // tag::create-repository-request-repository-settings
+ request.settings(settings); // <1>
+ // end::create-repository-request-repository-settings
+
+ {
+ // tag::create-repository-settings-builder
+ Settings.Builder settingsBuilder = Settings.builder()
+ .put(locationKey, locationValue)
+ .put(compressKey, compressValue);
+ request.settings(settingsBuilder); // <1>
+ // end::create-repository-settings-builder
+ }
+ {
+ // tag::create-repository-settings-map
+ Map<String, Object> map = new HashMap<>();
+ map.put(locationKey, locationValue);
+ map.put(compressKey, compressValue);
+ request.settings(map); // <1>
+ // end::create-repository-settings-map
+ }
+ {
+ // tag::create-repository-settings-source
+ request.settings("{\"location\": \".\", \"compress\": \"true\"}",
+ XContentType.JSON); // <1>
+ // end::create-repository-settings-source
+ }
+
+ // tag::create-repository-request-name
+ request.name(repositoryName); // <1>
+ // end::create-repository-request-name
+ // tag::create-repository-request-type
+ request.type(FsRepository.TYPE); // <1>
+ // end::create-repository-request-type
+
+ // tag::create-repository-request-masterTimeout
+ request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+ request.masterNodeTimeout("1m"); // <2>
+ // end::create-repository-request-masterTimeout
+ // tag::create-repository-request-timeout
+ request.timeout(TimeValue.timeValueMinutes(1)); // <1>
+ request.timeout("1m"); // <2>
+ // end::create-repository-request-timeout
+ // tag::create-repository-request-verify
+ request.verify(true); // <1>
+ // end::create-repository-request-verify
+
+ // tag::create-repository-execute
+ PutRepositoryResponse response = client.snapshot().createRepository(request);
+ // end::create-repository-execute
+
+ // tag::create-repository-response
+ boolean acknowledged = response.isAcknowledged(); // <1>
+ // end::create-repository-response
+ assertTrue(acknowledged);
+ }
+
+ public void testSnapshotCreateRepositoryAsync() throws InterruptedException {
+ RestHighLevelClient client = highLevelClient();
+ {
+ PutRepositoryRequest request = new PutRepositoryRequest(repositoryName);
+
+ // tag::create-repository-execute-listener
+ ActionListener<PutRepositoryResponse> listener =
+ new ActionListener<PutRepositoryResponse>() {
+ @Override
+ public void onResponse(PutRepositoryResponse putRepositoryResponse) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::create-repository-execute-listener
+
+ // Replace the empty listener by a blocking listener in test
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::create-repository-execute-async
+ client.snapshot().createRepositoryAsync(request, listener); // <1>
+ // end::create-repository-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
public void testSnapshotGetRepository() throws IOException {
RestHighLevelClient client = highLevelClient();
@@ -70,7 +181,7 @@ public void testSnapshotGetRepository() throws IOException {
// end::get-repository-request
// tag::get-repository-request-repositories
- String [] repositories = new String[] { testRepository };
+ String [] repositories = new String[] {repositoryName};
request.repositories(repositories); // <1>
// end::get-repository-request-repositories
// tag::get-repository-request-local
@@ -89,7 +200,7 @@ public void testSnapshotGetRepository() throws IOException {
List<RepositoryMetaData> repositoryMetaDataResponse = response.repositories();
// end::get-repository-response
assertThat(1, equalTo(repositoryMetaDataResponse.size()));
- assertThat(testRepository, equalTo(repositoryMetaDataResponse.get(0).name()));
+ assertThat(repositoryName, equalTo(repositoryMetaDataResponse.get(0).name()));
}
public void testSnapshotGetRepositoryAsync() throws InterruptedException {
@@ -122,14 +233,12 @@ public void onFailure(Exception e) {
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
-
}
private void createTestRepositories() throws IOException {
- RestHighLevelClient client = highLevelClient();
- String repositorySettings = "{\"type\":\"fs\", \"settings\":{\"location\": \".\"}}";
- highLevelClient().getLowLevelClient().performRequest("put", "_snapshot/" + testRepository, Collections.emptyMap(),
- new StringEntity(repositorySettings, ContentType.APPLICATION_JSON));
-
+ PutRepositoryRequest request = new PutRepositoryRequest(repositoryName);
+ request.type(FsRepository.TYPE);
+ request.settings("{\"location\": \".\"}", XContentType.JSON);
+ assertTrue(highLevelClient().snapshot().createRepository(request).isAcknowledged());
}
}
diff --git a/docs/java-api/query-dsl/has-child-query.asciidoc b/docs/java-api/query-dsl/has-child-query.asciidoc
index 300b32e1922b0..f47f3af487dfe 100644
--- a/docs/java-api/query-dsl/has-child-query.asciidoc
+++ b/docs/java-api/query-dsl/has-child-query.asciidoc
@@ -9,7 +9,7 @@ When using the `has_child` query it is important to use the `PreBuiltTransportCl
--------------------------------------------------
Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build();
TransportClient client = new PreBuiltTransportClient(settings);
-client.addTransportAddress(new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
+client.addTransportAddress(new TransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)));
--------------------------------------------------
Otherwise the parent-join module doesn't get loaded and the `has_child` query can't be used from the transport client.
diff --git a/docs/java-rest/high-level/cluster/list_tasks.asciidoc b/docs/java-rest/high-level/cluster/list_tasks.asciidoc
new file mode 100644
index 0000000000000..1a2117b2e66e6
--- /dev/null
+++ b/docs/java-rest/high-level/cluster/list_tasks.asciidoc
@@ -0,0 +1,101 @@
+[[java-rest-high-cluster-list-tasks]]
+=== List Tasks API
+
+The List Tasks API allows you to get information about the tasks currently executing in the cluster.
+
+[[java-rest-high-cluster-list-tasks-request]]
+==== List Tasks Request
+
+A `ListTasksRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request]
+--------------------------------------------------
+There are no required parameters. By default the client will list all tasks and will not wait
+for task completion.
+
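+Putting the request and its execution together, a minimal sketch might look like
+the following (`client` is assumed to be an initialized `RestHighLevelClient`):
+
+["source","java"]
+--------------------------------------------------
+ListTasksRequest request = new ListTasksRequest(); // no filters: list everything
+ListTasksResponse response = client.cluster().listTasks(request);
+--------------------------------------------------
+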
+==== Parameters
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-filter]
+--------------------------------------------------
+<1> Request only cluster-related tasks
+<2> Request all tasks running on nodes nodeId1 and nodeId2
+<3> Request only children of a particular task
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-detailed]
+--------------------------------------------------
+<1> Whether the information should include detailed, potentially slow to generate data. Defaults to `false`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-wait-completion]
+--------------------------------------------------
+<1> Whether this request should wait for all found tasks to complete. Defaults to `false`
+<2> Timeout for the request as a `TimeValue`. Applicable only if `setWaitForCompletion` is `true`.
+Defaults to 30 seconds
+<3> Timeout as a `String`
+
+[[java-rest-high-cluster-list-tasks-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute]
+--------------------------------------------------
+
+[[java-rest-high-cluster-list-tasks-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a list tasks request requires both the
+`ListTasksRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-async]
+--------------------------------------------------
+<1> The `ListTasksRequest` to execute and the `ActionListener` to use
+when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `ListTasksResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-cluster-list-tasks-response]]
+==== List Tasks Response
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-tasks]
+--------------------------------------------------
+<1> List of currently running tasks
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-calc]
+--------------------------------------------------
+<1> List of tasks grouped by a node
+<2> List of tasks grouped by a parent task
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-failures]
+--------------------------------------------------
+<1> List of node failures
+<2> List of task failures
diff --git a/docs/java-rest/high-level/search/search-template.asciidoc b/docs/java-rest/high-level/search/search-template.asciidoc
new file mode 100644
index 0000000000000..3f0dfb8ab28e0
--- /dev/null
+++ b/docs/java-rest/high-level/search/search-template.asciidoc
@@ -0,0 +1,117 @@
+[[java-rest-high-search-template]]
+=== Search Template API
+
+The search template API allows for searches to be executed from a template based
+on the mustache language, and also for previewing rendered templates.
+
+[[java-rest-high-search-template-request]]
+==== Search Template Request
+
+===== Inline Templates
+
+In the most basic form of request, the search template is specified inline:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-inline]
+--------------------------------------------------
+<1> The search is executed against the `posts` index.
+<2> The template defines the structure of the search source. It is passed
+as a string because mustache templates are not always valid JSON.
+<3> Before running the search, the template is rendered with the provided parameters.
+
+===== Registered Templates
+
+Search templates can be registered in advance through the stored scripts API. Note that
+the stored scripts API is not yet available in the high-level REST client, so in this
+example we use the low-level REST client.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[register-script]
+--------------------------------------------------
+
+Instead of providing an inline script, we can refer to this registered template in the request:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-stored]
+--------------------------------------------------
+
+===== Rendering Templates
+
+Given parameter values, a template can be rendered without executing a search:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[render-search-template-request]
+--------------------------------------------------
+<1> Setting `simulate` to `true` causes the search template to only be rendered.
+
+Both inline and pre-registered templates can be rendered.
+
+===== Optional Arguments
+
+As in standard search requests, the `explain` and `profile` options are supported:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-request-options]
+--------------------------------------------------
+
+===== Additional References
+
+The {ref}/search-template.html[Search Template documentation] contains further examples of how search requests can be templated.
+
+[[java-rest-high-search-template-sync]]
+==== Synchronous Execution
+
+The `searchTemplate` method executes the request synchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute]
+--------------------------------------------------
+
+==== Asynchronous Execution
+
+A search template request can be executed asynchronously through the `searchTemplateAsync`
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute-async]
+--------------------------------------------------
+<1> The `SearchTemplateRequest` to execute and the `ActionListener` to call when the execution completes.
+
+The asynchronous method does not block and returns immediately. Once the request completes, the
+`ActionListener` is called back using the `onResponse` method if the execution completed successfully,
+or using the `onFailure` method if it failed.
+
+A typical listener for `SearchTemplateResponse` is constructed as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed.
+<2> Called when the whole `SearchTemplateRequest` fails.
+
+==== Search Template Response
+
+For a standard search template request, the response contains a `SearchResponse` object
+with the result of executing the search:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[search-template-response]
+--------------------------------------------------
+
+If `simulate` was set to `true` in the request, then the response
+will contain the rendered search source instead of a `SearchResponse`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SearchDocumentationIT.java[render-search-template-response]
+--------------------------------------------------
+<1> The rendered source in bytes, in our example `{"query": { "match" : { "title" : "elasticsearch" }}, "size" : 5}`.
diff --git a/docs/java-rest/high-level/snapshot/create_repository.asciidoc b/docs/java-rest/high-level/snapshot/create_repository.asciidoc
new file mode 100644
index 0000000000000..5c54529209720
--- /dev/null
+++ b/docs/java-rest/high-level/snapshot/create_repository.asciidoc
@@ -0,0 +1,139 @@
+[[java-rest-high-snapshot-create-repository]]
+=== Snapshot Create Repository API
+
+The Snapshot Create Repository API allows you to register a snapshot repository.
+
+[[java-rest-high-snapshot-create-repository-request]]
+==== Snapshot Create Repository Request
+
+A `PutRepositoryRequest`:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request]
+--------------------------------------------------
+
+==== Repository Settings
+Settings requirements will differ based on the repository backend chosen.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-repository-settings]
+--------------------------------------------------
+<1> Sets the repository settings
+
+==== Providing the Settings
+The settings to be applied can be provided in different ways:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-create-settings]
+--------------------------------------------------
+<1> Settings provided as `Settings`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-settings-builder]
+--------------------------------------------------
+<1> Settings provided as `Settings.Builder`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-settings-source]
+--------------------------------------------------
+<1> Settings provided as `String`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-settings-map]
+--------------------------------------------------
+<1> Settings provided as a `Map`
+
+==== Required Arguments
+The following arguments must be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-name]
+--------------------------------------------------
+<1> The name of the repository
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-type]
+--------------------------------------------------
+<1> The type of the repository
+
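+Putting the required pieces together, a minimal sketch of a complete request might
+look like the following (the repository name `my_repository` is an illustrative
+placeholder):
+
+["source","java"]
+--------------------------------------------------
+PutRepositoryRequest request = new PutRepositoryRequest();
+request.name("my_repository");                                // hypothetical name
+request.type(FsRepository.TYPE);                              // shared file system repository
+request.settings("{\"location\": \".\"}", XContentType.JSON); // minimal fs settings
+--------------------------------------------------
+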
+==== Optional Arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-timeout]
+--------------------------------------------------
+<1> Timeout to wait for all the nodes to acknowledge the repository creation
+as a `TimeValue`
+<2> Timeout to wait for all the nodes to acknowledge the repository creation
+as a `String`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-request-verify]
+--------------------------------------------------
+<1> Whether to verify the repository after its creation, as a `Boolean`
+
+[[java-rest-high-snapshot-create-repository-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-execute]
+--------------------------------------------------
+
+[[java-rest-high-snapshot-create-repository-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a create repository request requires both the
+`PutRepositoryRequest` instance and an `ActionListener` instance to be
+passed to the asynchronous method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-execute-async]
+--------------------------------------------------
+<1> The `PutRepositoryRequest` to execute and the `ActionListener`
+to use when the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `PutRepositoryResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of a failure. The raised exception is provided as an argument
+
+[[java-rest-high-snapshot-create-repository-response]]
+==== Snapshot Create Repository Response
+
+The returned `PutRepositoryResponse` allows you to retrieve information about the
+executed operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-repository-response]
+--------------------------------------------------
+<1> Indicates whether the request was acknowledged
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index 2dee4643e73eb..b00047359a5d7 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -31,6 +31,7 @@ The Java High Level REST Client supports the following Search APIs:
* <>
* <>
* <>
+* <<java-rest-high-search-template>>
* <>
* <>
* <>
@@ -38,6 +39,7 @@ The Java High Level REST Client supports the following Search APIs:
include::search/search.asciidoc[]
include::search/scroll.asciidoc[]
include::search/multi-search.asciidoc[]
+include::search/search-template.asciidoc[]
include::search/field-caps.asciidoc[]
include::search/rank-eval.asciidoc[]
@@ -102,8 +104,10 @@ include::indices/put_template.asciidoc[]
The Java High Level REST Client supports the following Cluster APIs:
* <>
+* <<java-rest-high-cluster-list-tasks>>
include::cluster/put_settings.asciidoc[]
+include::cluster/list_tasks.asciidoc[]
== Snapshot APIs
@@ -111,4 +115,5 @@ The Java High Level REST Client supports the following Snapshot APIs:
* <>
-include::snapshot/get_repository.asciidoc[]
\ No newline at end of file
+include::snapshot/get_repository.asciidoc[]
+include::snapshot/create_repository.asciidoc[]
diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc
index bd1b0284a84fb..37c1c357007b0 100644
--- a/docs/reference/aggregations/pipeline.asciidoc
+++ b/docs/reference/aggregations/pipeline.asciidoc
@@ -72,6 +72,7 @@ POST /_search
}
--------------------------------------------------
// CONSOLE
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
<1> The metric is called `"the_sum"`
<2> The `buckets_path` refers to the metric via a relative path `"the_sum"`
@@ -136,6 +137,7 @@ POST /_search
}
--------------------------------------------------
// CONSOLE
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
<1> By using `_count` instead of a metric name, we can calculate the moving average of document counts in the histogram
The `buckets_path` can also use `"_bucket_count"` and path to a multi-bucket aggregation to use the number of buckets
@@ -231,6 +233,7 @@ include::pipeline/stats-bucket-aggregation.asciidoc[]
include::pipeline/extended-stats-bucket-aggregation.asciidoc[]
include::pipeline/percentiles-bucket-aggregation.asciidoc[]
include::pipeline/movavg-aggregation.asciidoc[]
+include::pipeline/movfn-aggregation.asciidoc[]
include::pipeline/cumulative-sum-aggregation.asciidoc[]
include::pipeline/bucket-script-aggregation.asciidoc[]
include::pipeline/bucket-selector-aggregation.asciidoc[]
diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
index db73510216be0..39a8255c90705 100644
--- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
+++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc
@@ -1,6 +1,10 @@
[[search-aggregations-pipeline-movavg-aggregation]]
=== Moving Average Aggregation
+deprecated[6.4.0, The Moving Average aggregation has been deprecated in favor of the more general
+<<search-aggregations-pipeline-movfn-aggregation,Moving Function Aggregation>>. The new Moving Function aggregation provides
+all the same functionality as the Moving Average aggregation, but also provides more flexibility.]
+
Given an ordered series of data, the Moving Average aggregation will slide a window across the data and emit the average
value of that window. For example, given the data `[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]`, we can calculate a simple moving
average with windows size of `5` as follows:
@@ -74,6 +78,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
<1> A `date_histogram` named "my_date_histo" is constructed on the "timestamp" field, with one-day intervals
<2> A `sum` metric is used to calculate the sum of a field. This could be any metric (sum, min, max, etc)
@@ -180,6 +185,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
A `simple` model has no special settings to configure
@@ -233,6 +239,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
A `linear` model has no special settings to configure
@@ -295,7 +302,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
-
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
[[single_0.2alpha]]
.EWMA with window of size 10, alpha = 0.2
@@ -355,6 +362,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
In practice, the `alpha` value behaves very similarly in `holt` as `ewma`: small values produce more smoothing
and more lag, while larger values produce closer tracking and less lag. The value of `beta` is often difficult
@@ -446,7 +454,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
-
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
[[holt_winters_add]]
.Holt-Winters moving average with window of size 120, alpha = 0.5, beta = 0.7, gamma = 0.3, period = 30
@@ -508,6 +516,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
==== Prediction
@@ -550,6 +559,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
The `simple`, `linear` and `ewma` models all produce "flat" predictions: they essentially converge on the mean
of the last value in the series, producing a flat:
@@ -631,6 +641,7 @@ POST /_search
--------------------------------------------------
// CONSOLE
// TEST[setup:sales]
+// TEST[warning:The moving_avg aggregation has been deprecated in favor of the moving_fn aggregation.]
<1> Minimization is enabled with the `minimize` parameter
diff --git a/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
new file mode 100644
index 0000000000000..b05c56b880560
--- /dev/null
+++ b/docs/reference/aggregations/pipeline/movfn-aggregation.asciidoc
@@ -0,0 +1,633 @@
+[[search-aggregations-pipeline-movfn-aggregation]]
+=== Moving Function Aggregation
+
+Given an ordered series of data, the Moving Function aggregation will slide a window across the data and allow the user to specify a custom
+script that is executed on each window of data. For convenience, a number of common functions are predefined such as min/max, moving averages,
+etc.
+
+This is conceptually very similar to the <<search-aggregations-pipeline-movavg-aggregation,Moving Average>> pipeline aggregation, except
+it provides more functionality.
+
+==== Syntax
+
+A `moving_fn` aggregation looks like this in isolation:
+
+[source,js]
+--------------------------------------------------
+{
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.min(values)"
+ }
+}
+--------------------------------------------------
+// NOTCONSOLE
+
+.`moving_fn` Parameters
+|===
+|Parameter Name |Description |Required |Default Value
+|`buckets_path` |Path to the metric of interest (see <> for more details) |Required |
+|`window` |The size of window to "slide" across the histogram. |Required |
+|`script` |The script that should be executed on each window of data |Required |
+|===
+
+`moving_fn` aggregations must be embedded inside of a `histogram` or `date_histogram` aggregation. They can be
+embedded like any other metric aggregation:
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{ <1>
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" } <2>
+ },
+ "the_movfn": {
+ "moving_fn": {
+ "buckets_path": "the_sum", <3>
+ "window": 10,
+ "script": "MovingFunctions.unweightedAvg(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+<1> A `date_histogram` named "my_date_histo" is constructed on the "date" field, with one-month intervals
+<2> A `sum` metric is used to calculate the sum of a field. This could be any numeric metric (sum, min, max, etc)
+<3> Finally, we specify a `moving_fn` aggregation which uses "the_sum" metric as its input.
+
+A `moving_fn` aggregation is built by first specifying a `histogram` or `date_histogram` over a field. You can then optionally
+add numeric metrics, such as a `sum`, inside of that histogram. Finally, the `moving_fn` is embedded inside the histogram.
+The `buckets_path` parameter is then used to "point" at one of the sibling metrics inside of the histogram (see
+<> for a description of the syntax for `buckets_path`).
+
+An example response from the above aggregation may look like:
+
+[source,js]
+--------------------------------------------------
+{
+ "took": 11,
+ "timed_out": false,
+ "_shards": ...,
+ "hits": ...,
+ "aggregations": {
+ "my_date_histo": {
+ "buckets": [
+ {
+ "key_as_string": "2015/01/01 00:00:00",
+ "key": 1420070400000,
+ "doc_count": 3,
+ "the_sum": {
+ "value": 550.0
+ },
+ "the_movfn": {
+ "value": null
+ }
+ },
+ {
+ "key_as_string": "2015/02/01 00:00:00",
+ "key": 1422748800000,
+ "doc_count": 2,
+ "the_sum": {
+ "value": 60.0
+ },
+ "the_movfn": {
+ "value": 550.0
+ }
+ },
+ {
+ "key_as_string": "2015/03/01 00:00:00",
+ "key": 1425168000000,
+ "doc_count": 2,
+ "the_sum": {
+ "value": 375.0
+ },
+ "the_movfn": {
+ "value": 305.0
+ }
+ }
+ ]
+ }
+ }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"took": 11/"took": $body.took/]
+// TESTRESPONSE[s/"_shards": \.\.\./"_shards": $body._shards/]
+// TESTRESPONSE[s/"hits": \.\.\./"hits": $body.hits/]
+
+
+==== Custom user scripting
+
+The Moving Function aggregation allows the user to specify any arbitrary script to define custom logic. The script is invoked each time a
+new window of data is collected. These values are provided to the script in the `values` variable. The script should then perform some
+kind of calculation and emit a single `double` as the result. Emitting `null` is not permitted, although `NaN` and +/- `Inf` are allowed.
+
+For example, this script will simply return the first value from the window, or `NaN` if no values are available:
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "return values.length > 0 ? values[0] : Double.NaN"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+==== Pre-built Functions
+
+For convenience, a number of functions have been prebuilt and are available inside the `moving_fn` script context:
+
+- `max()`
+- `min()`
+- `sum()`
+- `stdDev()`
+- `unweightedAvg()`
+- `linearWeightedAvg()`
+- `ewma()`
+- `holt()`
+- `holtWinters()`
+
+The functions are available from the `MovingFunctions` namespace, e.g. `MovingFunctions.max()`.
+
+===== max Function
+
+This function accepts a collection of doubles and returns the maximum value in that window. `null` and `NaN` values are ignored; the maximum
+is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result.
+
+.`max(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the maximum
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_moving_max": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.max(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== min Function
+
+This function accepts a collection of doubles and returns the minimum value in that window. `null` and `NaN` values are ignored; the minimum
+is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result.
+
+.`min(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the minimum
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_moving_min": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.min(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== sum Function
+
+This function accepts a collection of doubles and returns the sum of the values in that window. `null` and `NaN` values are ignored;
+the sum is only calculated over the real values. If the window is empty, or all values are `null`/`NaN`, `0.0` is returned as the result.
+
+.`sum(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the sum of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_moving_sum": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.sum(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== stdDev Function
+
+This function accepts a collection of doubles and an average, then returns the standard deviation of the values in that window.
+`null` and `NaN` values are ignored; the standard deviation is only calculated over the real values. If the window is empty, or all
+values are `null`/`NaN`, `0.0` is returned as the result.
+
+.`stdDev(double[] values, double avg)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to find the standard deviation of
+|`avg` |The average of the window
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_moving_sum": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.stdDev(values, MovingFunctions.unweightedAvg(values))"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+The `avg` parameter must be provided to the standard deviation function because different styles of averages can be computed on the window
+(simple, linearly weighted, etc). The various moving averages that are detailed below can be used to calculate the average for the
+standard deviation function.
+
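+To make the calculation concrete, here is a rough sketch of such a standard
+deviation over a window (illustrative only, not the actual `MovingFunctions`
+source):
+
+["source","java"]
+--------------------------------------------------
+// Population standard deviation around a supplied mean; sketch only.
+static double stdDev(double[] values, double avg) {
+    double squaredSum = 0;
+    int count = 0;
+    for (double v : values) {
+        if (Double.isNaN(v) == false) {   // NaN values are skipped
+            squaredSum += (v - avg) * (v - avg);
+            count += 1;
+        }
+    }
+    return count == 0 ? 0.0 : Math.sqrt(squaredSum / count); // empty window -> 0.0
+}
+--------------------------------------------------
+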
+===== unweightedAvg Function
+
+The `unweightedAvg` function calculates the sum of all values in the window, then divides by the size of the window. It is effectively
+a simple arithmetic mean of the window. The simple moving average does not perform any time-dependent weighting, which means
+the values from a `simple` moving average tend to "lag" behind the real data.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is the count of non-`null`,
+non-`NaN` values.
+
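+A rough sketch of the arithmetic (illustrative only, not the actual
+`MovingFunctions` source):
+
+["source","java"]
+--------------------------------------------------
+// Simple arithmetic mean over the usable values in the window; sketch only.
+static double unweightedAvg(double[] values) {
+    double sum = 0;
+    int count = 0;
+    for (double v : values) {
+        if (Double.isNaN(v) == false) {   // NaN values are skipped
+            sum += v;
+            count += 1;
+        }
+    }
+    return count == 0 ? Double.NaN : sum / count; // empty window -> NaN
+}
+--------------------------------------------------
+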
+.`unweightedAvg(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to calculate the average of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.unweightedAvg(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== linearWeightedAvg Function
+
+The `linearWeightedAvg` function assigns a linear weighting to points in the series, such that "older" datapoints (e.g. those at
+the beginning of the window) contribute linearly less to the total average. The linear weighting helps reduce
+the "lag" behind the data's mean, since older points have less influence.
+
+If the window is empty, or all values are `null`/`NaN`, `NaN` is returned as the result.
+
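+A textbook sketch of linear weighting (illustrative only, not the actual
+`MovingFunctions` source):
+
+["source","java"]
+--------------------------------------------------
+// Weights grow linearly (1, 2, 3, ...) so newer values count for more; sketch only.
+static double linearWeightedAvg(double[] values) {
+    double weightedSum = 0;
+    double totalWeight = 0;
+    int weight = 1;
+    for (double v : values) {
+        if (Double.isNaN(v) == false) {   // NaN values are skipped
+            weightedSum += v * weight;
+            totalWeight += weight;
+            weight += 1;
+        }
+    }
+    return totalWeight == 0 ? Double.NaN : weightedSum / totalWeight;
+}
+--------------------------------------------------
+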
+.`linearWeightedAvg(double[] values)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to calculate the average of
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.linearWeightedAvg(values)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+===== ewma Function
+
+The `ewma` function (aka "single-exponential") is similar to the `linearWeightedAvg` function,
+except older data-points become exponentially less important,
+rather than linearly less important. The speed at which the importance decays can be controlled with an `alpha`
+setting. Small values make the weight decay slowly, which provides greater smoothing and takes into account a larger
+portion of the window. Larger values make the weight decay quickly, which reduces the impact of older values on the
+moving average. This tends to make the moving average track the data more closely but with less smoothing.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is the count of non-`null`,
+non-`NaN` values.
+
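+The recursion is the classic single-exponential form: each usable value pulls the
+running average toward itself by a factor of `alpha`. A sketch (illustrative only,
+not the actual `MovingFunctions` source):
+
+["source","java"]
+--------------------------------------------------
+// avg_n = alpha * value_n + (1 - alpha) * avg_(n-1); sketch only.
+static double ewma(double[] values, double alpha) {
+    double avg = Double.NaN;
+    for (double v : values) {
+        if (Double.isNaN(v) == false) {   // NaN values are skipped
+            avg = Double.isNaN(avg) ? v : alpha * v + (1 - alpha) * avg;
+        }
+    }
+    return avg; // NaN if the window held no usable values
+}
+--------------------------------------------------
+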
+.`ewma(double[] values, double alpha)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to calculate the average of
+|`alpha` |Exponential decay
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.ewma(values, 0.3)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+
+===== holt Function
+
+The `holt` function (aka "double exponential") incorporates a second exponential term which
+tracks the data's trend. Single exponential does not perform well when the data has an underlying linear trend. The
+double exponential model calculates two values internally: a "level" and a "trend".
+
+The level calculation is similar to `ewma`, and is an exponentially weighted view of the data. The difference is
+that the previously smoothed value is used instead of the raw value, which allows it to stay close to the original series.
+The trend calculation looks at the difference between the current and last value (e.g. the slope, or trend, of the
+smoothed data). The trend value is also exponentially weighted.
+
+Values are produced by multiplying the level and trend components.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is the count of non-`null`,
+non-`NaN` values.
+
+.`holt(double[] values, double alpha, double beta)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to calculate the average of
+|`alpha` |Level decay value
+|`beta` |Trend decay value
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "MovingFunctions.holt(values, 0.3, 0.1)"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+In practice, the `alpha` value behaves very similarly in `holt` as in `ewma`: small values produce more smoothing
+and more lag, while larger values produce closer tracking and less lag. The value of `beta` is often difficult
+to see. Small values emphasize long-term trends (such as a constant linear trend in the whole series), while larger
+values emphasize short-term trends.
+
+==== holtWinters Function
+
+The `holtWinters` function (aka "triple exponential") incorporates a third exponential term which
+tracks the seasonal aspect of your data. This aggregation therefore smooths based on three components: "level", "trend"
+and "seasonality".
+
+The level and trend calculations are identical to `holt`. The seasonal calculation looks at the difference between
+the current point and the point one period earlier.
+
+Holt-Winters requires a little more handholding than the other moving averages. You need to specify the "periodicity"
+of your data: e.g. if your data has cyclic trends every 7 days, you would set `period = 7`. Similarly, if there was
+a monthly trend, you would set it to `30`. There is currently no periodicity detection, although that is planned
+for future enhancements.
+
+`null` and `NaN` values are ignored; the average is only calculated over the real values. If the window is empty, or all values are
+`null`/`NaN`, `NaN` is returned as the result. This means that the count used in the average calculation is the
+count of non-`null`, non-`NaN` values.
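+
+For reference, here is a sketch of the classic additive triple-exponential recurrence in plain Java (an illustration
+only; the built-in implementation differs in its initialization and also offers the multiplicative variant described
+below):
+
+[source,java]
+--------------------------------------------------
+// Classic additive Holt-Winters smoothing over a window.
+// Sketch only: seasonal indices are crudely initialized to zero and the
+// window is assumed to hold at least two full periods of real values.
+static double holtWinters(double[] values, double alpha, double beta,
+                          double gamma, int period) {
+    double[] seasonal = new double[values.length];
+    double level = values[0];
+    double trend = (values[period] - values[0]) / period; // average slope over one period
+    for (int i = period; i < values.length; i++) {
+        double lastLevel = level;
+        // Level: de-seasonalized, exponentially weighted view of the data
+        level = alpha * (values[i] - seasonal[i - period]) + (1 - alpha) * (level + trend);
+        // Trend: exponentially weighted slope between consecutive levels
+        trend = beta * (level - lastLevel) + (1 - beta) * trend;
+        // Seasonality: smoothed difference between the current point and the
+        // point one period earlier
+        seasonal[i] = gamma * (values[i] - level) + (1 - gamma) * seasonal[i - period];
+    }
+    return level + trend + seasonal[values.length - period];
+}
+--------------------------------------------------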
+
+.`holtWinters(double[] values, double alpha, double beta, double gamma, int period, boolean multiplicative)` Parameters
+|===
+|Parameter Name |Description
+|`values` |The window of values to compute the Holt-Winters moving average over
+|`alpha` |Level decay value
+|`beta` |Trend decay value
+|`gamma` |Seasonality decay value
+|`period` |The periodicity of the data
+|`multiplicative` |True if you wish to use multiplicative holt-winters, false to use additive
+|===
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "size": 0,
+ "aggs": {
+ "my_date_histo":{
+ "date_histogram":{
+ "field":"date",
+ "interval":"1M"
+ },
+ "aggs":{
+ "the_sum":{
+ "sum":{ "field": "price" }
+ },
+ "the_movavg": {
+ "moving_fn": {
+ "buckets_path": "the_sum",
+ "window": 10,
+ "script": "if (values.length > 5*2) {MovingFunctions.holtWinters(values, 0.3, 0.1, 0.1, 5, false)}"
+ }
+ }
+ }
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+[WARNING]
+======
+Multiplicative Holt-Winters works by dividing each data point by the seasonal value. This is problematic if any of
+your data is zero, or if there are gaps in the data (since this results in a divide-by-zero). To combat this, the
+`mult` Holt-Winters pads all values by a very small amount (1*10^-10^) so that all values are non-zero. This affects
+the result, but only minimally. If your data is non-zero, or you prefer to see `NaN` when zeros are encountered,
+you can disable this behavior with `pad: false`.
+======
+
+===== "Cold Start"
+
+Unfortunately, due to the nature of Holt-Winters, it requires two periods of data to "bootstrap" the algorithm. This
+means that your `window` must always be *at least* twice the size of your period. An exception will be thrown if it
+isn't. It also means that Holt-Winters will not emit a value for the first `2 * period` buckets; the current algorithm
+does not backcast.
+
+You'll notice in the above example we have an `if ()` statement checking the size of `values`. This makes sure
+we have two periods worth of data (`5 * 2`, where 5 is the period specified in the script above) before calling
+the `holtWinters` function.
diff --git a/docs/reference/modules/snapshots.asciidoc b/docs/reference/modules/snapshots.asciidoc
index 693d537d732c1..f70857e66c86f 100644
--- a/docs/reference/modules/snapshots.asciidoc
+++ b/docs/reference/modules/snapshots.asciidoc
@@ -289,6 +289,20 @@ By setting `include_global_state` to false it's possible to prevent the cluster
the snapshot. By default, the entire snapshot will fail if one or more indices participating in the snapshot don't have
all primary shards available. This behaviour can be changed by setting `partial` to `true`.
+Snapshot names can be automatically derived using <<date-math-index-names,date math expressions>>, similar to when creating
+new indices. Note that special characters need to be URI encoded.
+
+For example, creating a snapshot with the current day in the name, like `snapshot-2018.05.11`, can be achieved with
+the following command:
+[source,js]
+-----------------------------------
+# PUT /_snapshot/my_backup/<snapshot-{now/d}>
+PUT /_snapshot/my_backup/%3Csnapshot-%7Bnow%2Fd%7D%3E
+-----------------------------------
+// CONSOLE
+// TEST[continued]
+
+
The index snapshot process is incremental. In the process of making the index snapshot Elasticsearch analyses
the list of the index files that are already stored in the repository and copies only files that were created or
changed since the last snapshot. That allows multiple snapshots to be preserved in the repository in a compact form.
diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
index a36df9987e7de..02bc304317e68 100644
--- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
+++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
@@ -86,9 +86,13 @@ protected boolean randomizeContentType() {
}
@Override
- protected ClientYamlTestClient initClientYamlTestClient(ClientYamlSuiteRestSpec restSpec, RestClient restClient,
- List<HttpHost> hosts, Version esVersion) throws IOException {
- return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion);
+ protected ClientYamlTestClient initClientYamlTestClient(
+ final ClientYamlSuiteRestSpec restSpec,
+ final RestClient restClient,
+ final List<HttpHost> hosts,
+ final Version esVersion,
+ final Version masterVersion) throws IOException {
+ return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion);
}
/**
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java
index 000e871e92781..ef1e188a22e0a 100644
--- a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java
@@ -20,25 +20,13 @@
package org.elasticsearch.nio;
import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.ClosedChannelException;
-import java.util.LinkedList;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.BiConsumer;
import java.util.function.Consumer;
public class BytesChannelContext extends SocketChannelContext {
- private final ReadConsumer readConsumer;
- private final InboundChannelBuffer channelBuffer;
- private final LinkedList<BytesWriteOperation> queued = new LinkedList<>();
- private final AtomicBoolean isClosing = new AtomicBoolean(false);
-
public BytesChannelContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler,
- ReadConsumer readConsumer, InboundChannelBuffer channelBuffer) {
- super(channel, selector, exceptionHandler);
- this.readConsumer = readConsumer;
- this.channelBuffer = channelBuffer;
+ ReadWriteHandler handler, InboundChannelBuffer channelBuffer) {
+ super(channel, selector, exceptionHandler, handler, channelBuffer);
}
@Override
@@ -56,55 +44,30 @@ public int read() throws IOException {
channelBuffer.incrementIndex(bytesRead);
- int bytesConsumed = Integer.MAX_VALUE;
- while (bytesConsumed > 0 && channelBuffer.getIndex() > 0) {
- bytesConsumed = readConsumer.consumeReads(channelBuffer);
- channelBuffer.release(bytesConsumed);
- }
+ handleReadBytes();
return bytesRead;
}
- @Override
- public void sendMessage(ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
- if (isClosing.get()) {
- listener.accept(null, new ClosedChannelException());
- return;
- }
-
- BytesWriteOperation writeOperation = new BytesWriteOperation(this, buffers, listener);
- SocketSelector selector = getSelector();
- if (selector.isOnCurrentThread() == false) {
- selector.queueWrite(writeOperation);
- return;
- }
-
- selector.queueWriteInChannelBuffer(writeOperation);
- }
-
- @Override
- public void queueWriteOperation(WriteOperation writeOperation) {
- getSelector().assertOnSelectorThread();
- queued.add((BytesWriteOperation) writeOperation);
- }
-
@Override
public void flushChannel() throws IOException {
getSelector().assertOnSelectorThread();
- int ops = queued.size();
- if (ops == 1) {
- singleFlush(queued.pop());
- } else if (ops > 1) {
- multiFlush();
+ boolean lastOpCompleted = true;
+ FlushOperation flushOperation;
+ while (lastOpCompleted && (flushOperation = getPendingFlush()) != null) {
+ try {
+ if (singleFlush(flushOperation)) {
+ currentFlushOperationComplete();
+ } else {
+ lastOpCompleted = false;
+ }
+ } catch (IOException e) {
+ currentFlushOperationFailed(e);
+ throw e;
+ }
}
}
- @Override
- public boolean hasQueuedWriteOps() {
- getSelector().assertOnSelectorThread();
- return queued.isEmpty() == false;
- }
-
@Override
public void closeChannel() {
if (isClosing.compareAndSet(false, true)) {
@@ -117,51 +80,12 @@ public boolean selectorShouldClose() {
return isPeerClosed() || hasIOException() || isClosing.get();
}
- @Override
- public void closeFromSelector() throws IOException {
- getSelector().assertOnSelectorThread();
- if (channel.isOpen()) {
- IOException channelCloseException = null;
- try {
- super.closeFromSelector();
- } catch (IOException e) {
- channelCloseException = e;
- }
- // Set to true in order to reject new writes before queuing with selector
- isClosing.set(true);
- channelBuffer.close();
- for (BytesWriteOperation op : queued) {
- getSelector().executeFailedListener(op.getListener(), new ClosedChannelException());
- }
- queued.clear();
- if (channelCloseException != null) {
- throw channelCloseException;
- }
- }
- }
-
- private void singleFlush(BytesWriteOperation headOp) throws IOException {
- try {
- int written = flushToChannel(headOp.getBuffersToWrite());
- headOp.incrementIndex(written);
- } catch (IOException e) {
- getSelector().executeFailedListener(headOp.getListener(), e);
- throw e;
- }
-
- if (headOp.isFullyFlushed()) {
- getSelector().executeListener(headOp.getListener(), null);
- } else {
- queued.push(headOp);
- }
- }
-
- private void multiFlush() throws IOException {
- boolean lastOpCompleted = true;
- while (lastOpCompleted && queued.isEmpty() == false) {
- BytesWriteOperation op = queued.pop();
- singleFlush(op);
- lastOpCompleted = op.isFullyFlushed();
- }
+ /**
+ * Returns a boolean indicating if the operation was fully flushed.
+ */
+ private boolean singleFlush(FlushOperation flushOperation) throws IOException {
+ int written = flushToChannel(flushOperation.getBuffersToWrite());
+ flushOperation.incrementIndex(written);
+ return flushOperation.isFullyFlushed();
}
}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteHandler.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteHandler.java
new file mode 100644
index 0000000000000..ba379e2873210
--- /dev/null
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteHandler.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.nio;
+
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.BiConsumer;
+
+public abstract class BytesWriteHandler implements ReadWriteHandler {
+
+ private static final List<FlushOperation> EMPTY_LIST = Collections.emptyList();
+
+ public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Throwable> listener) {
+ assert message instanceof ByteBuffer[] : "This channel only supports messages that are of type: " + ByteBuffer[].class
+ + ". Found type: " + message.getClass() + ".";
+ return new FlushReadyWrite(context, (ByteBuffer[]) message, listener);
+ }
+
+ public List<FlushOperation> writeToBytes(WriteOperation writeOperation) {
+ assert writeOperation instanceof FlushReadyWrite : "Write operation must be flush ready";
+ return Collections.singletonList((FlushReadyWrite) writeOperation);
+ }
+
+ public List<FlushOperation> pollFlushOperations() {
+ return EMPTY_LIST;
+ }
+
+ public void close() {}
+}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteOperation.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushOperation.java
similarity index 86%
rename from libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteOperation.java
rename to libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushOperation.java
index 37c6e49727634..3102c972a6795 100644
--- a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/BytesWriteOperation.java
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushOperation.java
@@ -23,17 +23,15 @@
import java.util.Arrays;
import java.util.function.BiConsumer;
-public class BytesWriteOperation implements WriteOperation {
+public class FlushOperation {
- private final SocketChannelContext channelContext;
private final BiConsumer<Void, Throwable> listener;
private final ByteBuffer[] buffers;
private final int[] offsets;
private final int length;
private int internalIndex;
- public BytesWriteOperation(SocketChannelContext channelContext, ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
- this.channelContext = channelContext;
+ public FlushOperation(ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
this.listener = listener;
this.buffers = buffers;
this.offsets = new int[buffers.length];
@@ -46,16 +44,10 @@ public BytesWriteOperation(SocketChannelContext channelContext, ByteBuffer[] buf
length = offset;
}
- @Override
public BiConsumer<Void, Throwable> getListener() {
return listener;
}
- @Override
- public SocketChannelContext getChannel() {
- return channelContext;
- }
-
public boolean isFullyFlushed() {
assert length >= internalIndex : "Should never have an index that is greater than the length [length=" + length + ", index="
+ internalIndex + "]";
@@ -84,5 +76,4 @@ public ByteBuffer[] getBuffersToWrite() {
return postIndexBuffers;
}
-
}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushReadyWrite.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushReadyWrite.java
new file mode 100644
index 0000000000000..65bc8f17aaf4b
--- /dev/null
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/FlushReadyWrite.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.nio;
+
+import java.nio.ByteBuffer;
+import java.util.function.BiConsumer;
+
+public class FlushReadyWrite extends FlushOperation implements WriteOperation {
+
+ private final SocketChannelContext channelContext;
+ private final ByteBuffer[] buffers;
+
+ FlushReadyWrite(SocketChannelContext channelContext, ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
+ super(buffers, listener);
+ this.channelContext = channelContext;
+ this.buffers = buffers;
+ }
+
+ @Override
+ public SocketChannelContext getChannel() {
+ return channelContext;
+ }
+
+ @Override
+ public ByteBuffer[] getObject() {
+ return buffers;
+ }
+}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/ReadWriteHandler.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/ReadWriteHandler.java
new file mode 100644
index 0000000000000..f0637ea265280
--- /dev/null
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/ReadWriteHandler.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.nio;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.function.BiConsumer;
+
+/**
+ * Implements the application specific logic for handling inbound and outbound messages for a channel.
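+ *
+ * For example (an illustration only, not part of the library), a minimal handler for raw bytes can extend
+ * {@link BytesWriteHandler}, which supplies the write-side methods, and implement only
+ * {@link #consumeReads(InboundChannelBuffer)}:
+ *
+ * <pre>{@code
+ * class DiscardingHandler extends BytesWriteHandler {
+ *     @Override
+ *     public int consumeReads(InboundChannelBuffer channelBuffer) {
+ *         // consume and discard everything that has been read so far
+ *         return Math.toIntExact(channelBuffer.getIndex());
+ *     }
+ * }
+ * }</pre>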
+ */
+public interface ReadWriteHandler {
+
+ /**
+ * This method is called when a message is queued with a channel. It can be called from any thread.
+ * This method should validate that the message is a valid type and return a write operation object
+ * to be queued with the channel
+ *
+ * @param context the channel context
+ * @param message the message
+ * @param listener the listener to be called when the message is sent
+ * @return the write operation to be queued
+ */
+ WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Throwable> listener);
+
+ /**
+ * This method is called on the event loop thread. It should serialize a write operation object to bytes
+ * that can be flushed to the raw nio channel.
+ *
+ * @param writeOperation to be converted to bytes
+ * @return the operations to flush the bytes to the channel
+ */
+ List<FlushOperation> writeToBytes(WriteOperation writeOperation);
+
+ /**
+ * Returns any flush operations that are ready to flush. This exists as a way to check if any flush
+ * operations were produced during a read call.
+ *
+ * @return flush operations
+ */
+ List<FlushOperation> pollFlushOperations();
+
+ /**
+ * This method handles bytes that have been read from the network. It should return the number of bytes
+ * consumed so that they can be released.
+ *
+ * @param channelBuffer of bytes read from the network
+ * @return the number of bytes consumed
+ * @throws IOException if an exception occurs
+ */
+ int consumeReads(InboundChannelBuffer channelBuffer) throws IOException;
+
+ void close() throws IOException;
+}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
index 3bf47a98e0267..f2d299a9d328a 100644
--- a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketChannelContext.java
@@ -19,10 +19,16 @@
package org.elasticsearch.nio;
+import org.elasticsearch.nio.utils.ExceptionsHelper;
+
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.nio.channels.ClosedChannelException;
import java.nio.channels.SocketChannel;
+import java.util.ArrayList;
+import java.util.LinkedList;
import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -33,21 +39,28 @@
* close behavior is required, it should be implemented in this context.
*
* The only methods of the context that should ever be called from a non-selector thread are
- * {@link #closeChannel()} and {@link #sendMessage(ByteBuffer[], BiConsumer)}.
+ * {@link #closeChannel()} and {@link #sendMessage(Object, BiConsumer)}.
*/
public abstract class SocketChannelContext extends ChannelContext {
protected final NioSocketChannel channel;
+ protected final InboundChannelBuffer channelBuffer;
+ protected final AtomicBoolean isClosing = new AtomicBoolean(false);
+ private final ReadWriteHandler readWriteHandler;
private final SocketSelector selector;
private final CompletableFuture<Void> connectContext = new CompletableFuture<>();
+ private final LinkedList<FlushOperation> pendingFlushes = new LinkedList<>();
private boolean ioException;
private boolean peerClosed;
private Exception connectException;
- protected SocketChannelContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler) {
+ protected SocketChannelContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler,
+ ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) {
super(channel.getRawChannel(), exceptionHandler);
this.selector = selector;
this.channel = channel;
+ this.readWriteHandler = readWriteHandler;
+ this.channelBuffer = channelBuffer;
}
@Override
@@ -108,15 +121,94 @@ public boolean connect() throws IOException {
return isConnected;
}
- public abstract int read() throws IOException;
+ public void sendMessage(Object message, BiConsumer<Void, Throwable> listener) {
+ if (isClosing.get()) {
+ listener.accept(null, new ClosedChannelException());
+ return;
+ }
- public abstract void sendMessage(ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener);
+ WriteOperation writeOperation = readWriteHandler.createWriteOperation(this, message, listener);
+
+ SocketSelector selector = getSelector();
+ if (selector.isOnCurrentThread() == false) {
+ selector.queueWrite(writeOperation);
+ return;
+ }
+
+ selector.queueWriteInChannelBuffer(writeOperation);
+ }
+
+ public void queueWriteOperation(WriteOperation writeOperation) {
+ getSelector().assertOnSelectorThread();
+ pendingFlushes.addAll(readWriteHandler.writeToBytes(writeOperation));
+ }
- public abstract void queueWriteOperation(WriteOperation writeOperation);
+ public abstract int read() throws IOException;
public abstract void flushChannel() throws IOException;
- public abstract boolean hasQueuedWriteOps();
+ protected void currentFlushOperationFailed(IOException e) {
+ FlushOperation flushOperation = pendingFlushes.pollFirst();
+ getSelector().executeFailedListener(flushOperation.getListener(), e);
+ }
+
+ protected void currentFlushOperationComplete() {
+ FlushOperation flushOperation = pendingFlushes.pollFirst();
+ getSelector().executeListener(flushOperation.getListener(), null);
+ }
+
+ protected FlushOperation getPendingFlush() {
+ return pendingFlushes.peekFirst();
+ }
+
+ @Override
+ public void closeFromSelector() throws IOException {
+ getSelector().assertOnSelectorThread();
+ if (channel.isOpen()) {
+ ArrayList<IOException> closingExceptions = new ArrayList<>(3);
+ try {
+ super.closeFromSelector();
+ } catch (IOException e) {
+ closingExceptions.add(e);
+ }
+ // Set to true in order to reject new writes before queuing with selector
+ isClosing.set(true);
+
+ // Poll for new flush operations to close
+ pendingFlushes.addAll(readWriteHandler.pollFlushOperations());
+ FlushOperation flushOperation;
+ while ((flushOperation = pendingFlushes.pollFirst()) != null) {
+ selector.executeFailedListener(flushOperation.getListener(), new ClosedChannelException());
+ }
+
+ try {
+ readWriteHandler.close();
+ } catch (IOException e) {
+ closingExceptions.add(e);
+ }
+ channelBuffer.close();
+
+ if (closingExceptions.isEmpty() == false) {
+ ExceptionsHelper.rethrowAndSuppress(closingExceptions);
+ }
+ }
+ }
+
+ protected void handleReadBytes() throws IOException {
+ int bytesConsumed = Integer.MAX_VALUE;
+ while (bytesConsumed > 0 && channelBuffer.getIndex() > 0) {
+ bytesConsumed = readWriteHandler.consumeReads(channelBuffer);
+ channelBuffer.release(bytesConsumed);
+ }
+
+ // Some protocols might produce messages to flush during a read operation.
+ pendingFlushes.addAll(readWriteHandler.pollFlushOperations());
+ }
+
+ public boolean readyForFlush() {
+ getSelector().assertOnSelectorThread();
+ return pendingFlushes.isEmpty() == false;
+ }
/**
* This method indicates if a selector should close this channel.
@@ -178,9 +270,4 @@ protected int flushToChannel(ByteBuffer[] buffers) throws IOException {
throw e;
}
}
-
- @FunctionalInterface
- public interface ReadConsumer {
- int consumeReads(InboundChannelBuffer channelBuffer) throws IOException;
- }
}
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketEventHandler.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketEventHandler.java
index b1f738647619b..cacee47e96196 100644
--- a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketEventHandler.java
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/SocketEventHandler.java
@@ -48,7 +48,7 @@ protected void handleRegistration(SocketChannelContext context) throws IOExcepti
context.register();
SelectionKey selectionKey = context.getSelectionKey();
selectionKey.attach(context);
- if (context.hasQueuedWriteOps()) {
+ if (context.readyForFlush()) {
SelectionKeyUtils.setConnectReadAndWriteInterested(selectionKey);
} else {
SelectionKeyUtils.setConnectAndReadInterested(selectionKey);
@@ -150,7 +150,7 @@ protected void postHandling(SocketChannelContext context) {
} else {
SelectionKey selectionKey = context.getSelectionKey();
boolean currentlyWriteInterested = SelectionKeyUtils.isWriteInterested(selectionKey);
- boolean pendingWrites = context.hasQueuedWriteOps();
+ boolean pendingWrites = context.readyForFlush();
if (currentlyWriteInterested == false && pendingWrites) {
SelectionKeyUtils.setWriteInterested(selectionKey);
} else if (currentlyWriteInterested && pendingWrites == false) {
diff --git a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/WriteOperation.java b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/WriteOperation.java
index 665b9f7759e11..25de6ab7326f3 100644
--- a/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/WriteOperation.java
+++ b/libs/elasticsearch-nio/src/main/java/org/elasticsearch/nio/WriteOperation.java
@@ -16,7 +16,6 @@
* specific language governing permissions and limitations
* under the License.
*/
-
package org.elasticsearch.nio;
import java.util.function.BiConsumer;
@@ -24,11 +23,14 @@
/**
* This is a basic write operation that can be queued with a channel. The only requirements of a write
* operation is that is has a listener and a reference to its channel. The actual conversion of the write
- * operation implementation to bytes will be performed by the {@link SocketChannelContext}.
+ * operation implementation to bytes will be performed by the {@link ReadWriteHandler}.
*/
public interface WriteOperation {
BiConsumer<Void, Throwable> getListener();
SocketChannelContext getChannel();
+
+ Object getObject();
+
}
diff --git a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesChannelContextTests.java b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesChannelContextTests.java
index d9de0ab1361c3..addfcdedbf99f 100644
--- a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesChannelContextTests.java
+++ b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesChannelContextTests.java
@@ -19,23 +19,19 @@
package org.elasticsearch.nio;
+import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
-import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.nio.channels.ClosedChannelException;
import java.nio.channels.SocketChannel;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
-import java.util.function.Supplier;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.isNull;
-import static org.mockito.Matchers.same;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@@ -43,20 +39,19 @@
public class BytesChannelContextTests extends ESTestCase {
- private SocketChannelContext.ReadConsumer readConsumer;
+ private CheckedFunction<InboundChannelBuffer, Integer, IOException> readConsumer;
private NioSocketChannel channel;
private SocketChannel rawChannel;
private BytesChannelContext context;
private InboundChannelBuffer channelBuffer;
private SocketSelector selector;
- private Consumer<Exception> exceptionHandler;
private BiConsumer<Void, Throwable> listener;
private int messageLength;
@Before
@SuppressWarnings("unchecked")
public void init() {
- readConsumer = mock(SocketChannelContext.ReadConsumer.class);
+ readConsumer = mock(CheckedFunction.class);
messageLength = randomInt(96) + 20;
selector = mock(SocketSelector.class);
@@ -64,9 +59,9 @@ public void init() {
channel = mock(NioSocketChannel.class);
rawChannel = mock(SocketChannel.class);
channelBuffer = InboundChannelBuffer.allocatingInstance();
- exceptionHandler = mock(Consumer.class);
+ TestReadWriteHandler handler = new TestReadWriteHandler(readConsumer);
when(channel.getRawChannel()).thenReturn(rawChannel);
- context = new BytesChannelContext(channel, selector, exceptionHandler, readConsumer, channelBuffer);
+ context = new BytesChannelContext(channel, selector, mock(Consumer.class), handler, channelBuffer);
when(selector.isOnCurrentThread()).thenReturn(true);
}
@@ -80,13 +75,13 @@ public void testSuccessfulRead() throws IOException {
return bytes.length;
});
- when(readConsumer.consumeReads(channelBuffer)).thenReturn(messageLength, 0);
+ when(readConsumer.apply(channelBuffer)).thenReturn(messageLength, 0);
assertEquals(messageLength, context.read());
assertEquals(0, channelBuffer.getIndex());
assertEquals(BigArrays.BYTE_PAGE_SIZE - bytes.length, channelBuffer.getCapacity());
- verify(readConsumer, times(1)).consumeReads(channelBuffer);
+ verify(readConsumer, times(1)).apply(channelBuffer);
}
public void testMultipleReadsConsumed() throws IOException {
@@ -98,13 +93,13 @@ public void testMultipleReadsConsumed() throws IOException {
return bytes.length;
});
- when(readConsumer.consumeReads(channelBuffer)).thenReturn(messageLength, messageLength, 0);
+ when(readConsumer.apply(channelBuffer)).thenReturn(messageLength, messageLength, 0);
assertEquals(bytes.length, context.read());
assertEquals(0, channelBuffer.getIndex());
assertEquals(BigArrays.BYTE_PAGE_SIZE - bytes.length, channelBuffer.getCapacity());
- verify(readConsumer, times(2)).consumeReads(channelBuffer);
+ verify(readConsumer, times(2)).apply(channelBuffer);
}
public void testPartialRead() throws IOException {
@@ -117,20 +112,20 @@ public void testPartialRead() throws IOException {
});
- when(readConsumer.consumeReads(channelBuffer)).thenReturn(0);
+ when(readConsumer.apply(channelBuffer)).thenReturn(0);
assertEquals(messageLength, context.read());
assertEquals(bytes.length, channelBuffer.getIndex());
- verify(readConsumer, times(1)).consumeReads(channelBuffer);
+ verify(readConsumer, times(1)).apply(channelBuffer);
- when(readConsumer.consumeReads(channelBuffer)).thenReturn(messageLength * 2, 0);
+ when(readConsumer.apply(channelBuffer)).thenReturn(messageLength * 2, 0);
assertEquals(messageLength, context.read());
assertEquals(0, channelBuffer.getIndex());
assertEquals(BigArrays.BYTE_PAGE_SIZE - (bytes.length * 2), channelBuffer.getCapacity());
- verify(readConsumer, times(2)).consumeReads(channelBuffer);
+ verify(readConsumer, times(2)).apply(channelBuffer);
}
public void testReadThrowsIOException() throws IOException {
@@ -157,186 +152,100 @@ public void testReadLessThanZeroMeansReadyForClose() throws IOException {
assertTrue(context.selectorShouldClose());
}
- @SuppressWarnings("unchecked")
- public void testCloseClosesChannelBuffer() throws IOException {
- try (SocketChannel realChannel = SocketChannel.open()) {
- when(channel.getRawChannel()).thenReturn(realChannel);
- context = new BytesChannelContext(channel, selector, exceptionHandler, readConsumer, channelBuffer);
-
- when(channel.isOpen()).thenReturn(true);
- Runnable closer = mock(Runnable.class);
- Supplier<InboundChannelBuffer.Page> pageSupplier = () -> new InboundChannelBuffer.Page(ByteBuffer.allocate(1 << 14), closer);
- InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
- buffer.ensureCapacity(1);
- BytesChannelContext context = new BytesChannelContext(channel, selector, exceptionHandler, readConsumer, buffer);
- context.closeFromSelector();
- verify(closer).run();
- }
- }
-
- public void testWriteFailsIfClosing() {
- context.closeChannel();
-
- ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
- context.sendMessage(buffers, listener);
-
- verify(listener).accept(isNull(Void.class), any(ClosedChannelException.class));
- }
-
- public void testSendMessageFromDifferentThreadIsQueuedWithSelector() throws Exception {
- ArgumentCaptor<BytesWriteOperation> writeOpCaptor = ArgumentCaptor.forClass(BytesWriteOperation.class);
-
- when(selector.isOnCurrentThread()).thenReturn(false);
-
- ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
- context.sendMessage(buffers, listener);
-
- verify(selector).queueWrite(writeOpCaptor.capture());
- BytesWriteOperation writeOp = writeOpCaptor.getValue();
-
- assertSame(listener, writeOp.getListener());
- assertSame(context, writeOp.getChannel());
- assertEquals(buffers[0], writeOp.getBuffersToWrite()[0]);
- }
-
- public void testSendMessageFromSameThreadIsQueuedInChannel() {
- ArgumentCaptor<BytesWriteOperation> writeOpCaptor = ArgumentCaptor.forClass(BytesWriteOperation.class);
-
- ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
- context.sendMessage(buffers, listener);
-
- verify(selector).queueWriteInChannelBuffer(writeOpCaptor.capture());
- BytesWriteOperation writeOp = writeOpCaptor.getValue();
-
- assertSame(listener, writeOp.getListener());
- assertSame(context, writeOp.getChannel());
- assertEquals(buffers[0], writeOp.getBuffersToWrite()[0]);
- }
-
- public void testWriteIsQueuedInChannel() {
- assertFalse(context.hasQueuedWriteOps());
-
- ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
- context.queueWriteOperation(new BytesWriteOperation(context, buffer, listener));
-
- assertTrue(context.hasQueuedWriteOps());
- }
-
- @SuppressWarnings("unchecked")
- public void testWriteOpsClearedOnClose() throws Exception {
- try (SocketChannel realChannel = SocketChannel.open()) {
- when(channel.getRawChannel()).thenReturn(realChannel);
- context = new BytesChannelContext(channel, selector, exceptionHandler, readConsumer, channelBuffer);
-
- assertFalse(context.hasQueuedWriteOps());
-
- ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
- context.queueWriteOperation(new BytesWriteOperation(context, buffer, listener));
-
- assertTrue(context.hasQueuedWriteOps());
-
- when(channel.isOpen()).thenReturn(true);
- context.closeFromSelector();
-
- verify(selector).executeFailedListener(same(listener), any(ClosedChannelException.class));
-
- assertFalse(context.hasQueuedWriteOps());
- }
- }
-
+ @SuppressWarnings("varargs")
public void testQueuedWriteIsFlushedInFlushCall() throws Exception {
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
- BytesWriteOperation writeOperation = mock(BytesWriteOperation.class);
- context.queueWriteOperation(writeOperation);
- assertTrue(context.hasQueuedWriteOps());
+ FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
+ context.queueWriteOperation(flushOperation);
+
+ assertTrue(context.readyForFlush());
- when(writeOperation.getBuffersToWrite()).thenReturn(buffers);
- when(writeOperation.isFullyFlushed()).thenReturn(true);
- when(writeOperation.getListener()).thenReturn(listener);
+ when(flushOperation.getBuffersToWrite()).thenReturn(buffers);
+ when(flushOperation.isFullyFlushed()).thenReturn(true);
+ when(flushOperation.getListener()).thenReturn(listener);
context.flushChannel();
verify(rawChannel).write(buffers, 0, buffers.length);
verify(selector).executeListener(listener, null);
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
}
public void testPartialFlush() throws IOException {
- assertFalse(context.hasQueuedWriteOps());
-
- BytesWriteOperation writeOperation = mock(BytesWriteOperation.class);
- context.queueWriteOperation(writeOperation);
+ assertFalse(context.readyForFlush());
+ FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
+ context.queueWriteOperation(flushOperation);
+ assertTrue(context.readyForFlush());
- assertTrue(context.hasQueuedWriteOps());
-
- when(writeOperation.isFullyFlushed()).thenReturn(false);
- when(writeOperation.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
+ when(flushOperation.isFullyFlushed()).thenReturn(false);
+ when(flushOperation.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
context.flushChannel();
verify(listener, times(0)).accept(null, null);
- assertTrue(context.hasQueuedWriteOps());
+ assertTrue(context.readyForFlush());
}
@SuppressWarnings("unchecked")
public void testMultipleWritesPartialFlushes() throws IOException {
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
- BytesWriteOperation writeOperation1 = mock(BytesWriteOperation.class);
- BytesWriteOperation writeOperation2 = mock(BytesWriteOperation.class);
- when(writeOperation1.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
- when(writeOperation2.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
- when(writeOperation1.getListener()).thenReturn(listener);
- when(writeOperation2.getListener()).thenReturn(listener2);
- context.queueWriteOperation(writeOperation1);
- context.queueWriteOperation(writeOperation2);
-
- assertTrue(context.hasQueuedWriteOps());
-
- when(writeOperation1.isFullyFlushed()).thenReturn(true);
- when(writeOperation2.isFullyFlushed()).thenReturn(false);
+ FlushReadyWrite flushOperation1 = mock(FlushReadyWrite.class);
+ FlushReadyWrite flushOperation2 = mock(FlushReadyWrite.class);
+ when(flushOperation1.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
+ when(flushOperation2.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);
+ when(flushOperation1.getListener()).thenReturn(listener);
+ when(flushOperation2.getListener()).thenReturn(listener2);
+
+ context.queueWriteOperation(flushOperation1);
+ context.queueWriteOperation(flushOperation2);
+
+ assertTrue(context.readyForFlush());
+
+ when(flushOperation1.isFullyFlushed()).thenReturn(true);
+ when(flushOperation2.isFullyFlushed()).thenReturn(false);
context.flushChannel();
verify(selector).executeListener(listener, null);
verify(listener2, times(0)).accept(null, null);
- assertTrue(context.hasQueuedWriteOps());
+ assertTrue(context.readyForFlush());
- when(writeOperation2.isFullyFlushed()).thenReturn(true);
+ when(flushOperation2.isFullyFlushed()).thenReturn(true);
context.flushChannel();
verify(selector).executeListener(listener2, null);
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
}
public void testWhenIOExceptionThrownListenerIsCalled() throws IOException {
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
- BytesWriteOperation writeOperation = mock(BytesWriteOperation.class);
- context.queueWriteOperation(writeOperation);
+ FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
+ context.queueWriteOperation(flushOperation);
- assertTrue(context.hasQueuedWriteOps());
+ assertTrue(context.readyForFlush());
IOException exception = new IOException();
- when(writeOperation.getBuffersToWrite()).thenReturn(buffers);
+ when(flushOperation.getBuffersToWrite()).thenReturn(buffers);
when(rawChannel.write(buffers, 0, buffers.length)).thenThrow(exception);
- when(writeOperation.getListener()).thenReturn(listener);
+ when(flushOperation.getListener()).thenReturn(listener);
expectThrows(IOException.class, () -> context.flushChannel());
verify(selector).executeFailedListener(listener, exception);
- assertFalse(context.hasQueuedWriteOps());
+ assertFalse(context.readyForFlush());
}
public void testWriteIOExceptionMeansChannelReadyToClose() throws IOException {
ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
- BytesWriteOperation writeOperation = mock(BytesWriteOperation.class);
- context.queueWriteOperation(writeOperation);
+ FlushReadyWrite flushOperation = mock(FlushReadyWrite.class);
+ context.queueWriteOperation(flushOperation);
IOException exception = new IOException();
- when(writeOperation.getBuffersToWrite()).thenReturn(buffers);
+ when(flushOperation.getBuffersToWrite()).thenReturn(buffers);
when(rawChannel.write(buffers, 0, buffers.length)).thenThrow(exception);
assertFalse(context.selectorShouldClose());
@@ -344,7 +253,7 @@ public void testWriteIOExceptionMeansChannelReadyToClose() throws IOException {
assertTrue(context.selectorShouldClose());
}
- public void initiateCloseSchedulesCloseWithSelector() {
+ public void testInitiateCloseSchedulesCloseWithSelector() {
context.closeChannel();
verify(selector).queueChannelClose(channel);
}
@@ -356,4 +265,18 @@ private static byte[] createMessage(int length) {
}
return bytes;
}
+
+ private static class TestReadWriteHandler extends BytesWriteHandler {
+
+ private final CheckedFunction<InboundChannelBuffer, Integer, IOException> fn;
+
+ private TestReadWriteHandler(CheckedFunction<InboundChannelBuffer, Integer, IOException> fn) {
+ this.fn = fn;
+ }
+
+ @Override
+ public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException {
+ return fn.apply(channelBuffer);
+ }
+ }
}
diff --git a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesWriteOperationTests.java b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java
similarity index 87%
rename from libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesWriteOperationTests.java
rename to libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java
index 05afc80a49086..a244de51f3591 100644
--- a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/BytesWriteOperationTests.java
+++ b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/FlushOperationTests.java
@@ -29,22 +29,19 @@
import static org.mockito.Mockito.mock;
-public class BytesWriteOperationTests extends ESTestCase {
+public class FlushOperationTests extends ESTestCase {
- private SocketChannelContext channelContext;
private BiConsumer<Void, Throwable> listener;
@Before
@SuppressWarnings("unchecked")
public void setFields() {
- channelContext = mock(SocketChannelContext.class);
listener = mock(BiConsumer.class);
-
}
public void testFullyFlushedMarker() {
ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
- BytesWriteOperation writeOp = new BytesWriteOperation(channelContext, buffers, listener);
+ FlushOperation writeOp = new FlushOperation(buffers, listener);
writeOp.incrementIndex(10);
@@ -53,7 +50,7 @@ public void testFullyFlushedMarker() {
public void testPartiallyFlushedMarker() {
ByteBuffer[] buffers = {ByteBuffer.allocate(10)};
- BytesWriteOperation writeOp = new BytesWriteOperation(channelContext, buffers, listener);
+ FlushOperation writeOp = new FlushOperation(buffers, listener);
writeOp.incrementIndex(5);
@@ -62,7 +59,7 @@ public void testPartiallyFlushedMarker() {
public void testMultipleFlushesWithCompositeBuffer() throws IOException {
ByteBuffer[] buffers = {ByteBuffer.allocate(10), ByteBuffer.allocate(15), ByteBuffer.allocate(3)};
- BytesWriteOperation writeOp = new BytesWriteOperation(channelContext, buffers, listener);
+ FlushOperation writeOp = new FlushOperation(buffers, listener);
ArgumentCaptor<ByteBuffer[]> buffersCaptor = ArgumentCaptor.forClass(ByteBuffer[].class);
diff --git a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java
index 17e6b7acba283..d6787f7cc1534 100644
--- a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java
+++ b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java
@@ -21,18 +21,27 @@
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;
+import org.mockito.ArgumentCaptor;
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.nio.channels.ClosedChannelException;
import java.nio.channels.SocketChannel;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
+import java.util.function.Supplier;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
+import static org.mockito.Matchers.isNull;
+import static org.mockito.Matchers.same;
import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class SocketChannelContextTests extends ESTestCase {
@@ -41,6 +50,9 @@ public class SocketChannelContextTests extends ESTestCase {
private TestSocketChannelContext context;
private Consumer<Exception> exceptionHandler;
private NioSocketChannel channel;
+ private BiConsumer<Void, Throwable> listener;
+ private SocketSelector selector;
+ private ReadWriteHandler readWriteHandler;
@SuppressWarnings("unchecked")
@Before
@@ -49,9 +61,15 @@ public void setup() throws Exception {
rawChannel = mock(SocketChannel.class);
channel = mock(NioSocketChannel.class);
+ listener = mock(BiConsumer.class);
when(channel.getRawChannel()).thenReturn(rawChannel);
exceptionHandler = mock(Consumer.class);
- context = new TestSocketChannelContext(channel, mock(SocketSelector.class), exceptionHandler);
+ selector = mock(SocketSelector.class);
+ readWriteHandler = mock(ReadWriteHandler.class);
+ InboundChannelBuffer channelBuffer = InboundChannelBuffer.allocatingInstance();
+ context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, channelBuffer);
+
+ when(selector.isOnCurrentThread()).thenReturn(true);
}
public void testIOExceptionSetIfEncountered() throws IOException {
@@ -119,10 +137,147 @@ public void testConnectFails() throws IOException {
assertSame(ioException, exception.get());
}
+ public void testWriteFailsIfClosing() {
+ context.closeChannel();
+
+ ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
+ context.sendMessage(buffers, listener);
+
+ verify(listener).accept(isNull(Void.class), any(ClosedChannelException.class));
+ }
+
+ public void testSendMessageFromDifferentThreadIsQueuedWithSelector() throws Exception {
+ ArgumentCaptor<WriteOperation> writeOpCaptor = ArgumentCaptor.forClass(WriteOperation.class);
+
+ when(selector.isOnCurrentThread()).thenReturn(false);
+
+ ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
+ WriteOperation writeOperation = mock(WriteOperation.class);
+ when(readWriteHandler.createWriteOperation(context, buffers, listener)).thenReturn(writeOperation);
+ context.sendMessage(buffers, listener);
+
+ verify(selector).queueWrite(writeOpCaptor.capture());
+ WriteOperation writeOp = writeOpCaptor.getValue();
+
+ assertSame(writeOperation, writeOp);
+ }
+
+ public void testSendMessageFromSameThreadIsQueuedInChannel() {
+ ArgumentCaptor<WriteOperation> writeOpCaptor = ArgumentCaptor.forClass(WriteOperation.class);
+
+ ByteBuffer[] buffers = {ByteBuffer.wrap(createMessage(10))};
+ WriteOperation writeOperation = mock(WriteOperation.class);
+ when(readWriteHandler.createWriteOperation(context, buffers, listener)).thenReturn(writeOperation);
+ context.sendMessage(buffers, listener);
+
+ verify(selector).queueWriteInChannelBuffer(writeOpCaptor.capture());
+ WriteOperation writeOp = writeOpCaptor.getValue();
+
+ assertSame(writeOperation, writeOp);
+ }
+
+ public void testWriteIsQueuedInChannel() {
+ assertFalse(context.readyForFlush());
+
+ ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
+ FlushReadyWrite writeOperation = new FlushReadyWrite(context, buffer, listener);
+ when(readWriteHandler.writeToBytes(writeOperation)).thenReturn(Collections.singletonList(writeOperation));
+ context.queueWriteOperation(writeOperation);
+
+ verify(readWriteHandler).writeToBytes(writeOperation);
+ assertTrue(context.readyForFlush());
+ }
+
+ public void testHandleReadBytesWillCheckForNewFlushOperations() throws IOException {
+ assertFalse(context.readyForFlush());
+ when(readWriteHandler.pollFlushOperations()).thenReturn(Collections.singletonList(mock(FlushOperation.class)));
+ context.handleReadBytes();
+ assertTrue(context.readyForFlush());
+ }
+
+ @SuppressWarnings({"unchecked", "varargs"})
+ public void testFlushOpsClearedOnClose() throws Exception {
+ try (SocketChannel realChannel = SocketChannel.open()) {
+ when(channel.getRawChannel()).thenReturn(realChannel);
+ InboundChannelBuffer channelBuffer = InboundChannelBuffer.allocatingInstance();
+ context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, channelBuffer);
+
+ assertFalse(context.readyForFlush());
+
+ ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
+ WriteOperation writeOperation = mock(WriteOperation.class);
+ BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+ when(readWriteHandler.writeToBytes(writeOperation)).thenReturn(Arrays.asList(new FlushOperation(buffer, listener),
+ new FlushOperation(buffer, listener2)));
+ context.queueWriteOperation(writeOperation);
+
+ assertTrue(context.readyForFlush());
+
+ when(channel.isOpen()).thenReturn(true);
+ context.closeFromSelector();
+
+ verify(selector, times(1)).executeFailedListener(same(listener), any(ClosedChannelException.class));
+ verify(selector, times(1)).executeFailedListener(same(listener2), any(ClosedChannelException.class));
+
+ assertFalse(context.readyForFlush());
+ }
+ }
+
+ @SuppressWarnings({"unchecked", "varargs"})
+ public void testWillPollForFlushOpsToClose() throws Exception {
+ try (SocketChannel realChannel = SocketChannel.open()) {
+ when(channel.getRawChannel()).thenReturn(realChannel);
+ InboundChannelBuffer channelBuffer = InboundChannelBuffer.allocatingInstance();
+ context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, channelBuffer);
+
+
+ ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
+ BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+
+ assertFalse(context.readyForFlush());
+ when(channel.isOpen()).thenReturn(true);
+ when(readWriteHandler.pollFlushOperations()).thenReturn(Arrays.asList(new FlushOperation(buffer, listener),
+ new FlushOperation(buffer, listener2)));
+ context.closeFromSelector();
+
+ verify(selector, times(1)).executeFailedListener(same(listener), any(ClosedChannelException.class));
+ verify(selector, times(1)).executeFailedListener(same(listener2), any(ClosedChannelException.class));
+
+ assertFalse(context.readyForFlush());
+ }
+ }
+
+ public void testCloseClosesWriteProducer() throws IOException {
+ try (SocketChannel realChannel = SocketChannel.open()) {
+ when(channel.getRawChannel()).thenReturn(realChannel);
+ when(channel.isOpen()).thenReturn(true);
+ InboundChannelBuffer buffer = InboundChannelBuffer.allocatingInstance();
+ BytesChannelContext context = new BytesChannelContext(channel, selector, exceptionHandler, readWriteHandler, buffer);
+ context.closeFromSelector();
+ verify(readWriteHandler).close();
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void testCloseClosesChannelBuffer() throws IOException {
+ try (SocketChannel realChannel = SocketChannel.open()) {
+ when(channel.getRawChannel()).thenReturn(realChannel);
+ when(channel.isOpen()).thenReturn(true);
+ Runnable closer = mock(Runnable.class);
+ Supplier<InboundChannelBuffer.Page> pageSupplier = () -> new InboundChannelBuffer.Page(ByteBuffer.allocate(1 << 14), closer);
+ InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
+ buffer.ensureCapacity(1);
+ TestSocketChannelContext context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, buffer);
+ context.closeFromSelector();
+ verify(closer).run();
+ }
+ }
+
private static class TestSocketChannelContext extends SocketChannelContext {
- private TestSocketChannelContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler) {
- super(channel, selector, exceptionHandler);
+ private TestSocketChannelContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler,
+ ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) {
+ super(channel, selector, exceptionHandler, readWriteHandler, channelBuffer);
}
@Override
@@ -135,16 +290,6 @@ public int read() throws IOException {
}
}
- @Override
- public void sendMessage(ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
-
- }
-
- @Override
- public void queueWriteOperation(WriteOperation writeOperation) {
-
- }
-
@Override
public void flushChannel() throws IOException {
if (randomBoolean()) {
@@ -155,11 +300,6 @@ public void flushChannel() throws IOException {
}
}
- @Override
- public boolean hasQueuedWriteOps() {
- return false;
- }
-
@Override
public boolean selectorShouldClose() {
return false;
@@ -167,7 +307,15 @@ public boolean selectorShouldClose() {
@Override
public void closeChannel() {
+ isClosing.set(true);
+ }
+ }
+ private static byte[] createMessage(int length) {
+ byte[] bytes = new byte[length];
+ for (int i = 0; i < length; ++i) {
+ bytes[i] = randomByte();
}
+ return bytes;
}
}
diff --git a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketEventHandlerTests.java b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketEventHandlerTests.java
index 4f476c1ff6b22..a80563f7d74db 100644
--- a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketEventHandlerTests.java
+++ b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketEventHandlerTests.java
@@ -26,6 +26,7 @@
import java.nio.channels.CancelledKeyException;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
+import java.util.Collections;
import java.util.function.Consumer;
import static org.mockito.Mockito.mock;
@@ -37,6 +38,7 @@ public class SocketEventHandlerTests extends ESTestCase {
private Consumer<Exception> exceptionHandler;
+ private ReadWriteHandler readWriteHandler;
private SocketEventHandler handler;
private NioSocketChannel channel;
private SocketChannel rawChannel;
@@ -46,13 +48,14 @@ public class SocketEventHandlerTests extends ESTestCase {
@SuppressWarnings("unchecked")
public void setUpHandler() throws IOException {
exceptionHandler = mock(Consumer.class);
+ readWriteHandler = mock(ReadWriteHandler.class);
SocketSelector selector = mock(SocketSelector.class);
handler = new SocketEventHandler(logger);
rawChannel = mock(SocketChannel.class);
channel = new NioSocketChannel(rawChannel);
when(rawChannel.finishConnect()).thenReturn(true);
- context = new DoNotRegisterContext(channel, selector, exceptionHandler, new TestSelectionKey(0));
+ context = new DoNotRegisterContext(channel, selector, exceptionHandler, new TestSelectionKey(0), readWriteHandler);
channel.setContext(context);
handler.handleRegistration(context);
@@ -83,7 +86,9 @@ public void testRegisterAddsAttachment() throws IOException {
}
public void testRegisterWithPendingWritesAddsOP_CONNECTAndOP_READAndOP_WRITEInterest() throws IOException {
- channel.getContext().queueWriteOperation(mock(BytesWriteOperation.class));
+ FlushReadyWrite flushReadyWrite = mock(FlushReadyWrite.class);
+ when(readWriteHandler.writeToBytes(flushReadyWrite)).thenReturn(Collections.singletonList(flushReadyWrite));
+ channel.getContext().queueWriteOperation(flushReadyWrite);
handler.handleRegistration(context);
assertEquals(SelectionKey.OP_READ | SelectionKey.OP_CONNECT | SelectionKey.OP_WRITE, context.getSelectionKey().interestOps());
}
@@ -162,7 +167,7 @@ public void testPostHandlingWillAddWriteIfNecessary() throws IOException {
TestSelectionKey selectionKey = new TestSelectionKey(SelectionKey.OP_READ);
SocketChannelContext context = mock(SocketChannelContext.class);
when(context.getSelectionKey()).thenReturn(selectionKey);
- when(context.hasQueuedWriteOps()).thenReturn(true);
+ when(context.readyForFlush()).thenReturn(true);
NioSocketChannel channel = mock(NioSocketChannel.class);
when(channel.getContext()).thenReturn(context);
@@ -176,7 +181,7 @@ public void testPostHandlingWillRemoveWriteIfNecessary() throws IOException {
TestSelectionKey key = new TestSelectionKey(SelectionKey.OP_READ | SelectionKey.OP_WRITE);
SocketChannelContext context = mock(SocketChannelContext.class);
when(context.getSelectionKey()).thenReturn(key);
- when(context.hasQueuedWriteOps()).thenReturn(false);
+ when(context.readyForFlush()).thenReturn(false);
NioSocketChannel channel = mock(NioSocketChannel.class);
when(channel.getContext()).thenReturn(context);
@@ -192,8 +197,8 @@ private class DoNotRegisterContext extends BytesChannelContext {
private final TestSelectionKey selectionKey;
DoNotRegisterContext(NioSocketChannel channel, SocketSelector selector, Consumer<Exception> exceptionHandler,
- TestSelectionKey selectionKey) {
- super(channel, selector, exceptionHandler, mock(ReadConsumer.class), InboundChannelBuffer.allocatingInstance());
+ TestSelectionKey selectionKey, ReadWriteHandler handler) {
+ super(channel, selector, exceptionHandler, handler, InboundChannelBuffer.allocatingInstance());
this.selectionKey = selectionKey;
}
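
The refactored tests above stub the new ReadWriteHandler so that a queued write surfaces as a flush-ready operation. A minimal sketch of that stubbing pattern, assuming Mockito and the elasticsearch-nio classes exercised in this file (the sketch class name is illustrative):

    import java.util.Collections;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    class ReadWriteHandlerStubSketch {
        // Stub the handler so the context reports the queued operation itself
        // as the single flush-ready write, mirroring the test setup above.
        static ReadWriteHandler stubFor(FlushReadyWrite write) {
            ReadWriteHandler handler = mock(ReadWriteHandler.class);
            when(handler.writeToBytes(write)).thenReturn(Collections.singletonList(write));
            return handler;
        }
    }
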
diff --git a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketSelectorTests.java b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketSelectorTests.java
index 223f14455f96d..a68f5c05dad5a 100644
--- a/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketSelectorTests.java
+++ b/libs/elasticsearch-nio/src/test/java/org/elasticsearch/nio/SocketSelectorTests.java
@@ -117,13 +117,13 @@ public void testSuccessfullyRegisterChannelWillAttemptConnect() throws Exception
public void testQueueWriteWhenNotRunning() throws Exception {
socketSelector.close();
- socketSelector.queueWrite(new BytesWriteOperation(channelContext, buffers, listener));
+ socketSelector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener));
verify(listener).accept(isNull(Void.class), any(ClosedSelectorException.class));
}
public void testQueueWriteChannelIsClosed() throws Exception {
- BytesWriteOperation writeOperation = new BytesWriteOperation(channelContext, buffers, listener);
+ WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener);
socketSelector.queueWrite(writeOperation);
when(channelContext.isOpen()).thenReturn(false);
@@ -136,7 +136,7 @@ public void testQueueWriteChannelIsClosed() throws Exception {
public void testQueueWriteSelectionKeyThrowsException() throws Exception {
SelectionKey selectionKey = mock(SelectionKey.class);
- BytesWriteOperation writeOperation = new BytesWriteOperation(channelContext, buffers, listener);
+ WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener);
CancelledKeyException cancelledKeyException = new CancelledKeyException();
socketSelector.queueWrite(writeOperation);
@@ -149,7 +149,7 @@ public void testQueueWriteSelectionKeyThrowsException() throws Exception {
}
public void testQueueWriteSuccessful() throws Exception {
- BytesWriteOperation writeOperation = new BytesWriteOperation(channelContext, buffers, listener);
+ WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener);
socketSelector.queueWrite(writeOperation);
assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0);
@@ -161,7 +161,7 @@ public void testQueueWriteSuccessful() throws Exception {
}
public void testQueueDirectlyInChannelBufferSuccessful() throws Exception {
- BytesWriteOperation writeOperation = new BytesWriteOperation(channelContext, buffers, listener);
+ WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener);
assertTrue((selectionKey.interestOps() & SelectionKey.OP_WRITE) == 0);
@@ -174,7 +174,7 @@ public void testQueueDirectlyInChannelBufferSuccessful() throws Exception {
public void testQueueDirectlyInChannelBufferSelectionKeyThrowsException() throws Exception {
SelectionKey selectionKey = mock(SelectionKey.class);
- BytesWriteOperation writeOperation = new BytesWriteOperation(channelContext, buffers, listener);
+ WriteOperation writeOperation = new FlushReadyWrite(channelContext, buffers, listener);
CancelledKeyException cancelledKeyException = new CancelledKeyException();
when(channelContext.getSelectionKey()).thenReturn(selectionKey);
@@ -277,7 +277,7 @@ public void testCleanup() throws Exception {
socketSelector.preSelect();
- socketSelector.queueWrite(new BytesWriteOperation(channelContext, buffers, listener));
+ socketSelector.queueWrite(new FlushReadyWrite(channelContext, buffers, listener));
socketSelector.scheduleForRegistration(unregisteredChannel);
TestSelectionKey testSelectionKey = new TestSelectionKey(0);
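
The selector tests above now funnel every write through FlushReadyWrite in place of the removed BytesWriteOperation. A minimal sketch of that call pattern, with the buffer and listener types inferred from the surrounding test code:

    import java.nio.ByteBuffer;
    import java.util.function.BiConsumer;

    class QueueWriteSketch {
        // Wrap the payload and completion listener in a FlushReadyWrite and
        // hand it to the selector, as the tests above do.
        static void queue(SocketSelector selector, SocketChannelContext context,
                          ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
            selector.queueWrite(new FlushReadyWrite(context, buffers, listener));
        }
    }
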
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java
index fd797c4340a8f..9969e6b38e54a 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestMultiSearchTemplateAction.java
@@ -77,7 +77,7 @@ public static MultiSearchTemplateRequest parseRequest(RestRequest restRequest, b
RestMultiSearchAction.parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex,
(searchRequest, bytes) -> {
- SearchTemplateRequest searchTemplateRequest = RestSearchTemplateAction.parse(bytes);
+ SearchTemplateRequest searchTemplateRequest = SearchTemplateRequest.fromXContent(bytes);
if (searchTemplateRequest.getScript() != null) {
searchTemplateRequest.setRequest(searchRequest);
multiRequest.add(searchTemplateRequest);
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java
index d8c67839cb80f..75acc09424359 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestRenderSearchTemplateAction.java
@@ -52,7 +52,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client
// Creates the render template request
SearchTemplateRequest renderRequest;
try (XContentParser parser = request.contentOrSourceParamParser()) {
- renderRequest = RestSearchTemplateAction.parse(parser);
+ renderRequest = SearchTemplateRequest.fromXContent(parser);
}
renderRequest.setSimulate(true);
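
Both REST handlers now delegate body parsing to SearchTemplateRequest.fromXContent. A minimal sketch of invoking the new entry point directly, assuming the three-argument createParser overload available in this version:

    import java.io.IOException;

    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    class FromXContentSketch {
        // Parse a raw JSON body into a SearchTemplateRequest through the
        // relocated parser.
        static SearchTemplateRequest parse(String json) throws IOException {
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)) {
                return SearchTemplateRequest.fromXContent(parser);
            }
        }
    }
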
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java
index 7ab9aa6003334..f42afcc19b80f 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java
@@ -47,33 +47,6 @@ public class RestSearchTemplateAction extends BaseRestHandler {
private static final Set<String> RESPONSE_PARAMS = Collections.singleton(RestSearchAction.TYPED_KEYS_PARAM);
- private static final ObjectParser<SearchTemplateRequest, Void> PARSER;
- static {
- PARSER = new ObjectParser<>("search_template");
- PARSER.declareField((parser, request, s) ->
- request.setScriptParams(parser.map())
- , new ParseField("params"), ObjectParser.ValueType.OBJECT);
- PARSER.declareString((request, s) -> {
- request.setScriptType(ScriptType.STORED);
- request.setScript(s);
- }, new ParseField("id"));
- PARSER.declareBoolean(SearchTemplateRequest::setExplain, new ParseField("explain"));
- PARSER.declareBoolean(SearchTemplateRequest::setProfile, new ParseField("profile"));
- PARSER.declareField((parser, request, value) -> {
- request.setScriptType(ScriptType.INLINE);
- if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
- //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder)
- try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
- request.setScript(Strings.toString(builder.copyCurrentStructure(parser)));
- } catch (IOException e) {
- throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e);
- }
- } else {
- request.setScript(parser.text());
- }
- }, new ParseField("source", "inline", "template"), ObjectParser.ValueType.OBJECT_OR_STRING);
- }
-
public RestSearchTemplateAction(Settings settings, RestController controller) {
super(settings);
@@ -99,17 +72,13 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client
// Creates the search template request
SearchTemplateRequest searchTemplateRequest;
try (XContentParser parser = request.contentOrSourceParamParser()) {
- searchTemplateRequest = PARSER.parse(parser, new SearchTemplateRequest(), null);
+ searchTemplateRequest = SearchTemplateRequest.fromXContent(parser);
}
searchTemplateRequest.setRequest(searchRequest);
return channel -> client.execute(SearchTemplateAction.INSTANCE, searchTemplateRequest, new RestStatusToXContentListener<>(channel));
}
- public static SearchTemplateRequest parse(XContentParser parser) throws IOException {
- return PARSER.parse(parser, new SearchTemplateRequest(), null);
- }
-
@Override
protected Set<String> responseParams() {
return RESPONSE_PARAMS;
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java
index b0186b7b0e3cf..da3cc3688149c 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java
@@ -23,19 +23,28 @@
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.ScriptType;
import java.io.IOException;
import java.util.Map;
+import java.util.Objects;
import static org.elasticsearch.action.ValidateActions.addValidationError;
/**
* A request to execute a search based on a search template.
*/
-public class SearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest {
+public class SearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest, ToXContentObject {
private SearchRequest request;
private boolean simulate = false;
@@ -60,6 +69,24 @@ public SearchRequest getRequest() {
return request;
}
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ SearchTemplateRequest request1 = (SearchTemplateRequest) o;
+ return simulate == request1.simulate &&
+ explain == request1.explain &&
+ profile == request1.profile &&
+ Objects.equals(request, request1.request) &&
+ scriptType == request1.scriptType &&
+ Objects.equals(script, request1.script) &&
+ Objects.equals(scriptParams, request1.scriptParams);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(request, simulate, explain, profile, scriptType, script, scriptParams);
+ }
public boolean isSimulate() {
return simulate;
@@ -134,6 +161,62 @@ public ActionRequestValidationException validate() {
return validationException;
}
+ private static ParseField ID_FIELD = new ParseField("id");
+ private static ParseField SOURCE_FIELD = new ParseField("source", "inline", "template");
+
+ private static ParseField PARAMS_FIELD = new ParseField("params");
+ private static ParseField EXPLAIN_FIELD = new ParseField("explain");
+ private static ParseField PROFILE_FIELD = new ParseField("profile");
+
+ private static final ObjectParser<SearchTemplateRequest, Void> PARSER;
+ static {
+ PARSER = new ObjectParser<>("search_template");
+ PARSER.declareField((parser, request, s) ->
+ request.setScriptParams(parser.map())
+ , PARAMS_FIELD, ObjectParser.ValueType.OBJECT);
+ PARSER.declareString((request, s) -> {
+ request.setScriptType(ScriptType.STORED);
+ request.setScript(s);
+ }, ID_FIELD);
+ PARSER.declareBoolean(SearchTemplateRequest::setExplain, EXPLAIN_FIELD);
+ PARSER.declareBoolean(SearchTemplateRequest::setProfile, PROFILE_FIELD);
+ PARSER.declareField((parser, request, value) -> {
+ request.setScriptType(ScriptType.INLINE);
+ if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
+ //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder)
+ try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
+ request.setScript(Strings.toString(builder.copyCurrentStructure(parser)));
+ } catch (IOException e) {
+ throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e);
+ }
+ } else {
+ request.setScript(parser.text());
+ }
+ }, SOURCE_FIELD, ObjectParser.ValueType.OBJECT_OR_STRING);
+ }
+
+ public static SearchTemplateRequest fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, new SearchTemplateRequest(), null);
+ }
+
+ @Override
+ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+ builder.startObject();
+
+ if (scriptType == ScriptType.STORED) {
+ builder.field(ID_FIELD.getPreferredName(), script);
+ } else if (scriptType == ScriptType.INLINE) {
+ builder.field(SOURCE_FIELD.getPreferredName(), script);
+ } else {
+ throw new UnsupportedOperationException("Unrecognized script type [" + scriptType + "].");
+ }
+
+ return builder.field(PARAMS_FIELD.getPreferredName(), scriptParams)
+ .field(EXPLAIN_FIELD.getPreferredName(), explain)
+ .field(PROFILE_FIELD.getPreferredName(), profile)
+ .endObject();
+ }
+
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
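
With ToXContentObject implemented, a request can now be rendered back out: a stored template serializes its id, an inline template its source, followed by params, explain and profile. A short sketch (the values are illustrative only):

    import java.util.Collections;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.script.ScriptType;

    class RequestToXContentSketch {
        // Render an inline request; with the toXContent above this produces
        // {"source":"...","params":{...},"explain":false,"profile":false}.
        static String asJson() {
            SearchTemplateRequest request = new SearchTemplateRequest();
            request.setScriptType(ScriptType.INLINE);
            request.setScript("{\"query\":{\"match\":{\"{{field}}\":\"{{value}}\"}}}");
            request.setScriptParams(Collections.<String, Object>singletonMap("value", "bar"));
            return Strings.toString(request);
        }
    }
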
diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
index 792d993915992..500a5a399ef4a 100644
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
+++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java
@@ -21,18 +21,23 @@
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.StatusToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
import java.io.InputStream;
+import java.util.Map;
public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject {
+ public static ParseField TEMPLATE_OUTPUT_FIELD = new ParseField("template_output");
/** Contains the source of the rendered template **/
private BytesReference source;
@@ -77,6 +82,30 @@ public void readFrom(StreamInput in) throws IOException {
response = in.readOptionalStreamable(SearchResponse::new);
}
+ public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException {
+ SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
+ Map<String, Object> contentAsMap = parser.map();
+
+ if (contentAsMap.containsKey(TEMPLATE_OUTPUT_FIELD.getPreferredName())) {
+ Object source = contentAsMap.get(TEMPLATE_OUTPUT_FIELD.getPreferredName());
+ XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)
+ .value(source);
+ searchTemplateResponse.setSource(BytesReference.bytes(builder));
+ } else {
+ XContentType contentType = parser.contentType();
+ XContentBuilder builder = XContentFactory.contentBuilder(contentType)
+ .map(contentAsMap);
+ XContentParser searchResponseParser = contentType.xContent().createParser(
+ parser.getXContentRegistry(),
+ parser.getDeprecationHandler(),
+ BytesReference.bytes(builder).streamInput());
+
+ SearchResponse searchResponse = SearchResponse.fromXContent(searchResponseParser);
+ searchTemplateResponse.setResponse(searchResponse);
+ }
+ return searchTemplateResponse;
+ }
+
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (hasResponse()) {
@@ -85,7 +114,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
builder.startObject();
//we can assume the template is always json as we convert it before compiling it
try (InputStream stream = source.streamInput()) {
- builder.rawField("template_output", stream, XContentType.JSON);
+ builder.rawField(TEMPLATE_OUTPUT_FIELD.getPreferredName(), stream, XContentType.JSON);
}
builder.endObject();
}
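
fromXContent above distinguishes the two response shapes by the presence of template_output. A minimal sketch of consuming both branches, assuming a caller in the same package as the mustache module classes:

    class ResponseHandlingSketch {
        // A render (simulate) call yields only the rendered template source;
        // an executed search yields a wrapped SearchResponse.
        static void handle(SearchTemplateResponse response) {
            if (response.hasResponse()) {
                System.out.println("hits: " + response.getResponse().getHits().getHits().length);
            } else {
                System.out.println("rendered: " + response.getSource().utf8ToString());
            }
        }
    }
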
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java
index 1529b655a5042..fe2fedf62b559 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateIT.java
@@ -101,7 +101,7 @@ public void testTemplateQueryAsEscapedString() throws Exception {
+ " \"size\": 1"
+ " }"
+ "}";
- SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, query));
+ SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, query));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1));
@@ -122,7 +122,7 @@ public void testTemplateQueryAsEscapedStringStartingWithConditionalClause() thro
+ " \"use_size\": true"
+ " }"
+ "}";
- SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, templateString));
+ SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1));
@@ -143,7 +143,7 @@ public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws
+ " \"use_size\": true"
+ " }"
+ "}";
- SearchTemplateRequest request = RestSearchTemplateAction.parse(createParser(JsonXContent.jsonXContent, templateString));
+ SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString));
request.setRequest(searchRequest);
SearchTemplateResponse searchResponse = client().execute(SearchTemplateAction.INSTANCE, request).get();
assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1));
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java
index 9cdca70f0e1a6..7d4a6479727e2 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestTests.java
@@ -19,117 +19,77 @@
package org.elasticsearch.script.mustache;
-import org.elasticsearch.common.xcontent.XContentParseException;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.script.ScriptType;
-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.search.RandomSearchRequestGenerator;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.test.AbstractStreamableTestCase;
import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.hasEntry;
-import static org.hamcrest.Matchers.hasItems;
-import static org.hamcrest.Matchers.hasKey;
-import static org.hamcrest.Matchers.nullValue;
-
-public class SearchTemplateRequestTests extends ESTestCase {
-
- public void testParseInlineTemplate() throws Exception {
- String source = "{" +
- " 'source' : {\n" +
- " 'query': {\n" +
- " 'terms': {\n" +
- " 'status': [\n" +
- " '{{#status}}',\n" +
- " '{{.}}',\n" +
- " '{{/status}}'\n" +
- " ]\n" +
- " }\n" +
- " }\n" +
- " }" +
- "}";
-
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("{\"query\":{\"terms\":{\"status\":[\"{{#status}}\",\"{{.}}\",\"{{/status}}\"]}}}"));
- assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
- assertThat(request.getScriptParams(), nullValue());
- }
+public class SearchTemplateRequestTests extends AbstractStreamableTestCase<SearchTemplateRequest> {
- public void testParseInlineTemplateWithParams() throws Exception {
- String source = "{" +
- " 'source' : {" +
- " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } }," +
- " 'size' : '{{my_size}}'" +
- " }," +
- " 'params' : {" +
- " 'my_field' : 'foo'," +
- " 'my_value' : 'bar'," +
- " 'my_size' : 5" +
- " }" +
- "}";
-
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}"));
- assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
- assertThat(request.getScriptParams().size(), equalTo(3));
- assertThat(request.getScriptParams(), hasEntry("my_field", "foo"));
- assertThat(request.getScriptParams(), hasEntry("my_value", "bar"));
- assertThat(request.getScriptParams(), hasEntry("my_size", 5));
+ @Override
+ protected SearchTemplateRequest createBlankInstance() {
+ return new SearchTemplateRequest();
}
- public void testParseInlineTemplateAsString() throws Exception {
- String source = "{'source' : '{\\\"query\\\":{\\\"bool\\\":{\\\"must\\\":{\\\"match\\\":{\\\"foo\\\":\\\"{{text}}\\\"}}}}}'}";
-
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("{\"query\":{\"bool\":{\"must\":{\"match\":{\"foo\":\"{{text}}\"}}}}}"));
- assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
- assertThat(request.getScriptParams(), nullValue());
+ @Override
+ protected SearchTemplateRequest createTestInstance() {
+ return createRandomRequest();
}
- @SuppressWarnings("unchecked")
- public void testParseInlineTemplateAsStringWithParams() throws Exception {
- String source = "{'source' : '{\\\"query\\\":{\\\"match\\\":{\\\"{{field}}\\\":\\\"{{value}}\\\"}}}', " +
- "'params': {'status': ['pending', 'published']}}";
-
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{field}}\":\"{{value}}\"}}}"));
- assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
- assertThat(request.getScriptParams().size(), equalTo(1));
- assertThat(request.getScriptParams(), hasKey("status"));
- assertThat((List<String>) request.getScriptParams().get("status"), hasItems("pending", "published"));
+ @Override
+ protected SearchTemplateRequest mutateInstance(SearchTemplateRequest instance) throws IOException {
+ List<Consumer<SearchTemplateRequest>> mutators = new ArrayList<>();
+
+ mutators.add(request -> request.setScriptType(
+ randomValueOtherThan(request.getScriptType(), () -> randomFrom(ScriptType.values()))));
+ mutators.add(request -> request.setScript(
+ randomValueOtherThan(request.getScript(), () -> randomAlphaOfLength(50))));
+
+ mutators.add(request -> {
+ Map<String, Object> mutatedScriptParams = new HashMap<>(request.getScriptParams());
+ String newField = randomValueOtherThanMany(mutatedScriptParams::containsKey, () -> randomAlphaOfLength(5));
+ mutatedScriptParams.put(newField, randomAlphaOfLength(10));
+ request.setScriptParams(mutatedScriptParams);
+ });
+
+ mutators.add(request -> request.setProfile(!request.isProfile()));
+ mutators.add(request -> request.setExplain(!request.isExplain()));
+ mutators.add(request -> request.setSimulate(!request.isSimulate()));
+
+ mutators.add(request -> request.setRequest(
+ RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource)));
+
+ SearchTemplateRequest mutatedInstance = copyInstance(instance);
+ Consumer<SearchTemplateRequest> mutator = randomFrom(mutators);
+ mutator.accept(mutatedInstance);
+ return mutatedInstance;
}
- public void testParseStoredTemplate() throws Exception {
- String source = "{'id' : 'storedTemplate'}";
-
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("storedTemplate"));
- assertThat(request.getScriptType(), equalTo(ScriptType.STORED));
- assertThat(request.getScriptParams(), nullValue());
- }
- public void testParseStoredTemplateWithParams() throws Exception {
- String source = "{'id' : 'another_template', 'params' : {'bar': 'foo'}}";
+ public static SearchTemplateRequest createRandomRequest() {
+ SearchTemplateRequest request = new SearchTemplateRequest();
+ request.setScriptType(randomFrom(ScriptType.values()));
+ request.setScript(randomAlphaOfLength(50));
- SearchTemplateRequest request = RestSearchTemplateAction.parse(newParser(source));
- assertThat(request.getScript(), equalTo("another_template"));
- assertThat(request.getScriptType(), equalTo(ScriptType.STORED));
- assertThat(request.getScriptParams().size(), equalTo(1));
- assertThat(request.getScriptParams(), hasEntry("bar", "foo"));
- }
+ Map<String, Object> scriptParams = new HashMap<>();
+ for (int i = 0; i < randomInt(10); i++) {
+ scriptParams.put(randomAlphaOfLength(5), randomAlphaOfLength(10));
+ }
+ request.setScriptParams(scriptParams);
- public void testParseWrongTemplate() {
- // Unclosed template id
- expectThrows(XContentParseException.class, () -> RestSearchTemplateAction.parse(newParser("{'id' : 'another_temp }")));
- }
+ request.setExplain(randomBoolean());
+ request.setProfile(randomBoolean());
+ request.setSimulate(randomBoolean());
- /**
- * Creates a {@link XContentParser} with the given String while replacing single quote to double quotes.
- */
- private XContentParser newParser(String s) throws IOException {
- assertNotNull(s);
- return createParser(JsonXContent.jsonXContent, s.replace("'", "\""));
+ request.setRequest(RandomSearchRequestGenerator.randomSearchRequest(
+ SearchSourceBuilder::searchSource));
+ return request;
}
}
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java
new file mode 100644
index 0000000000000..0e9e8ca628975
--- /dev/null
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateRequestXContentTests.java
@@ -0,0 +1,197 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.mustache;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.nullValue;
+
+public class SearchTemplateRequestXContentTests extends AbstractXContentTestCase<SearchTemplateRequest> {
+
+ @Override
+ public SearchTemplateRequest createTestInstance() {
+ return SearchTemplateRequestTests.createRandomRequest();
+ }
+
+ @Override
+ protected SearchTemplateRequest doParseInstance(XContentParser parser) throws IOException {
+ return SearchTemplateRequest.fromXContent(parser);
+ }
+
+ /**
+ * Note that when checking equality for xContent parsing, we omit two parts of the request:
+ * - The 'simulate' option, since this parameter is not included in the
+ * request's xContent (it's instead used to determine the request endpoint).
+ * - The random SearchRequest, since this component only affects the request
+ * parameters and also isn't captured in the request's xContent.
+ */
+ @Override
+ protected void assertEqualInstances(SearchTemplateRequest expectedInstance, SearchTemplateRequest newInstance) {
+ assertTrue(
+ expectedInstance.isExplain() == newInstance.isExplain() &&
+ expectedInstance.isProfile() == newInstance.isProfile() &&
+ expectedInstance.getScriptType() == newInstance.getScriptType() &&
+ Objects.equals(expectedInstance.getScript(), newInstance.getScript()) &&
+ Objects.equals(expectedInstance.getScriptParams(), newInstance.getScriptParams()));
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return false;
+ }
+
+ public void testToXContentWithInlineTemplate() throws IOException {
+ SearchTemplateRequest request = new SearchTemplateRequest();
+
+ request.setScriptType(ScriptType.INLINE);
+ request.setScript("{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }");
+ request.setProfile(true);
+
+ Map<String, Object> scriptParams = new HashMap<>();
+ scriptParams.put("my_field", "foo");
+ scriptParams.put("my_value", "bar");
+ request.setScriptParams(scriptParams);
+
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
+ .startObject()
+ .field("source", "{\"query\": { \"match\" : { \"{{my_field}}\" : \"{{my_value}}\" } } }")
+ .startObject("params")
+ .field("my_field", "foo")
+ .field("my_value", "bar")
+ .endObject()
+ .field("explain", false)
+ .field("profile", true)
+ .endObject();
+
+ XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
+ request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);
+
+ assertToXContentEquivalent(BytesReference.bytes(expectedRequest),
+ BytesReference.bytes(actualRequest),
+ contentType);
+ }
+
+ public void testToXContentWithStoredTemplate() throws IOException {
+ SearchTemplateRequest request = new SearchTemplateRequest();
+
+ request.setScriptType(ScriptType.STORED);
+ request.setScript("match_template");
+ request.setExplain(true);
+
+ Map<String, Object> params = new HashMap<>();
+ params.put("my_field", "foo");
+ params.put("my_value", "bar");
+ request.setScriptParams(params);
+
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder expectedRequest = XContentFactory.contentBuilder(contentType)
+ .startObject()
+ .field("id", "match_template")
+ .startObject("params")
+ .field("my_field", "foo")
+ .field("my_value", "bar")
+ .endObject()
+ .field("explain", true)
+ .field("profile", false)
+ .endObject();
+
+ XContentBuilder actualRequest = XContentFactory.contentBuilder(contentType);
+ request.toXContent(actualRequest, ToXContent.EMPTY_PARAMS);
+
+ assertToXContentEquivalent(
+ BytesReference.bytes(expectedRequest),
+ BytesReference.bytes(actualRequest),
+ contentType);
+ }
+
+ public void testFromXContentWithEmbeddedTemplate() throws Exception {
+ String source = "{" +
+ " 'source' : {\n" +
+ " 'query': {\n" +
+ " 'terms': {\n" +
+ " 'status': [\n" +
+ " '{{#status}}',\n" +
+ " '{{.}}',\n" +
+ " '{{/status}}'\n" +
+ " ]\n" +
+ " }\n" +
+ " }\n" +
+ " }" +
+ "}";
+
+ SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source));
+ assertThat(request.getScript(), equalTo("{\"query\":{\"terms\":{\"status\":[\"{{#status}}\",\"{{.}}\",\"{{/status}}\"]}}}"));
+ assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
+ assertThat(request.getScriptParams(), nullValue());
+ }
+
+ public void testFromXContentWithEmbeddedTemplateAndParams() throws Exception {
+ String source = "{" +
+ " 'source' : {" +
+ " 'query': { 'match' : { '{{my_field}}' : '{{my_value}}' } }," +
+ " 'size' : '{{my_size}}'" +
+ " }," +
+ " 'params' : {" +
+ " 'my_field' : 'foo'," +
+ " 'my_value' : 'bar'," +
+ " 'my_size' : 5" +
+ " }" +
+ "}";
+
+ SearchTemplateRequest request = SearchTemplateRequest.fromXContent(newParser(source));
+ assertThat(request.getScript(), equalTo("{\"query\":{\"match\":{\"{{my_field}}\":\"{{my_value}}\"}},\"size\":\"{{my_size}}\"}"));
+ assertThat(request.getScriptType(), equalTo(ScriptType.INLINE));
+ assertThat(request.getScriptParams().size(), equalTo(3));
+ assertThat(request.getScriptParams(), hasEntry("my_field", "foo"));
+ assertThat(request.getScriptParams(), hasEntry("my_value", "bar"));
+ assertThat(request.getScriptParams(), hasEntry("my_size", 5));
+ }
+
+ public void testFromXContentWithMalformedRequest() {
+ // Unclosed template id
+ expectThrows(XContentParseException.class, () -> SearchTemplateRequest.fromXContent(newParser("{'id' : 'another_temp }")));
+ }
+
+ /**
+ * Creates a {@link XContentParser} with the given String while replacing single quote to double quotes.
+ */
+ private XContentParser newParser(String s) throws IOException {
+ assertNotNull(s);
+ return createParser(JsonXContent.jsonXContent, s.replace("'", "\""));
+ }
+}
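
The equality note in the file above calls out that simulate is never written to the body. A small sketch, assuming a request whose script type, script and params are already set, and the same parser factory used in the earlier sketch, showing the flag is lost across an xContent round trip:

    import java.io.IOException;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.xcontent.DeprecationHandler;
    import org.elasticsearch.common.xcontent.NamedXContentRegistry;
    import org.elasticsearch.common.xcontent.XContentParser;
    import org.elasticsearch.common.xcontent.json.JsonXContent;

    class SimulateRoundTripSketch {
        // The 'simulate' flag is chosen by the REST endpoint, not the body,
        // so serializing and re-parsing a request always yields simulate=false.
        static boolean roundTrippedSimulate(SearchTemplateRequest request) throws IOException {
            request.setSimulate(true);
            try (XContentParser parser = JsonXContent.jsonXContent.createParser(
                    NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                    Strings.toString(request))) {
                return SearchTemplateRequest.fromXContent(parser).isSimulate(); // always false
            }
        }
    }
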
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java
new file mode 100644
index 0000000000000..53f5d1d8f842e
--- /dev/null
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java
@@ -0,0 +1,211 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script.mustache;
+
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.ShardSearchFailure;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.text.Text;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.internal.InternalSearchResponse;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.function.Predicate;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
+
+public class SearchTemplateResponseTests extends AbstractXContentTestCase<SearchTemplateResponse> {
+
+ @Override
+ protected SearchTemplateResponse createTestInstance() {
+ SearchTemplateResponse response = new SearchTemplateResponse();
+ if (randomBoolean()) {
+ response.setResponse(createSearchResponse());
+ } else {
+ response.setSource(createSource());
+ }
+ return response;
+ }
+
+ @Override
+ protected SearchTemplateResponse doParseInstance(XContentParser parser) throws IOException {
+ return SearchTemplateResponse.fromXContent(parser);
+ }
+
+ /**
+ * For simplicity we create a minimal response, as there is already a dedicated
+ * test class for search response parsing and serialization.
+ */
+ private static SearchResponse createSearchResponse() {
+ long tookInMillis = randomNonNegativeLong();
+ int totalShards = randomIntBetween(1, Integer.MAX_VALUE);
+ int successfulShards = randomIntBetween(0, totalShards);
+ int skippedShards = randomIntBetween(0, totalShards);
+ InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty();
+
+ return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards,
+ skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+ }
+
+ private static BytesReference createSource() {
+ try {
+ XContentBuilder source = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("query")
+ .startObject("match")
+ .field(randomAlphaOfLength(5), randomAlphaOfLength(10))
+ .endObject()
+ .endObject()
+ .endObject();
+ return BytesReference.bytes(source);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ protected Predicate<String> getRandomFieldsExcludeFilter() {
+ String templateOutputField = SearchTemplateResponse.TEMPLATE_OUTPUT_FIELD.getPreferredName();
+ return field -> field.equals(templateOutputField) || field.startsWith(templateOutputField + ".");
+ }
+
+ /**
+ * Note that we can't rely on normal equals and hashCode checks, since {@link SearchResponse} doesn't
+ * currently implement equals and hashCode. Instead, we compare the template outputs for equality,
+ * and perform some sanity checks on the search response instances.
+ */
+ @Override
+ protected void assertEqualInstances(SearchTemplateResponse expectedInstance, SearchTemplateResponse newInstance) {
+ assertNotSame(newInstance, expectedInstance);
+
+ BytesReference expectedSource = expectedInstance.getSource();
+ BytesReference newSource = newInstance.getSource();
+ assertEquals(expectedSource == null, newSource == null);
+ if (expectedSource != null) {
+ try {
+ assertToXContentEquivalent(expectedSource, newSource, XContentType.JSON);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ assertEquals(expectedInstance.hasResponse(), newInstance.hasResponse());
+ if (expectedInstance.hasResponse()) {
+ SearchResponse expectedResponse = expectedInstance.getResponse();
+ SearchResponse newResponse = newInstance.getResponse();
+
+ assertEquals(expectedResponse.getHits().totalHits, newResponse.getHits().totalHits);
+ assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001);
+ }
+ }
+
+ @Override
+ protected boolean supportsUnknownFields() {
+ return true;
+ }
+
+ public void testSourceToXContent() throws IOException {
+ SearchTemplateResponse response = new SearchTemplateResponse();
+
+ XContentBuilder source = XContentFactory.jsonBuilder()
+ .startObject()
+ .startObject("query")
+ .startObject("terms")
+ .field("status", new String[]{"pending", "published"})
+ .endObject()
+ .endObject()
+ .endObject();
+ response.setSource(BytesReference.bytes(source));
+
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
+ .startObject()
+ .startObject("template_output")
+ .startObject("query")
+ .startObject("terms")
+ .field("status", new String[]{"pending", "published"})
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+
+ XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
+ response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
+
+ assertToXContentEquivalent(
+ BytesReference.bytes(expectedResponse),
+ BytesReference.bytes(actualResponse),
+ contentType);
+ }
+
+ public void testSearchResponseToXContent() throws IOException {
+ SearchHit hit = new SearchHit(1, "id", new Text("type"), Collections.emptyMap());
+ hit.score(2.0f);
+ SearchHit[] hits = new SearchHit[] { hit };
+
+ InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
+ new SearchHits(hits, 100, 1.5f), null, null, null, false, null, 1);
+ SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null,
+ 0, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+
+ SearchTemplateResponse response = new SearchTemplateResponse();
+ response.setResponse(searchResponse);
+
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType)
+ .startObject()
+ .field("took", 0)
+ .field("timed_out", false)
+ .startObject("_shards")
+ .field("total", 0)
+ .field("successful", 0)
+ .field("skipped", 0)
+ .field("failed", 0)
+ .endObject()
+ .startObject("hits")
+ .field("total", 100)
+ .field("max_score", 1.5F)
+ .startArray("hits")
+ .startObject()
+ .field("_type", "type")
+ .field("_id", "id")
+ .field("_score", 2.0F)
+ .endObject()
+ .endArray()
+ .endObject()
+ .endObject();
+
+ XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType);
+ response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS);
+
+ assertToXContentEquivalent(
+ BytesReference.bytes(expectedResponse),
+ BytesReference.bytes(actualResponse),
+ contentType);
+ }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index 0364ad667efc7..4ebcf8bfb82d2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -32,6 +32,7 @@
import org.elasticsearch.painless.spi.PainlessExtension;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.plugins.ActionPlugin;
+import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.plugins.ExtensiblePlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
@@ -39,6 +40,7 @@
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
+import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript;
import java.util.ArrayList;
import java.util.Arrays;
@@ -55,18 +57,34 @@
*/
public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin {
- private final Map<ScriptContext<?>, List<Whitelist>> extendedWhitelists = new HashMap<>();
+ private static final Map<ScriptContext<?>, List<Whitelist>> whitelists;
+
+ /*
+ * Contexts from Core that need custom whitelists can add them to the map below.
+ * Whitelist resources should be added as appropriately named, separate files
+ * under Painless' resources
+ */
+ static {
+ Map<ScriptContext<?>, List<Whitelist>> map = new HashMap<>();
+
+ // Moving Function Pipeline Agg
+ List<Whitelist> movFn = new ArrayList<>(Whitelist.BASE_WHITELISTS);
+ movFn.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.elasticsearch.aggs.movfn.txt"));
+ map.put(MovingFunctionScript.CONTEXT, movFn);
+
+ whitelists = map;
+ }
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
Map<ScriptContext<?>, List<Whitelist>> contextsWithWhitelists = new HashMap<>();
for (ScriptContext<?> context : contexts) {
// we might have a context that only uses the base whitelists, so would not have been filled in by reloadSPI
- List<Whitelist> whitelists = extendedWhitelists.get(context);
- if (whitelists == null) {
- whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS);
+ List<Whitelist> contextWhitelists = whitelists.get(context);
+ if (contextWhitelists == null) {
+ contextWhitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS);
}
- contextsWithWhitelists.put(context, whitelists);
+ contextsWithWhitelists.put(context, contextWhitelists);
}
return new PainlessScriptEngine(settings, contextsWithWhitelists);
}
@@ -80,7 +98,7 @@ public List<Setting<?>> getSettings() {
public void reloadSPI(ClassLoader loader) {
for (PainlessExtension extension : ServiceLoader.load(PainlessExtension.class, loader)) {
for (Map.Entry<ScriptContext<?>, List<Whitelist>> entry : extension.getContextWhitelists().entrySet()) {
- List<Whitelist> existing = extendedWhitelists.computeIfAbsent(entry.getKey(),
+ List<Whitelist> existing = whitelists.computeIfAbsent(entry.getKey(),
c -> new ArrayList<>(Whitelist.BASE_WHITELISTS));
existing.addAll(entry.getValue());
}
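
The plugin now builds its context-to-whitelist map statically; a custom context layers a resource-file whitelist on top of the base list. A minimal sketch of registering another context's whitelist the same way (the resource name here is hypothetical):

    import java.util.ArrayList;
    import java.util.List;

    import org.elasticsearch.painless.spi.Whitelist;
    import org.elasticsearch.painless.spi.WhitelistLoader;

    class WhitelistRegistrationSketch {
        // Start from the base whitelists and add a resource file on top,
        // exactly as the moving-function entry above does.
        static List<Whitelist> customWhitelist() {
            List<Whitelist> whitelists = new ArrayList<>(Whitelist.BASE_WHITELISTS);
            whitelists.add(WhitelistLoader.loadFromResourceFiles(Whitelist.class, "org.elasticsearch.aggs.example.txt"));
            return whitelists;
        }
    }
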
diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt
new file mode 100644
index 0000000000000..a120b73820ada
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.aggs.movfn.txt
@@ -0,0 +1,32 @@
+#
+# Licensed to Elasticsearch under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Elasticsearch licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# This file contains a whitelist for the Moving Function pipeline aggregator in core
+
+class org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions {
+ double max(double[])
+ double min(double[])
+ double sum(double[])
+ double stdDev(double[], double)
+ double unweightedAvg(double[])
+ double linearWeightedAvg(double[])
+ double ewma(double[], double)
+ double holt(double[], double, double)
+ double holtWinters(double[], double, double, double, int, boolean)
+}
\ No newline at end of file
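
The whitelisted MovingFunctions methods are plain static helpers over double arrays, so the expectations in the YAML test below can be reproduced directly in Java. A small sketch, with the window contents taken from the test's third bucket:

    import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctions;

    class MovingFunctionsSketch {
        // With a window of [1.0, 2.0, 3.0], max is 3.0, min is 1.0 and sum
        // is 6.0, matching the bucket-3 expectations in the test below.
        static void demo() {
            double[] window = new double[] {1.0, 2.0, 3.0};
            System.out.println(MovingFunctions.max(window));
            System.out.println(MovingFunctions.min(window));
            System.out.println(MovingFunctions.sum(window));
        }
    }
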
diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml
new file mode 100644
index 0000000000000..039b54aab01d1
--- /dev/null
+++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml
@@ -0,0 +1,315 @@
+# Sanity integration test to make sure the custom context and whitelist work for moving_fn pipeline agg
+#
+setup:
+ - skip:
+ version: " - 6.4.0"
+ reason: "moving_fn added in 6.4.0"
+ - do:
+ indices.create:
+ index: test
+ body:
+ mappings:
+ _doc:
+ properties:
+ value_field:
+ type: integer
+ date:
+ type: date
+
+ - do:
+ bulk:
+ refresh: true
+ body:
+ - index:
+ _index: test
+ _type: _doc
+ _id: 1
+ - date: "2017-01-01T00:00:00"
+ value_field: 1
+ - index:
+ _index: test
+ _type: _doc
+ _id: 2
+ - date: "2017-01-02T00:00:00"
+ value_field: 2
+ - index:
+ _index: test
+ _type: _doc
+ _id: 3
+ - date: "2017-01-03T00:00:00"
+ value_field: 3
+ - index:
+ _index: test
+ _type: _doc
+ _id: 4
+ - date: "2017-01-04T00:00:00"
+ value_field: 4
+ - index:
+ _index: test
+ _type: _doc
+ _id: 5
+ - date: "2017-01-05T00:00:00"
+ value_field: 5
+ - index:
+ _index: test
+ _type: _doc
+ _id: 6
+ - date: "2017-01-06T00:00:00"
+ value_field: 6
+
+ - do:
+ indices.refresh:
+ index: [test]
+
+---
+"max":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.max(values)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+ - is_false: aggregations.the_histo.buckets.0.the_mov_fn.value
+ - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 }
+ - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 2.0 }
+ - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 3.0 }
+ - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 4.0 }
+ - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 5.0 }
+
+---
+"min":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.min(values)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+ - is_false: aggregations.the_histo.buckets.0.the_mov_fn.value
+ - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 }
+ - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 1.0 }
+ - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 1.0 }
+ - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 2.0 }
+ - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 3.0 }
+
+---
+"sum":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.sum(values)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+ - match: { aggregations.the_histo.buckets.0.the_mov_fn.value: 0.0 }
+ - match: { aggregations.the_histo.buckets.1.the_mov_fn.value: 1.0 }
+ - match: { aggregations.the_histo.buckets.2.the_mov_fn.value: 3.0 }
+ - match: { aggregations.the_histo.buckets.3.the_mov_fn.value: 6.0 }
+ - match: { aggregations.the_histo.buckets.4.the_mov_fn.value: 9.0 }
+ - match: { aggregations.the_histo.buckets.5.the_mov_fn.value: 12.0 }
+
+---
+"unweightedAvg":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.unweightedAvg(values)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+
+---
+"linearWeightedAvg":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.linearWeightedAvg(values)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+
+---
+"ewma":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.ewma(values, 0.1)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+
+---
+"holt":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.holt(values, 0.1, 0.1)"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+
+---
+"holtWinters":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 1
+ script: "if (values.length > 1) { MovingFunctions.holtWinters(values, 0.1, 0.1, 0.1, 1, true)}"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+---
+"stdDev":
+
+ - do:
+ search:
+ body:
+ size: 0
+ aggs:
+ the_histo:
+ date_histogram:
+ field: "date"
+ interval: "1d"
+ aggs:
+ the_avg:
+ avg:
+ field: "value_field"
+ the_mov_fn:
+ moving_fn:
+ buckets_path: "the_avg"
+ window: 3
+ script: "MovingFunctions.stdDev(values, MovingFunctions.unweightedAvg(values))"
+
+ - match: { hits.total: 6 }
+ - length: { hits.hits: 0 }
+
+
+
+
+
+
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
index 5babcef2e8d65..103679f5328ef 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java
@@ -161,7 +161,7 @@ static BinaryFieldMapper createQueryBuilderFieldBuilder(BuilderContext context)
}
static RangeFieldMapper createExtractedRangeFieldBuilder(String name, RangeType rangeType, BuilderContext context) {
- RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder(name, rangeType, context.indexCreatedVersion());
+ RangeFieldMapper.Builder builder = new RangeFieldMapper.Builder(name, rangeType);
// For now no doc values, because in processQuery(...) only the Lucene range fields get added:
builder.docValues(false);
return builder.build(context);
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java
index 12db47908d1f3..6e39a7f50d2cd 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpChannel.java
@@ -40,6 +40,7 @@
import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.netty4.cors.Netty4CorsHandler;
import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest;
import org.elasticsearch.rest.AbstractRestChannel;
@@ -60,27 +61,29 @@ final class Netty4HttpChannel extends AbstractRestChannel {
private final FullHttpRequest nettyRequest;
private final HttpPipelinedRequest pipelinedRequest;
private final ThreadContext threadContext;
+ private final HttpHandlingSettings handlingSettings;
/**
* @param transport The corresponding NettyHttpServerTransport to which this channel belongs.
* @param request The request that is handled by this channel.
* @param pipelinedRequest If HTTP pipelining is enabled provide the corresponding pipelined request. May be null if
- * HTTP pipelining is disabled.
- * @param detailedErrorsEnabled true iff error messages should include stack traces.
+ * HTTP pipelining is disabled.
+ * @param handlingSettings the HTTP handling settings, including whether error messages should include stack traces.
* @param threadContext the thread context for the channel
*/
Netty4HttpChannel(
final Netty4HttpServerTransport transport,
final Netty4HttpRequest request,
final HttpPipelinedRequest pipelinedRequest,
- final boolean detailedErrorsEnabled,
+ final HttpHandlingSettings handlingSettings,
final ThreadContext threadContext) {
- super(request, detailedErrorsEnabled);
+ super(request, handlingSettings.getDetailedErrorsEnabled());
this.transport = transport;
this.channel = request.getChannel();
this.nettyRequest = request.request();
this.pipelinedRequest = pipelinedRequest;
this.threadContext = threadContext;
+ this.handlingSettings = handlingSettings;
}
@Override
@@ -170,7 +173,7 @@ private void setHeaderField(HttpResponse resp, String headerField, String value,
}
private void addCookies(HttpResponse resp) {
- if (transport.resetCookies) {
+ if (handlingSettings.isResetCookies()) {
String cookieString = nettyRequest.headers().get(HttpHeaderNames.COOKIE);
if (cookieString != null) {
Set<Cookie> cookies = ServerCookieDecoder.STRICT.decode(cookieString);
@@ -222,8 +225,6 @@ private FullHttpResponse newResponse(ByteBuf buffer) {
return response;
}
- private static final HttpResponseStatus TOO_MANY_REQUESTS = new HttpResponseStatus(429, "Too Many Requests");
-
private static Map<RestStatus, HttpResponseStatus> MAP;
static {
@@ -266,7 +267,7 @@ private FullHttpResponse newResponse(ByteBuf buffer) {
map.put(RestStatus.UNPROCESSABLE_ENTITY, HttpResponseStatus.BAD_REQUEST);
map.put(RestStatus.LOCKED, HttpResponseStatus.BAD_REQUEST);
map.put(RestStatus.FAILED_DEPENDENCY, HttpResponseStatus.BAD_REQUEST);
- map.put(RestStatus.TOO_MANY_REQUESTS, TOO_MANY_REQUESTS);
+ map.put(RestStatus.TOO_MANY_REQUESTS, HttpResponseStatus.TOO_MANY_REQUESTS);
map.put(RestStatus.INTERNAL_SERVER_ERROR, HttpResponseStatus.INTERNAL_SERVER_ERROR);
map.put(RestStatus.NOT_IMPLEMENTED, HttpResponseStatus.NOT_IMPLEMENTED);
map.put(RestStatus.BAD_GATEWAY, HttpResponseStatus.BAD_GATEWAY);
@@ -279,5 +280,4 @@ private FullHttpResponse newResponse(ByteBuf buffer) {
private static HttpResponseStatus getStatus(RestStatus status) {
return MAP.getOrDefault(status, HttpResponseStatus.INTERNAL_SERVER_ERROR);
}
-
}
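
Netty4HttpChannel now reads per-request behavior from HttpHandlingSettings instead of fields on the transport. A minimal sketch of the getter-driven style, using only the accessors that appear in this change:

    import org.elasticsearch.http.HttpHandlingSettings;

    class HandlingSettingsSketch {
        // Branch on the settings object rather than transport-level flags.
        static String describe(HttpHandlingSettings settings) {
            return "detailedErrors=" + settings.getDetailedErrorsEnabled()
                + ", resetCookies=" + settings.isResetCookies();
        }
    }
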
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
index 1fd18b2a016d7..74429c8dda9b7 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestHandler.java
@@ -29,6 +29,7 @@
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.HttpHeaders;
import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.netty4.pipelining.HttpPipelinedRequest;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.transport.netty4.Netty4Utils;
@@ -39,14 +40,15 @@
class Netty4HttpRequestHandler extends SimpleChannelInboundHandler