Merge branch 'master' into pr/33371
* master: (91 commits)
  Preserve cluster settings on full restart tests (elastic#33590)
  Use IndexWriter.getFlushingBytes() rather than tracking it ourselves (elastic#33582)
  Fix upgrading of list settings (elastic#33589)
  Add read-only Engine (elastic#33563)
  HLRC: Add ML get categories API (elastic#33465)
  SQL: Adds MONTHNAME, DAYNAME and QUARTER functions (elastic#33411)
  Add predicate_token_filter (elastic#33431)
  Fix Replace function. Adds more tests to all string functions. (elastic#33478)
  [ML] Rename input_fields to column_names in file structure (elastic#33568)
  Add full cluster restart base class (elastic#33577)
  Validate list values for settings (elastic#33503)
  Copy and validate soft-deletes setting on resize (elastic#33517)
  Test: Fix package name
  SQL: Fix result column names for arithmetic functions (elastic#33500)
  Upgrade to latest Lucene snapshot (elastic#33505)
  Enable not wiping cluster settings after REST test (elastic#33575)
  MINOR: Remove Dead Code in SearchScript (elastic#33569)
  [Test] Remove duplicate method in TestShardRouting (elastic#32815)
  mute test on windows
  Update beats template to include apm-server metrics (elastic#33286)
  ...
jasontedor committed Sep 11, 2018
2 parents e2e2437 + 73c75be commit 4d77eb7
Showing 993 changed files with 20,074 additions and 7,676 deletions.
@@ -539,9 +539,9 @@ class BuildPlugin implements Plugin<Project> {
from generatePOMTask.destination
into "${project.buildDir}/distributions"
rename {
- generatePOMTask.ext.pomFileName == null ?
- "${project.archivesBaseName}-${project.version}.pom" :
- generatePOMTask.ext.pomFileName
+ generatePOMTask.ext.pomFileName == null ?
+ "${project.archivesBaseName}-${project.version}.pom" :
+ generatePOMTask.ext.pomFileName
}
}
}
@@ -337,7 +337,13 @@ class ClusterFormationTasks {
if (node.nodeVersion.major >= 7) {
esConfig['indices.breaker.total.use_real_memory'] = false
}
- esConfig.putAll(node.config.settings)
+ for (Map.Entry<String, Object> setting : node.config.settings) {
+     if (setting.value == null) {
+         esConfig.remove(setting.key)
+     } else {
+         esConfig.put(setting.key, setting.value)
+     }
+ }

Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)
writeConfig.doFirst {
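
The loop above replaces a blind putAll: a null value in a node's settings map now removes the corresponding default from esConfig instead of being written out as a literal null, presumably so a test's own config can unset a default the build script sets unconditionally (such as the real-memory breaker setting a few lines up). A minimal Java sketch of the same merge rule, separate from the real Groovy build code (class and method names here are hypothetical):

    import java.util.HashMap;
    import java.util.Map;

    class SettingsMergeSketch {

        // Overrides win; a null override value unsets the default key entirely.
        static Map<String, Object> merge(Map<String, Object> defaults, Map<String, Object> overrides) {
            Map<String, Object> merged = new HashMap<>(defaults);
            for (Map.Entry<String, Object> setting : overrides.entrySet()) {
                if (setting.getValue() == null) {
                    merged.remove(setting.getKey());
                } else {
                    merged.put(setting.getKey(), setting.getValue());
                }
            }
            return merged;
        }

        public static void main(String[] args) {
            Map<String, Object> defaults = new HashMap<>();
            defaults.put("indices.breaker.total.use_real_memory", false);
            Map<String, Object> overrides = new HashMap<>();
            overrides.put("indices.breaker.total.use_real_memory", null); // drop the default
            System.out.println(merge(defaults, overrides)); // prints {}
        }
    }
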
11 changes: 1 addition & 10 deletions buildSrc/src/main/resources/checkstyle_suppressions.xml
@@ -360,13 +360,7 @@
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregator.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]TermsAggregatorFactory.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]terms[/\\]support[/\\]IncludeExclude.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]cardinality[/\\]CardinalityAggregator.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]cardinality[/\\]HyperLogLogPlusPlus.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]geobounds[/\\]GeoBoundsAggregator.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]geocentroid[/\\]InternalGeoCentroid.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentileRanksAggregator.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]percentiles[/\\]tdigest[/\\]TDigestPercentilesAggregator.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]scripted[/\\]ScriptedMetricAggregator.java" checks="LineLength" />
+ <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]HyperLogLogPlusPlus.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]AggregationPath.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]DfsSearchResult.java" checks="LineLength" />
@@ -468,7 +462,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]ExpectedShardSizeAllocationTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedNodeRoutingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FailedShardsRoutingTests.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]FilterRoutingTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]IndexBalanceTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]NodeVersionAllocationDeciderTests.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]routing[/\\]allocation[/\\]PreferLocalPrimariesToRelocatingPrimariesTests.java" checks="LineLength" />
@@ -641,8 +634,6 @@
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsDocCountErrorIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]TermsShardMinDocCountIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]bucket[/\\]nested[/\\]NestedAggregatorTests.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]AbstractGeoTestCase.java" checks="LineLength" />
- <suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]metrics[/\\]TopHitsIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]ExtendedStatsBucketIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]moving[/\\]avg[/\\]MovAvgIT.java" checks="LineLength" />
<suppress files="server[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]pipeline[/\\]serialdiff[/\\]SerialDiffIT.java" checks="LineLength" />
2 changes: 1 addition & 1 deletion buildSrc/version.properties
@@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
- lucene = 7.5.0-snapshot-13b9e28f9d
+ lucene = 8.0.0-snapshot-66c671ea80

# optional dependencies
spatial4j = 0.7
@@ -43,7 +43,7 @@ public class GraphClient {
*/
public final GraphExploreResponse explore(GraphExploreRequest graphExploreRequest,
RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore,
+ return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, GraphRequestConverters::explore,
options, GraphExploreResponse::fromXContext, emptySet());
}

@@ -56,7 +56,7 @@ public final GraphExploreResponse explore(GraphExploreRequest graphExploreReques
public final void exploreAsync(GraphExploreRequest graphExploreRequest,
RequestOptions options,
ActionListener<GraphExploreResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore,
+ restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, GraphRequestConverters::explore,
options, GraphExploreResponse::fromXContext, listener, emptySet());
}

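
Call sites are unaffected by this change; only the converter reference moves from the shared RequestConverters class to the new graph-specific one below. A hedged usage sketch under the usual HLRC conventions (the index, field, and value are invented, and the request-building calls are assumed from the standard graph explore API of the time):

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
    import org.elasticsearch.protocol.xpack.graph.GraphExploreResponse;
    import org.elasticsearch.protocol.xpack.graph.Hop;

    public class GraphExploreSketch {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                GraphExploreRequest exploreRequest = new GraphExploreRequest();
                exploreRequest.indices("my-index");                       // hypothetical index
                Hop hop = exploreRequest.createNextHop(
                        QueryBuilders.termQuery("user", "kimchy"));       // hypothetical seed query
                hop.addVertexRequest("products");                         // hypothetical field to expand into vertices
                // This call is what now routes through GraphRequestConverters.explore(...)
                GraphExploreResponse response = client.graph().explore(exploreRequest, RequestOptions.DEFAULT);
                System.out.println("vertices found: " + response.getVertices().size());
            }
        }
    }
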
@@ -0,0 +1,35 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;

import java.io.IOException;

public class GraphRequestConverters {

static Request explore(GraphExploreRequest exploreRequest) throws IOException {
String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore");
Request request = new Request(HttpGet.METHOD_NAME, endpoint);
request.setEntity(RequestConverters.createEntity(exploreRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
}
@@ -55,7 +55,7 @@ public final class IngestClient {
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options,
+ return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::putPipeline, options,
AcknowledgedResponse::fromXContent, emptySet());
}

@@ -68,7 +68,7 @@ public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptio
* @param listener the listener to be notified upon request completion
*/
public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options,
+ restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::putPipeline, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
}

@@ -82,7 +82,7 @@ public void putPipelineAsync(PutPipelineRequest request, RequestOptions options,
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::getPipeline, options,
+ return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, emptySet());
}

@@ -95,7 +95,7 @@ public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOption
* @param listener the listener to be notified upon request completion
*/
public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener<GetPipelineResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::getPipeline, options,
+ restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options,
GetPipelineResponse::fromXContent, listener, emptySet());
}

@@ -110,7 +110,7 @@ public void getPipelineAsync(GetPipelineRequest request, RequestOptions options,
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options,
+ return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
AcknowledgedResponse::fromXContent, emptySet());
}

@@ -124,7 +124,7 @@ public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, Reques
* @param listener the listener to be notified upon request completion
*/
public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options,
+ restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::deletePipeline, options,
AcknowledgedResponse::fromXContent, listener, emptySet());
}

@@ -140,7 +140,7 @@ public SimulatePipelineResponse simulate(SimulatePipelineRequest request, Reques
* @throws IOException in case there is a problem sending the request or parsing back the response
*/
public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException {
- return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options,
+ return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
SimulatePipelineResponse::fromXContent, emptySet());
}

@@ -157,7 +157,7 @@ public SimulatePipelineResponse simulate(SimulatePipelineRequest request, Reques
public void simulateAsync(SimulatePipelineRequest request,
RequestOptions options,
ActionListener<SimulatePipelineResponse> listener) {
- restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options,
+ restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::simulatePipeline, options,
SimulatePipelineResponse::fromXContent, listener, emptySet());
}
}
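
As with the graph client, nothing changes for callers here; each method just picks up its converter from the new IngestRequestConverters class below. A hedged usage sketch under the usual HLRC conventions (the pipeline id and definition are invented, and import paths are assumed from the server classes the client reused at the time):

    import org.apache.http.HttpHost;
    import org.elasticsearch.action.ingest.PutPipelineRequest;
    import org.elasticsearch.action.support.master.AcknowledgedResponse;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.common.xcontent.XContentType;

    public class PutPipelineSketch {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                String pipeline = "{\"description\":\"hypothetical pipeline\","
                        + "\"processors\":[{\"set\":{\"field\":\"routed\",\"value\":true}}]}";
                PutPipelineRequest putRequest = new PutPipelineRequest(
                        "my-pipeline",               // hypothetical pipeline id
                        new BytesArray(pipeline),    // pipeline definition as JSON
                        XContentType.JSON);
                // This call is what now routes through IngestRequestConverters.putPipeline(...)
                AcknowledgedResponse response = client.ingest().putPipeline(putRequest, RequestOptions.DEFAULT);
                System.out.println("acknowledged: " + response.isAcknowledged());
            }
        }
    }
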
@@ -0,0 +1,89 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client;

import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.ingest.DeletePipelineRequest;
import org.elasticsearch.action.ingest.GetPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.SimulatePipelineRequest;

import java.io.IOException;

public class IngestRequestConverters {

static Request getPipeline(GetPipelineRequest getPipelineRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
.addCommaSeparatedPathParts(getPipelineRequest.getIds())
.build();
Request request = new Request(HttpGet.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout());
return request;
}

static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
.addPathPart(putPipelineRequest.getId())
.build();
Request request = new Request(HttpPut.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withTimeout(putPipelineRequest.timeout());
parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());

request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}

static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) {
String endpoint = new RequestConverters.EndpointBuilder()
.addPathPartAsIs("_ingest/pipeline")
.addPathPart(deletePipelineRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

RequestConverters.Params parameters = new RequestConverters.Params(request);
parameters.withTimeout(deletePipelineRequest.timeout());
parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout());

return request;
}

static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException {
RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline");
if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) {
builder.addPathPart(simulatePipelineRequest.getId());
}
builder.addPathPartAsIs("_simulate");
String endpoint = builder.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
RequestConverters.Params params = new RequestConverters.Params(request);
params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose()));
request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE));
return request;
}
}
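
These converters only assemble the low-level Request: HTTP method, the _ingest/pipeline path, timeout and master_timeout as query parameters via RequestConverters.Params, and, where needed, the JSON body; execution stays in RestHighLevelClient. A small, hypothetical package-local sketch of the paths two of them produce (it sits in org.elasticsearch.client so it can reach the package-private methods):

    package org.elasticsearch.client;

    import org.elasticsearch.action.ingest.DeletePipelineRequest;
    import org.elasticsearch.action.ingest.GetPipelineRequest;

    public class IngestRequestConvertersSketch {
        public static void main(String[] args) {
            // Expected to print something like: GET /_ingest/pipeline/my-pipeline
            Request get = IngestRequestConverters.getPipeline(new GetPipelineRequest("my-pipeline"));
            System.out.println(get.getMethod() + " " + get.getEndpoint());

            // Expected to print something like: DELETE /_ingest/pipeline/my-pipeline
            Request delete = IngestRequestConverters.deletePipeline(new DeletePipelineRequest("my-pipeline"));
            System.out.println(delete.getMethod() + " " + delete.getEndpoint());
        }
    }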