Use CollectionUtils.isEmpty where appropriate (elastic#55910)
This commit uses the isEmpty utility method for arrays in place of explicit null and length-greater-than-zero checks.
zhenxianyimeng authored and rjernst committed May 11, 2020
1 parent 32471ab commit 8e96e5c
Showing 25 changed files with 59 additions and 35 deletions.
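
The substitution is mechanical across the files below. As a quick orientation, here is a minimal, self-contained sketch of the before/after pattern; the class and method names (IsEmptyPatternExample, joinOld, joinNew) are illustrative only and do not appear in the commit.

import org.elasticsearch.common.util.CollectionUtils;

// Illustrative stand-in for the many call sites touched in this commit.
class IsEmptyPatternExample {

    // Before: explicit null-and-length check on the array.
    static String joinOld(String[] fields) {
        if (fields != null && fields.length > 0) {
            return String.join(",", fields);
        }
        return "";
    }

    // After: the same condition expressed through the shared utility method.
    static String joinNew(String[] fields) {
        if (CollectionUtils.isEmpty(fields) == false) {
            return String.join(",", fields);
        }
        return "";
    }
}

The == false form (rather than !) follows the negation style already present in the surrounding code, as the unchanged context lines in the hunks below show.
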
@@ -64,6 +64,7 @@
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -855,18 +856,18 @@ Params withFetchSourceContext(FetchSourceContext fetchSourceContext) {
if (fetchSourceContext.fetchSource() == false) {
putParam("_source", Boolean.FALSE.toString());
}
if (fetchSourceContext.includes() != null && fetchSourceContext.includes().length > 0) {
if (CollectionUtils.isEmpty(fetchSourceContext.includes()) == false) {
putParam("_source_includes", String.join(",", fetchSourceContext.includes()));
}
if (fetchSourceContext.excludes() != null && fetchSourceContext.excludes().length > 0) {
if (CollectionUtils.isEmpty(fetchSourceContext.excludes()) == false) {
putParam("_source_excludes", String.join(",", fetchSourceContext.excludes()));
}
}
return this;
}

Params withFields(String[] fields) {
if (fields != null && fields.length > 0) {
if (CollectionUtils.isEmpty(fields) == false) {
return putParam("fields", String.join(",", fields));
}
return this;
@@ -967,7 +968,7 @@ Params withSlices(int slices) {
}

Params withStoredFields(String[] storedFields) {
if (storedFields != null && storedFields.length > 0) {
if (CollectionUtils.isEmpty(storedFields) == false) {
return putParam("stored_fields", String.join(",", storedFields));
}
return this;
@@ -29,6 +29,7 @@
import org.elasticsearch.client.cluster.RemoteInfoRequest;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
@@ -142,7 +143,7 @@ public void testClusterHealth() {
Assert.assertThat(request, CoreMatchers.notNullValue());
Assert.assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
Assert.assertThat(request.getEntity(), nullValue());
if (indices != null && indices.length > 0) {
if (CollectionUtils.isEmpty(indices) == false) {
Assert.assertThat(request.getEndpoint(), equalTo("/_cluster/health/" + String.join(",", indices)));
} else {
Assert.assertThat(request.getEndpoint(), equalTo("/_cluster/health"));
@@ -474,7 +474,7 @@ public void testGetSettings() throws IOException {
}

StringJoiner endpoint = new StringJoiner("/", "/", "");
if (indicesUnderTest != null && indicesUnderTest.length > 0) {
if (CollectionUtils.isEmpty(indicesUnderTest) == false) {
endpoint.add(String.join(",", indicesUnderTest));
}
endpoint.add("_settings");
@@ -487,7 +487,7 @@ }
}
}
getSettingsRequest.names(names);
if (names != null && names.length > 0) {
if (CollectionUtils.isEmpty(names) == false) {
endpoint.add(String.join(",", names));
}
}
@@ -1207,7 +1207,7 @@ public void testDeleteTemplateRequest() {
public void testReloadAnalyzers() {
String[] indices = RequestConvertersTests.randomIndicesNames(1, 5);
StringJoiner endpoint = new StringJoiner("/", "/", "");
if (indices != null && indices.length > 0) {
if (CollectionUtils.isEmpty(indices) == false) {
endpoint.add(String.join(",", indices));
}
ReloadAnalyzersRequest reloadRequest = new ReloadAnalyzersRequest(indices);
@@ -33,6 +33,7 @@
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.DocsStats;
@@ -393,7 +394,7 @@ private void logSearchResponse(int numberOfShards, long numberOfDocs, int iterat
logger.info("iteration [{}] - successful shards: {} (expected {})", iteration,
searchResponse.getSuccessfulShards(), numberOfShards);
logger.info("iteration [{}] - failed shards: {} (expected 0)", iteration, searchResponse.getFailedShards());
if (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) {
if (CollectionUtils.isEmpty(searchResponse.getShardFailures()) == false) {
logger.info("iteration [{}] - shard failures: {}", iteration, Arrays.toString(searchResponse.getShardFailures()));
}
logger.info("iteration [{}] - returned documents: {} (expected {})", iteration,
@@ -26,6 +26,7 @@
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.util.CollectionUtils;

import java.util.ArrayList;
import java.util.HashSet;
@@ -184,7 +185,7 @@ public static CollapseTopFieldDocs merge(Sort sort, int start, int size,
if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
}
if (shard.scoreDocs != null && shard.scoreDocs.length > 0) {
if (CollectionUtils.isEmpty(shard.scoreDocs) == false) {
availHitCount += shard.scoreDocs.length;
queue.add(new ShardRef(shardIDX, setShardIndex == false));
}
@@ -42,6 +42,7 @@
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
@@ -228,7 +229,7 @@ private static int getWaitCount(ClusterHealthRequest request) {
if (request.waitForNodes().isEmpty() == false) {
waitCount++;
}
if (request.indices() != null && request.indices().length > 0) { // check that they actually exists in the meta data
if (CollectionUtils.isEmpty(request.indices()) == false) { // check that they actually exists in the meta data
waitCount++;
}
return waitCount;
@@ -282,7 +283,7 @@ static int prepareResponse(final ClusterHealthRequest request, final ClusterHeal
waitForCounter++;
}
}
if (request.indices() != null && request.indices().length > 0) {
if (CollectionUtils.isEmpty(request.indices()) == false) {
try {
indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), true, request.indices());
waitForCounter++;
@@ -39,6 +39,7 @@
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus;
@@ -208,7 +209,7 @@ private void buildResponse(@Nullable SnapshotsInProgress snapshotsInProgress, Sn
}
// Now add snapshots on disk that are not currently running
final String repositoryName = request.repository();
if (Strings.hasText(repositoryName) && request.snapshots() != null && request.snapshots().length > 0) {
if (Strings.hasText(repositoryName) && CollectionUtils.isEmpty(request.snapshots()) == false) {
loadRepositoryData(snapshotsInProgress, request, builder, currentSnapshotNames, repositoryName, listener);
} else {
listener.onResponse(new SnapshotsStatusResponse(Collections.unmodifiableList(builder)));
@@ -27,6 +27,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
@@ -203,7 +204,7 @@ public void writeTo(final StreamOutput out) throws IOException {
}

public boolean hasFailures() {
return failures != null && failures.length > 0;
return CollectionUtils.isEmpty(failures) == false;
}

public int getId() {
@@ -33,6 +33,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -118,7 +119,7 @@ public ActionRequestValidationException validate() {
} else if (source.isEmpty()) {
validationException = addValidationError("mapping source is empty", validationException);
}
if (concreteIndex != null && (indices != null && indices.length > 0)) {
if (concreteIndex != null && CollectionUtils.isEmpty(indices) == false) {
validationException = addValidationError("either concrete index or unresolved indices can be set, concrete index: ["
+ concreteIndex + "] and indices: " + Arrays.asList(indices) , validationException);
}
@@ -24,6 +24,7 @@
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.RestStatus;

@@ -119,7 +120,7 @@ public Throwable getCause() {
private static String buildMessage(String phaseName, String msg, ShardSearchFailure[] shardFailures) {
StringBuilder sb = new StringBuilder();
sb.append("Failed to execute phase [").append(phaseName).append("], ").append(msg);
if (shardFailures != null && shardFailures.length > 0) {
if (CollectionUtils.isEmpty(shardFailures) == false) {
sb.append("; shardFailures ");
for (ShardSearchFailure shardFailure : shardFailures) {
if (shardFailure.shard() != null) {
@@ -26,6 +26,7 @@
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;

@@ -159,7 +160,7 @@ public final Request setTimeout(String timeout) {
}

public boolean match(Task task) {
if (getActions() != null && getActions().length > 0 && Regex.simpleMatch(getActions(), task.getAction()) == false) {
if (CollectionUtils.isEmpty(getActions()) == false && Regex.simpleMatch(getActions(), task.getAction()) == false) {
return false;
}
if (getTaskId().isSet()) {
@@ -31,6 +31,7 @@
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
@@ -311,7 +312,7 @@ private static IllegalArgumentException dataStreamsNotSupportedException(String
* @return the concrete index obtained as a result of the index resolution
*/
public Index concreteSingleIndex(ClusterState state, IndicesRequest request) {
String indexExpression = request.indices() != null && request.indices().length > 0 ? request.indices()[0] : null;
String indexExpression = CollectionUtils.isEmpty(request.indices()) ? null : request.indices()[0];
Index[] indices = concreteIndices(state, request.indicesOptions(), indexExpression);
if (indices.length != 1) {
throw new IllegalArgumentException("unable to return a single index as the index and options" +
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.common.util.CollectionUtils;

import java.io.Reader;

@@ -97,7 +98,7 @@ protected TokenStreamComponents createComponents(String fieldName) {
@Override
protected Reader initReader(String fieldName, Reader reader) {
CharFilterFactory[] charFilters = charFilters();
if (charFilters != null && charFilters.length > 0) {
if (CollectionUtils.isEmpty(charFilters) == false) {
for (CharFilterFactory charFilter : charFilters) {
reader = charFilter.create(reader);
}
@@ -24,6 +24,7 @@
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;

import java.io.Reader;
import java.util.Map;
@@ -152,7 +153,7 @@ protected TokenStreamComponents createComponents(String fieldName) {
@Override
protected Reader initReader(String fieldName, Reader reader) {
final AnalyzerComponents components = getStoredComponents();
if (components.getCharFilters() != null && components.getCharFilters().length > 0) {
if (CollectionUtils.isEmpty(components.getCharFilters()) == false) {
for (CharFilterFactory charFilter : components.getCharFilters()) {
reader = charFilter.create(reader);
}
@@ -31,6 +31,7 @@
import org.elasticsearch.common.FieldMemoryStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.search.suggest.completion.CompletionStats;

import java.util.function.Supplier;
@@ -123,7 +124,7 @@ CompletionStats get(String... fieldNamePatterns) {

private static CompletionStats filterCompletionStatsByFieldName(String[] fieldNamePatterns, CompletionStats fullCompletionStats) {
final FieldMemoryStats fieldMemoryStats;
if (fieldNamePatterns != null && fieldNamePatterns.length > 0) {
if (CollectionUtils.isEmpty(fieldNamePatterns) == false) {
final ObjectLongHashMap<String> completionFields = new ObjectLongHashMap<>(fieldNamePatterns.length);
for (ObjectLongCursor<String> fieldCursor : fullCompletionStats.getFields()) {
if (Regex.simpleMatch(fieldNamePatterns, fieldCursor.key)) {
@@ -24,6 +24,7 @@
import org.elasticsearch.common.FieldMemoryStats;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.shard.ShardId;

@@ -38,7 +39,7 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {

public FieldDataStats stats(String... fields) {
ObjectLongHashMap<String> fieldTotals = null;
if (fields != null && fields.length > 0) {
if (CollectionUtils.isEmpty(fields) == false) {
fieldTotals = new ObjectLongHashMap<>();
for (Map.Entry<String, CounterMetric> entry : perFieldTotals.entrySet()) {
if (Regex.simpleMatch(fields, entry.getKey())) {
@@ -31,6 +31,7 @@
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -192,7 +193,7 @@ private SourceFieldMapper(boolean enabled, String[] includes, String[] excludes,
this.enabled = enabled;
this.includes = includes;
this.excludes = excludes;
final boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
final boolean filtered = CollectionUtils.isEmpty(includes) == false || CollectionUtils.isEmpty(excludes) == false;
this.filter = enabled && filtered && fieldType().stored() ? XContentMapValues.filter(includes, excludes) : null;
this.complete = enabled && includes == null && excludes == null;
}
@@ -29,6 +29,7 @@
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.search.QueryParserHelper;
@@ -273,7 +274,7 @@ public Operator defaultOperator() {
* none are specified.
*/
public SimpleQueryStringBuilder flags(SimpleQueryStringFlag... flags) {
if (flags != null && flags.length > 0) {
if (CollectionUtils.isEmpty(flags) == false) {
int value = 0;
for (SimpleQueryStringFlag flag : flags) {
value |= flag.value;
@@ -23,6 +23,7 @@
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext;

@@ -47,7 +48,7 @@ public final class ShardSearchStats implements SearchOperationListener {
public SearchStats stats(String... groups) {
SearchStats.Stats total = totalStats.stats();
Map<String, SearchStats.Stats> groupsSt = null;
if (groups != null && groups.length > 0) {
if (CollectionUtils.isEmpty(groups) == false) {
groupsSt = new HashMap<>(groupsStats.size());
if (groups.length == 1 && groups[0].equals("_all")) {
for (Map.Entry<String, StatsHolder> entry : groupsStats.entrySet()) {
@@ -28,6 +28,7 @@
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -88,7 +89,7 @@ public static void buildBroadcastShardsHeader(XContentBuilder builder, Params pa
builder.field(SKIPPED_FIELD.getPreferredName(), skipped);
}
builder.field(FAILED_FIELD.getPreferredName(), failed);
if (shardFailures != null && shardFailures.length > 0) {
if (CollectionUtils.isEmpty(shardFailures) == false) {
builder.startArray(FAILURES_FIELD.getPreferredName());
for (ShardOperationFailedException shardFailure : ExceptionsHelper.groupBy(shardFailures)) {
shardFailure.toXContent(builder, params);
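
For reference, the array overload of CollectionUtils.isEmpty relied on throughout these hunks is assumed to be the direct inverse of the checks it replaces. A minimal sketch of that contract (a hypothetical reimplementation, not copied from org.elasticsearch.common.util.CollectionUtils):

// Assumed contract, inferred from the replacements in this commit:
// isEmpty(array) is true exactly when the array is null or has length zero,
// so "isEmpty(x) == false" is equivalent to "x != null && x.length > 0".
public static boolean isEmpty(Object[] array) {
    return array == null || array.length == 0;
}
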
