[Remove] LegacyESVersion.V_7_0_* and V_7_1_* constants #2768

Merged: 7 commits, Oct 7, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -79,6 +79,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
 ### Removed
 - Remove deprecated code to add node name into log pattern of log4j property file ([#4568](https://github.com/opensearch-project/OpenSearch/pull/4568))
 - Unused object and import within TransportClusterAllocationExplainAction ([#4639](https://github.com/opensearch-project/OpenSearch/pull/4639))
+- Remove LegacyESVersion.V_7_0_* and V_7_1_* Constants ([#2768](https://github.com/opensearch-project/OpenSearch/pull/2768))

 ### Fixed

@@ -485,19 +485,10 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
         filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer())));
         filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, false, input -> new EdgeNGramTokenFilter(input, 1)));
         filters.add(PreConfiguredTokenFilter.openSearchVersion("edgeNGram", false, false, (reader, version) -> {
-            if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
-                throw new IllegalArgumentException(
-                    "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
-                        + "Please change the filter name to [edge_ngram] instead."
-                );
-            } else {
-                deprecationLogger.deprecate(
-                    "edgeNGram_deprecation",
-                    "The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
-                        + "Please change the filter name to [edge_ngram] instead."
-                );
-            }
-            return new EdgeNGramTokenFilter(reader, 1);
+            throw new IllegalArgumentException(
+                "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+                    + "Please change the filter name to [edge_ngram] instead."
+            );
         }));
         filters.add(
             PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES))
@@ -524,19 +515,10 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
         );
         filters.add(PreConfiguredTokenFilter.singleton("ngram", false, false, reader -> new NGramTokenFilter(reader, 1, 2, false)));
         filters.add(PreConfiguredTokenFilter.openSearchVersion("nGram", false, false, (reader, version) -> {
-            if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
-                throw new IllegalArgumentException(
-                    "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
-                        + "Please change the filter name to [ngram] instead."
-                );
-            } else {
-                deprecationLogger.deprecate(
-                    "nGram_deprecation",
-                    "The [nGram] token filter name is deprecated and will be removed in a future version. "
-                        + "Please change the filter name to [ngram] instead."
-                );
-            }
-            return new NGramTokenFilter(reader, 1, 2, false);
+            throw new IllegalArgumentException(
+                "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+                    + "Please change the filter name to [ngram] instead."
+            );
         }));
         filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new));
         filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new));
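With the version gate removed, the legacy "edgeNGram" and "nGram" filter names now fail outright instead of falling back to a deprecation warning for pre-7.0.0 indices. A hedged, test-style sketch of the resulting behavior; `createFilterFactory` is a hypothetical helper standing in for the analysis-registry plumbing, not an API from this PR:

```java
// Hypothetical sketch: the legacy "nGram" filter name is now rejected for every
// index version, where pre-7.0.0 indices previously got only a deprecation warning.
IllegalArgumentException e = expectThrows(
    IllegalArgumentException.class,
    () -> createFilterFactory("nGram")  // hypothetical helper over the analysis registry
);
assertTrue(e.getMessage().contains("Please change the filter name to [ngram] instead."));
```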
@@ -34,7 +34,6 @@

 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.ngram.NGramTokenFilter;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.env.Environment;
 import org.opensearch.index.IndexSettings;
@@ -54,25 +53,15 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
         this.maxGram = settings.getAsInt("max_gram", 2);
         int ngramDiff = maxGram - minGram;
         if (ngramDiff > maxAllowedNgramDiff) {
-            if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
-                throw new IllegalArgumentException(
-                    "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
-                        + maxAllowedNgramDiff
-                        + "] but was ["
-                        + ngramDiff
-                        + "]. This limit can be set by changing the ["
-                        + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey()
-                        + "] index level setting."
-                );
-            } else {
-                deprecationLogger.deprecate(
-                    "ngram_big_difference",
-                    "Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
-                        + "expected difference must be less than or equal to: ["
-                        + maxAllowedNgramDiff
-                        + "]"
-                );
-            }
+            throw new IllegalArgumentException(
+                "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+                    + maxAllowedNgramDiff
+                    + "] but was ["
+                    + ngramDiff
+                    + "]. This limit can be set by changing the ["
+                    + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey()
+                    + "] index level setting."
+            );
         }
         preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, false);
     }
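The same collapse applies above: with the default `index.max_ngram_diff` of 1, a filter configured with `min_gram: 1` and `max_gram: 3` (a diff of 2) is now rejected for every index version. A standalone sketch of the now-unconditional check, with simplified names and a shortened message rather than the OpenSearch API:

```java
// Standalone sketch of the validation; validateNgramDiff(1, 3, 1) throws.
static void validateNgramDiff(int minGram, int maxGram, int maxAllowedNgramDiff) {
    int ngramDiff = maxGram - minGram;
    if (ngramDiff > maxAllowedNgramDiff) {
        throw new IllegalArgumentException(
            "The difference between max_gram and min_gram in NGram Tokenizer must be "
                + "less than or equal to: [" + maxAllowedNgramDiff + "] but was [" + ngramDiff + "]."
        );
    }
}
```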
@@ -32,7 +32,6 @@

 package org.opensearch.script.mustache;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.ActionResponse;
 import org.opensearch.action.search.MultiSearchResponse;
@@ -125,11 +124,7 @@ public String toString() {
     MultiSearchTemplateResponse(StreamInput in) throws IOException {
         super(in);
         items = in.readArray(Item::new, Item[]::new);
-        if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
-            tookInMillis = in.readVLong();
-        } else {
-            tookInMillis = -1L;
-        }
+        tookInMillis = in.readVLong();
     }

     MultiSearchTemplateResponse(Item[] items, long tookInMillis) {
@@ -159,9 +154,7 @@ public TimeValue getTook() {
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeArray(items);
-        if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) {
-            out.writeVLong(tookInMillis);
-        }
+        out.writeVLong(tookInMillis);
     }

     @Override
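With the oldest wire-compatible version now past legacy 7.0.0, `tookInMillis` is always present on the stream, so the constructor and `writeTo` above stay symmetric without a version check. A minimal round-trip sketch of that invariant (test-style, not part of this diff):

```java
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;

class TookRoundTripSketch {
    static void roundTrip() throws java.io.IOException {
        // Whatever writeTo() emits, the StreamInput constructor must read back
        // in the same order, now unconditionally for tookInMillis.
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeVLong(42L);                      // writeTo: out.writeVLong(tookInMillis)
        try (StreamInput in = out.bytes().streamInput()) {
            assert in.readVLong() == 42L;         // ctor: tookInMillis = in.readVLong()
        }
    }
}
```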
@@ -42,7 +42,6 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.SpecialPermission;
 import org.opensearch.common.SuppressForbidden;
 import org.opensearch.core.internal.io.IOUtils;
@@ -61,7 +60,7 @@
 /**
  * Wrapper around reads from GCS that will retry blob downloads that fail part-way through, resuming from where the failure occurred.
  * This should be handled by the SDK but it isn't today. This should be revisited in the future (e.g. before removing
- * the {@link LegacyESVersion#V_7_0_0} version constant) and removed if the SDK handles retries itself in the future.
+ * the {@code LegacyESVersion#V_7_0_0} version constant) and removed if the SDK handles retries itself in the future.
  */
 class GoogleCloudStorageRetryingInputStream extends InputStream {

@@ -40,7 +40,6 @@
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.opensearch.LegacyESVersion;
 import org.opensearch.core.internal.io.IOUtils;

 import java.io.IOException;
@@ -52,7 +51,7 @@
 /**
  * Wrapper around an S3 object that will retry the {@link GetObjectRequest} if the download fails part-way through, resuming from where
  * the failure occurred. This should be handled by the SDK but it isn't today. This should be revisited in the future (e.g. before removing
- * the {@link LegacyESVersion#V_7_0_0} version constant) and removed when the SDK handles retries itself.
+ * the {@code LegacyESVersion#V_7_0_0} version constant) and removed when the SDK handles retries itself.
  *
  * See https://github.com/aws/aws-sdk-java/issues/856 for the related SDK issue
  */
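Both javadoc fixes above describe the same pattern: a stream wrapper that reopens the blob at the last successfully read offset when a download dies part-way through. A generic sketch of that retry-and-resume idea, simplified from what the GCS and S3 wrappers actually do (the real classes also handle SDK-specific exceptions and configurable attempt limits):

```java
import java.io.IOException;
import java.io.InputStream;

// Generic retry-and-resume wrapper: on failure, reopen at the last good offset.
abstract class ResumingInputStream extends InputStream {
    private InputStream current;
    private long position;          // bytes successfully read so far
    private int attemptsLeft = 3;   // simplified; the real wrappers make this configurable

    /** Reopen the underlying blob starting at the given byte offset. */
    protected abstract InputStream openAt(long offset) throws IOException;

    @Override
    public int read() throws IOException {
        while (true) {
            try {
                if (current == null) {
                    current = openAt(position);
                }
                int b = current.read();
                if (b != -1) {
                    position++;     // only advance past bytes actually delivered
                }
                return b;
            } catch (IOException e) {
                if (--attemptsLeft <= 0) {
                    throw e;
                }
                current = null;     // resume from `position` on the next attempt
            }
        }
    }
}
```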
@@ -329,9 +329,6 @@ public void testShrink() throws IOException {
         client().performRequest(updateSettingsRequest);

         Request shrinkIndexRequest = new Request("PUT", "/" + index + "/_shrink/" + shrunkenIndex);
-        if (getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
-            shrinkIndexRequest.addParameter("copy_settings", "true");
-        }
         shrinkIndexRequest.setJsonEntity("{\"settings\": {\"index.number_of_shards\": 1}}");
         client().performRequest(shrinkIndexRequest);

@@ -1253,7 +1250,7 @@ public void testPeerRecoveryRetentionLeases() throws Exception {
             settings.startObject("settings");
             settings.field("number_of_shards", between(1, 5));
             settings.field("number_of_replicas", between(0, 1));
-            if (randomBoolean() || getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
+            if (randomBoolean()) {
                 // this is the default after v7.0.0, but is required before that
                 settings.field("soft_deletes.enabled", true);
             }
@@ -1436,10 +1433,6 @@ public void testSystemIndexMetadataIsUpgraded() throws Exception {
         // make sure .tasks index exists
         Request getTasksIndex = new Request("GET", "/.tasks");
         getTasksIndex.addParameter("allow_no_indices", "false");
-        if (getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) {
-            getTasksIndex.addParameter("include_type_name", "false");
-        }
-
         getTasksIndex.setOptions(expectVersionSpecificWarnings(v -> {
             v.current(systemIndexWarning);
             v.compatible(systemIndexWarning);
@@ -46,6 +46,7 @@
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.query.BoolQueryBuilder;
 import org.opensearch.index.query.ConstantScoreQueryBuilder;
 import org.opensearch.index.query.DisMaxQueryBuilder;
@@ -157,7 +158,7 @@ private static void addCandidate(String querySource, QueryBuilder expectedQb) {
     }

     public void testQueryBuilderBWC() throws Exception {
-        final String type = getOldClusterVersion().before(LegacyESVersion.V_7_0_0) ? "doc" : "_doc";
+        final String type = MapperService.SINGLE_MAPPING_NAME;
         String index = "queries";
         if (isRunningAgainstOldCluster()) {
             XContentBuilder mappingsAndSettings = jsonBuilder();
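For context, `MapperService.SINGLE_MAPPING_NAME` is the `"_doc"` constant, so the test now always uses the single mapping type that 7.0.0+ indices require instead of switching on the old cluster's version:

```java
// From org.opensearch.index.mapper.MapperService:
public static final String SINGLE_MAPPING_NAME = "_doc";
```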