From 253b9986810e3879fa00c6a370eb848b8bb18a60 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Fri, 8 Jun 2018 07:51:45 +0200 Subject: [PATCH 01/20] flush job to ensure all results have been written (#31187) Flush the ML job to ensure all results have been written. Fixes #31173 --- .../xpack/ml/integration/ForecastIT.java | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 18b1071280307..2f3ea6c83a536 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -239,6 +239,9 @@ public void testOverflowToDisk() throws Exception { throw e; } + // flushing the job forces an index refresh, see https://github.com/elastic/elasticsearch/issues/31173 + flushJob(job.getId(), false); + List<ForecastRequestStats> forecastStats = getForecastStats(); assertThat(forecastStats.size(), equalTo(1)); ForecastRequestStats forecastRequestStats = forecastStats.get(0); @@ -261,6 +264,16 @@ } closeJob(job.getId()); + + forecastStats = getForecastStats(); + assertThat(forecastStats.size(), equalTo(2)); + for (ForecastRequestStats stats : forecastStats) { + forecasts = getForecasts(job.getId(), stats); + + assertThat(stats.getRecordCount(), equalTo(8000L)); + assertThat(forecasts.size(), equalTo(8000)); + } + } private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) throws IOException { From 435a825a53dbabc93add9d400c1a8653e16b8aad Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 8 Jun 2018 08:33:01 +0200 Subject: [PATCH 02/20] Default max concurrent search req. numNodes * 5 (#31171) We moved to 1 shard by default, which caused some issues with how many concurrent shard requests we allow by default. For instance, searching a 5-shard index on a single node would now be executed serially, shard by shard, while we want these cases to have good concurrency out of the box. This change moves to `numNodes * 5`, which corresponds to the default we used to have in the previous version. Relates to #30783 Closes #30994 --- .../org/elasticsearch/action/search/TransportSearchAction.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 46207b94c3af4..ac9248ef98d41 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -346,7 +346,8 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea * it sane. A single search request that fans out to lots of shards should not hit a cluster too hard while 256 is already a * lot. */ - searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount)); + // we use nodeCount * 5 as we used to default this to the default number of shards, which used to be 5.
+ searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount * 5)); } boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators); searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(), From 07a57cc1310f04ee0544b9222e737897e9e3ff99 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Fri, 8 Jun 2018 08:58:46 +0200 Subject: [PATCH 03/20] Move number of language analyzers to analysis-common module (#31143) The following analyzers were moved from server module to analysis-common module: `snowball`, `arabic`, `armenian`, `basque`, `bengali`, `brazilian`, `bulgarian`, `catalan`, `chinese`, `cjk`, `czech`, `danish`, `dutch`, `english`, `finnish`, `french`, `galician` and `german`. Relates to #23658 --- .../common}/ArabicAnalyzerProvider.java | 6 +- .../common}/ArmenianAnalyzerProvider.java | 6 +- .../common}/BasqueAnalyzerProvider.java | 6 +- .../common}/BengaliAnalyzerProvider.java | 6 +- .../common}/BrazilianAnalyzerProvider.java | 6 +- .../common}/BulgarianAnalyzerProvider.java | 6 +- .../common}/CatalanAnalyzerProvider.java | 6 +- .../common}/ChineseAnalyzerProvider.java | 9 +- .../analysis/common}/CjkAnalyzerProvider.java | 6 +- .../analysis/common/CommonAnalysisPlugin.java | 140 ++++- .../common}/CzechAnalyzerProvider.java | 6 +- .../common}/DanishAnalyzerProvider.java | 6 +- .../common}/DutchAnalyzerProvider.java | 6 +- .../common}/EnglishAnalyzerProvider.java | 6 +- .../common}/FinnishAnalyzerProvider.java | 6 +- .../common}/FrenchAnalyzerProvider.java | 6 +- .../common}/GalicianAnalyzerProvider.java | 6 +- .../common}/GermanAnalyzerProvider.java | 6 +- .../analysis/common}/SnowballAnalyzer.java | 6 +- .../common}/SnowballAnalyzerProvider.java | 6 +- .../common}/SnowballAnalyzerTests.java | 10 +- .../test/analysis-common/20_analyzers.yml | 523 ++++++++++++++++++ .../test/search.query/40_query_string.yml | 58 ++ .../TokenCountFieldMapperIntegrationIT.java | 15 +- .../test/update_by_query/30_new_fields.yml | 7 +- .../test/count/20_query_string.yml | 8 - .../test/explain/30_query_string.yml | 10 - .../20_query_string.yml | 8 - .../test/search/60_query_string.yml | 8 - .../indices/analysis/AnalysisModule.java | 36 -- .../indices/analysis/PreBuiltAnalyzers.java | 179 ------ .../index/analysis/PreBuiltAnalyzerTests.java | 19 +- .../index/mapper/TextFieldMapperTests.java | 40 +- .../query/QueryStringQueryBuilderTests.java | 4 +- .../query/SimpleQueryStringBuilderTests.java | 4 +- .../highlight/HighlighterSearchIT.java | 53 +- .../search/query/SimpleQueryStringIT.java | 59 +- .../validate/SimpleValidateQueryIT.java | 2 +- 38 files changed, 952 insertions(+), 348 deletions(-) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ArabicAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ArmenianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/BasqueAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/BengaliAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => 
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/BrazilianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/BulgarianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/CatalanAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ChineseAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/CjkAnalyzerProvider.java (84%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/CzechAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/DanishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/DutchAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/EnglishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/FinnishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/FrenchAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/GalicianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/GermanAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SnowballAnalyzer.java (95%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SnowballAnalyzerProvider.java (92%) rename {server/src/test/java/org/elasticsearch/index/analysis => modules/analysis-common/src/test/java/org/elasticsearch/analysis/common}/SnowballAnalyzerTests.java (97%) create mode 100644 modules/analysis-common/src/test/resources/rest-api-spec/test/search.query/40_query_string.yml diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java index 8dcc6cc907569..f5e1d882d2b8d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ArabicAnalyzer arabicAnalyzer; - public ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); arabicAnalyzer = new ArabicAnalyzer( Analysis.parseStopWords(env, settings, ArabicAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java index ba9f55f331f2b..d066aed14eeaf 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ArmenianAnalyzer analyzer; - public ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ArmenianAnalyzer( Analysis.parseStopWords(env, settings, ArmenianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java index 45ff947c61eac..8fe32a697f756 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BasqueAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BasqueAnalyzer analyzer; - public BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BasqueAnalyzer( Analysis.parseStopWords(env, settings, BasqueAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java index 41931833301cf..5696d0a604555 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BengaliAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BengaliAnalyzer analyzer; - public BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BengaliAnalyzer( Analysis.parseStopWords(env, settings, BengaliAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java index 36b13e67bf4ee..07399a2263dcc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.br.BrazilianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BrazilianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BrazilianAnalyzer analyzer; - public BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BrazilianAnalyzer( Analysis.parseStopWords(env, settings, BrazilianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java index 26e82cbfb2f70..a6b1cb97a1b5c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BulgarianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BulgarianAnalyzer analyzer; - public BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BulgarianAnalyzer( Analysis.parseStopWords(env, settings, BulgarianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java index 94babaa52f83d..db229ffb4924a 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CatalanAnalyzer analyzer; - public CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new CatalanAnalyzer( Analysis.parseStopWords(env, settings, CatalanAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java index 10e6f0dc42f1e..01b529188c6f0 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java @@ -17,12 +17,13 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; /** * Only for old indexes @@ -31,16 +32,16 @@ public class ChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CJKAnalyzer analyzer; - public CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); CharArraySet stopWords = Analysis.parseStopWords(env, settings, CJKAnalyzer.getDefaultStopSet()); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 433bef902c1a1..24dce7abcf370 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -24,11 +24,17 @@ import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicStemFilter; +import org.apache.lucene.analysis.bg.BulgarianAnalyzer; +import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.apache.lucene.analysis.bn.BengaliNormalizationFilter; +import org.apache.lucene.analysis.br.BrazilianAnalyzer; import 
org.apache.lucene.analysis.br.BrazilianStemFilter; +import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; +import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; @@ -40,14 +46,22 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.UpperCaseFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.cz.CzechAnalyzer; import org.apache.lucene.analysis.cz.CzechStemFilter; +import org.apache.lucene.analysis.da.DanishAnalyzer; +import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; +import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.apache.lucene.analysis.fr.FrenchAnalyzer; +import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.apache.lucene.analysis.hi.HindiNormalizationFilter; +import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.apache.lucene.analysis.in.IndicNormalizationFilter; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; @@ -64,6 +78,7 @@ import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenizer; +import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.apache.lucene.analysis.path.PathHierarchyTokenizer; import org.apache.lucene.analysis.pattern.PatternTokenizer; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; @@ -73,6 +88,7 @@ import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.ClassicTokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.th.ThaiTokenizer; import org.apache.lucene.analysis.tr.ApostropheFilter; @@ -113,6 +129,24 @@ public Map>> getAn analyzers.put("fingerprint", FingerprintAnalyzerProvider::new); analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); + analyzers.put("snowball", SnowballAnalyzerProvider::new); + analyzers.put("arabic", ArabicAnalyzerProvider::new); + analyzers.put("armenian", ArmenianAnalyzerProvider::new); + analyzers.put("basque", BasqueAnalyzerProvider::new); + analyzers.put("bengali", BengaliAnalyzerProvider::new); + analyzers.put("brazilian", BrazilianAnalyzerProvider::new); + analyzers.put("bulgarian", BulgarianAnalyzerProvider::new); + analyzers.put("catalan", CatalanAnalyzerProvider::new); + analyzers.put("chinese", ChineseAnalyzerProvider::new); + analyzers.put("cjk", CjkAnalyzerProvider::new); + analyzers.put("czech", CzechAnalyzerProvider::new); + analyzers.put("danish", DanishAnalyzerProvider::new); + analyzers.put("dutch", DutchAnalyzerProvider::new); + 
analyzers.put("english", EnglishAnalyzerProvider::new); + analyzers.put("finnish", FinnishAnalyzerProvider::new); + analyzers.put("french", FrenchAnalyzerProvider::new); + analyzers.put("galician", GalicianAnalyzerProvider::new); + analyzers.put("german", GermanAnalyzerProvider::new); return analyzers; } @@ -213,10 +247,108 @@ public Map> getTokenizers() { @Override public List getPreBuiltAnalyzerProviderFactories() { List analyzers = new ArrayList<>(); - analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, - version -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET))); - analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version -> - new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET))); + analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, version -> { + Analyzer a = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version -> { + Analyzer a = new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, + CharArraySet.EMPTY_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, version -> { + Analyzer a = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, version -> { + Analyzer a = new ArabicAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, version -> { + Analyzer a = new ArmenianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, version -> { + Analyzer a = new BasqueAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("bengali", CachingStrategy.LUCENE, version -> { + Analyzer a = new BengaliAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("brazilian", CachingStrategy.LUCENE, version -> { + Analyzer a = new BrazilianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, version -> { + Analyzer a = new BulgarianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, version -> { + Analyzer a = new CatalanAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, version -> { + // only for old indices, best effort + Analyzer a = new StandardAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, version -> { + Analyzer a = new CJKAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, version -> { + Analyzer a = new 
CzechAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, version -> { + Analyzer a = new DanishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("dutch", CachingStrategy.LUCENE, version -> { + Analyzer a = new DutchAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("english", CachingStrategy.LUCENE, version -> { + Analyzer a = new EnglishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("finnish", CachingStrategy.LUCENE, version -> { + Analyzer a = new FinnishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, version -> { + Analyzer a = new FrenchAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, version -> { + Analyzer a = new GalicianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, version -> { + Analyzer a = new GermanAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java index 12d2349d9bac5..c14c2d57a3afa 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.cz.CzechAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class CzechAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CzechAnalyzer analyzer; - public CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new CzechAnalyzer( Analysis.parseStopWords(env, settings, CzechAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java index 01021cbfd8e19..e3f024693c2d7 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.da.DanishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class DanishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final DanishAnalyzer analyzer; - public DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new DanishAnalyzer( Analysis.parseStopWords(env, settings, DanishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java index e215a89241e9b..70ab2a5ea62f1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class DutchAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final DutchAnalyzer analyzer; - public DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new DutchAnalyzer( Analysis.parseStopWords(env, settings, DutchAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java index 300381ef14754..b14a83dbf7c4f 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class EnglishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final EnglishAnalyzer analyzer; - public EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new EnglishAnalyzer( Analysis.parseStopWords(env, settings, EnglishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java index 95f0819293aae..ec18a71a12541 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class FinnishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final FinnishAnalyzer analyzer; - public FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new FinnishAnalyzer( Analysis.parseStopWords(env, settings, FinnishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java index eac40c375caac..caee3618afbfc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class FrenchAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final FrenchAnalyzer analyzer; - public FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new FrenchAnalyzer( Analysis.parseStopWords(env, settings, FrenchAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java index 57550594dc053..23f8da7b8dded 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GalicianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GalicianAnalyzer analyzer; - public GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GalicianAnalyzer( Analysis.parseStopWords(env, settings, GalicianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java index 7951f17b79db9..cf96f50845f22 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.de.GermanAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GermanAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GermanAnalyzer analyzer; - public GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GermanAnalyzer( Analysis.parseStopWords(env, settings, GermanAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java similarity index 95% rename from server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java index 1a096b8fa4b9f..5dbe902fe1500 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java @@ -1,4 +1,4 @@ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; /* * Licensed to Elasticsearch under one or more contributor @@ -48,12 +48,12 @@ public final class SnowballAnalyzer extends Analyzer { private CharArraySet stopSet; /** Builds the named analyzer with no stop words. 
*/ - public SnowballAnalyzer(String name) { + SnowballAnalyzer(String name) { this.name = name; } /** Builds the named analyzer with the given stop words. */ - public SnowballAnalyzer(String name, CharArraySet stopWords) { + SnowballAnalyzer(String name, CharArraySet stopWords) { this(name); stopSet = CharArraySet.unmodifiableSet(CharArraySet.copy(stopWords)); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java similarity index 92% rename from server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java index 84f1931633100..0f213df9ad722 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.core.StopAnalyzer; @@ -26,6 +26,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; import java.util.HashMap; import java.util.Map; @@ -60,7 +62,7 @@ public class SnowballAnalyzerProvider extends AbstractIndexAnalyzerProvider>> setupAnalyzers(List analyzers.register("stop", StopAnalyzerProvider::new); analyzers.register("whitespace", WhitespaceAnalyzerProvider::new); analyzers.register("keyword", KeywordAnalyzerProvider::new); - analyzers.register("snowball", SnowballAnalyzerProvider::new); - analyzers.register("arabic", ArabicAnalyzerProvider::new); - analyzers.register("armenian", ArmenianAnalyzerProvider::new); - analyzers.register("basque", BasqueAnalyzerProvider::new); - analyzers.register("bengali", BengaliAnalyzerProvider::new); - analyzers.register("brazilian", BrazilianAnalyzerProvider::new); - analyzers.register("bulgarian", BulgarianAnalyzerProvider::new); - analyzers.register("catalan", CatalanAnalyzerProvider::new); - analyzers.register("chinese", ChineseAnalyzerProvider::new); - analyzers.register("cjk", CjkAnalyzerProvider::new); - analyzers.register("czech", CzechAnalyzerProvider::new); - analyzers.register("danish", DanishAnalyzerProvider::new); - analyzers.register("dutch", DutchAnalyzerProvider::new); - analyzers.register("english", EnglishAnalyzerProvider::new); - analyzers.register("finnish", FinnishAnalyzerProvider::new); - analyzers.register("french", FrenchAnalyzerProvider::new); - analyzers.register("galician", GalicianAnalyzerProvider::new); - analyzers.register("german", GermanAnalyzerProvider::new); analyzers.register("greek", GreekAnalyzerProvider::new); analyzers.register("hindi", HindiAnalyzerProvider::new); analyzers.register("hungarian", HungarianAnalyzerProvider::new); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 18cc247b84493..0e9aed3c142d9 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ 
b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -20,37 +20,21 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; -import org.apache.lucene.analysis.ar.ArabicAnalyzer; -import org.apache.lucene.analysis.bg.BulgarianAnalyzer; -import org.apache.lucene.analysis.bn.BengaliAnalyzer; -import org.apache.lucene.analysis.br.BrazilianAnalyzer; -import org.apache.lucene.analysis.ca.CatalanAnalyzer; -import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.analysis.cz.CzechAnalyzer; -import org.apache.lucene.analysis.da.DanishAnalyzer; -import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.el.GreekAnalyzer; -import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.es.SpanishAnalyzer; -import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.apache.lucene.analysis.fa.PersianAnalyzer; -import org.apache.lucene.analysis.fi.FinnishAnalyzer; -import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.apache.lucene.analysis.ga.IrishAnalyzer; -import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.apache.lucene.analysis.hu.HungarianAnalyzer; -import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.apache.lucene.analysis.it.ItalianAnalyzer; import org.apache.lucene.analysis.lt.LithuanianAnalyzer; import org.apache.lucene.analysis.lv.LatvianAnalyzer; -import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.apache.lucene.analysis.no.NorwegianAnalyzer; import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.apache.lucene.analysis.ro.RomanianAnalyzer; @@ -61,7 +45,6 @@ import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.Version; -import org.elasticsearch.index.analysis.SnowballAnalyzer; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import java.util.Locale; @@ -129,168 +112,6 @@ protected Analyzer create(Version version) { } }, - SNOWBALL { - @Override - protected Analyzer create(Version version) { - Analyzer analyzer = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET); - analyzer.setVersion(version.luceneVersion); - return analyzer; - } - }, - - ARABIC { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ArabicAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ARMENIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ArmenianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BASQUE { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BasqueAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BENGALI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BengaliAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BRAZILIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BrazilianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - 
BULGARIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BulgarianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CATALAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CatalanAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CHINESE(CachingStrategy.ONE) { - @Override - protected Analyzer create(Version version) { - Analyzer a = new StandardAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CJK { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CJKAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CZECH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CzechAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - DUTCH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new DutchAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - DANISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new DanishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ENGLISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new EnglishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - FINNISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new FinnishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - FRENCH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new FrenchAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - GALICIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GalicianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - GERMAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GermanAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - GREEK { @Override protected Analyzer create(Version version) { diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index d0ffdbe229dd6..8c4879fd35e82 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -61,14 +61,17 @@ public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { } public void testThatInstancesAreCachedAndReused() { - assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)); - // same lucene version should be cached - assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_1), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2)); - - assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_0), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_1)); + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT)); + // same es version should be cached + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1)); + assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_0), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_1)); + + // Same Lucene version should be cached: 
+ assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_1), + PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_2)); } public void testThatAnalyzersAreUsedInMapping() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index ed8274fad05da..e3dc8ff0b78fc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -87,6 +86,9 @@ public void setup() { .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto")) .put("index.analysis.analyzer.synonym.tokenizer", "standard") .put("index.analysis.analyzer.synonym.filter", "mySynonyms") + // Stop filter remains in server as it is part of lucene-core + .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard") + .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop") .build(); indexService = createIndex("test", settings); parser = indexService.mapperService().documentMapperParser(); @@ -621,7 +623,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "offsets") .endObject().endObject().endObject().endObject()); @@ -637,7 +639,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "freqs") .endObject().endObject().endObject().endObject()); @@ -654,7 +656,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "positions") .endObject().endObject().endObject().endObject()); @@ -675,7 +677,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("term_vector", "with_positions_offsets") .endObject().endObject().endObject().endObject()); @@ -696,7 +698,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("term_vector", "with_positions") .endObject().endObject().endObject().endObject()); @@ -725,7 +727,7 @@ public void testFastPhraseMapping() throws IOException { .startObject("properties") .startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "my_stop_analyzer") .field("index_phrases", true) .endObject() .startObject("synfield") @@ -742,20 +744,20 @@ public void testFastPhraseMapping() throws IOException { queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); Query q = new MatchPhraseQueryBuilder("field", "two words").toQuery(queryShardContext); - assertThat(q, is(new PhraseQuery("field._index_phrase", "two word"))); + assertThat(q, is(new PhraseQuery("field._index_phrase", "two words"))); Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext); - assertThat(q2, is(new PhraseQuery("field._index_phrase", "three word", "word here"))); + assertThat(q2, is(new PhraseQuery("field._index_phrase", "three words", "words here"))); Query q3 = new MatchPhraseQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext); - assertThat(q3, is(new PhraseQuery(1, "field", "two", "word"))); + assertThat(q3, is(new PhraseQuery(1, "field", "two", "words"))); Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext); assertThat(q4, is(new TermQuery(new Term("field", "singleton")))); Query q5 = new MatchPhraseQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); assertThat(q5, - is(new PhraseQuery.Builder().add(new Term("field", "sparkl")).add(new Term("field", "stopword"), 2).build())); + is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build())); Query q6 = new MatchPhraseQueryBuilder("synfield", "motor car").toQuery(queryShardContext); assertThat(q6, is(new MultiPhraseQuery.Builder() @@ -778,7 +780,7 @@ public void testFastPhraseMapping() throws IOException { CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); ts.reset(); assertTrue(ts.incrementToken()); - assertEquals("some english", termAtt.toString()); + assertEquals("Some English", termAtt.toString()); } { @@ -821,7 +823,7 @@ public void testIndexPrefixMapping() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 10) @@ -855,7 +857,7 @@ public void testIndexPrefixMapping() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .endObject().endObject() .endObject().endObject()); @@ -880,7 +882,7 @@ public void testIndexPrefixMapping() throws IOException { String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") 
.startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 10) @@ -903,7 +905,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 11) .field("max_chars", 10) @@ -920,7 +922,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 0) .field("max_chars", 10) @@ -937,7 +939,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 25) @@ -954,7 +956,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .field("index_prefixes", (String) null) .endObject().endObject() .endObject().endObject()); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index aafc66b3985e1..1c8bc48fb7ef3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1300,7 +1300,7 @@ public void testWithStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new QueryStringQueryBuilder("the quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), Occur.SHOULD) @@ -1313,7 +1313,7 @@ public void testWithPrefixStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new QueryStringQueryBuilder("the* quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), Occur.SHOULD) diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 7ff2b7ec12285..0cd5e7fe330eb 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -629,7 +629,7 @@ public void testWithStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", 
getCurrentTypes().length > 0);
         Query query = new SimpleQueryStringBuilder("the quick fox")
             .field(STRING_FIELD_NAME)
-            .analyzer("english")
+            .analyzer("stop")
             .toQuery(createShardContext());
         BooleanQuery expected = new BooleanQuery.Builder()
             .add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), BooleanClause.Occur.SHOULD)
@@ -642,7 +642,7 @@ public void testWithPrefixStopWords() throws Exception {
         assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
         Query query = new SimpleQueryStringBuilder("the* quick fox")
             .field(STRING_FIELD_NAME)
-            .analyzer("english")
+            .analyzer("stop")
             .toQuery(createShardContext());
         BooleanQuery expected = new BooleanQuery.Builder()
             .add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), BooleanClause.Occur.SHOULD)
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 717bab12ea5cb..35c5a19cc2e8c 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -22,6 +22,9 @@
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchRequestBuilder;
@@ -36,6 +39,7 @@
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
 import org.elasticsearch.index.analysis.AnalyzerProvider;
+import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.IdsQueryBuilder;
 import org.elasticsearch.index.query.MatchQueryBuilder;
@@ -66,9 +70,11 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;

+import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.client.Requests.searchRequest;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@@ -113,7 +119,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockWhitespacePlugin.class);
+        return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockAnalysisPlugin.class);
     }

     public void testHighlightingWithStoredKeyword() throws IOException {
@@ -765,14 +771,19 @@ public void testMatchedFieldsFvhNoRequireFieldMatch() throws Exception {
     }

     private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception {
+        Settings.Builder settings = Settings.builder();
+        settings.put(indexSettings());
+        settings.put("index.analysis.analyzer.mock_english.tokenizer", "standard");
+        settings.put("index.analysis.analyzer.mock_english.filter", "mock_snowball");
         assertAcked(prepareCreate("test")
+            .setSettings(settings)
             .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("properties")
                 .startObject("foo")
                     .field("type", "text")
                     .field("term_vector", "with_positions_offsets")
                     .field("store", true)
-                    .field("analyzer", "english")
+                    .field("analyzer", "mock_english")
                     .startObject("fields")
                         .startObject("plain")
                             .field("type", "text")
@@ -785,7 +796,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception
                     .field("type", "text")
                     .field("term_vector", "with_positions_offsets")
                     .field("store", true)
-                    .field("analyzer", "english")
+                    .field("analyzer", "mock_english")
                     .startObject("fields")
                         .startObject("plain")
                             .field("type", "text")
@@ -2819,7 +2830,7 @@ public void testSynonyms() throws IOException {
         assertAcked(prepareCreate("test").setSettings(builder.build())
             .addMapping("type1", "field1",
                 "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," +
-                    "analyzer=english,index_options=offsets"));
+                    "analyzer=standard,index_options=offsets"));
         ensureGreen();

         client().prepareIndex("test", "type1", "0").setSource(
@@ -2983,7 +2994,39 @@ public void testWithNormalizer() throws Exception {
         }
     }

-    public static class MockWhitespacePlugin extends Plugin implements AnalysisPlugin {
+    public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin {
+
+        public final class MockSnowBall extends TokenFilter {
+            private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+
+            /** Sole constructor. */
+            MockSnowBall(TokenStream in) {
+                super(in);
+            }
+
+            @Override
+            public boolean incrementToken() throws IOException {
+                if (input.incrementToken()) {
+                    final char[] buffer = termAtt.buffer();
+                    final int length = termAtt.length();
+                    if (buffer[length - 1] == 's') {
+                        termAtt.setLength(length - 1);
+                    }
+                    if (length > 3) {
+                        if (buffer[length - 1] == 'g' && buffer[length - 2] == 'n' && buffer[length - 3] == 'i') {
+                            termAtt.setLength(length - 3);
+                        }
+                    }
+                    return true;
+                } else
+                    return false;
+            }
+        }
+
+        @Override
+        public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
+            return singletonList(PreConfiguredTokenFilter.singleton("mock_snowball", false, MockSnowBall::new));
+        }

     @Override
     public Map<String, AnalysisProvider<AnalyzerProvider<? extends Analyzer>>> getAnalyzers() {
diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
index 7aef2d208ecc5..147caa4c1c131 100644
--- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
+++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java
@@ -19,6 +19,12 @@
 package org.elasticsearch.search.query;

+import org.apache.lucene.analysis.CharacterUtils;
+import org.apache.lucene.analysis.MockLowerCaseFilter;
+import org.apache.lucene.analysis.MockTokenizer;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -28,12 +34,19 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.MultiTermAwareComponent;
+import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
+import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
+import org.elasticsearch.index.analysis.TokenizerFactory;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.SimpleQueryStringBuilder;
 import org.elasticsearch.index.query.SimpleQueryStringFlag;
+import org.elasticsearch.indices.analysis.AnalysisModule;
+import org.elasticsearch.plugins.AnalysisPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
@@ -42,14 +55,19 @@
 import org.elasticsearch.test.InternalSettingsPlugin;

 import java.io.IOException;
+import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
+import java.util.function.Function;

+import static java.util.Collections.singletonList;
+import static java.util.Collections.singletonMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
 import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery;
@@ -72,11 +90,15 @@ public class SimpleQueryStringIT extends ESIntegTestCase {

     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created
+        return Arrays.asList(MockAnalysisPlugin.class, InternalSettingsPlugin.class); // uses index.version.created
     }

     public void testSimpleQueryString() throws ExecutionException, InterruptedException {
-        createIndex("test");
+        Settings.Builder settings = Settings.builder();
+        settings.put(indexSettings());
+        settings.put("index.analysis.analyzer.mock_snowball.tokenizer", "standard");
+        settings.put("index.analysis.analyzer.mock_snowball.filter", "mock_snowball");
+        createIndex("test", settings.build());
         indexRandom(true, false,
                 client().prepareIndex("test", "type1", "1").setSource("body", "foo"),
                 client().prepareIndex("test", "type1", "2").setSource("body", "bar"),
@@ -108,7 +130,7 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept
         assertSearchHits(searchResponse, "4", "5");

         searchResponse = client().prepareSearch().setQuery(
-                simpleQueryStringQuery("eggplants").analyzer("snowball")).get();
+                simpleQueryStringQuery("eggplants").analyzer("mock_snowball")).get();
         assertHitCount(searchResponse, 1L);
         assertFirstHit(searchResponse, hasId("4"));

@@ -312,7 +334,7 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In
                 .startObject("properties")
                 .startObject("location")
                 .field("type", "text")
-                .field("analyzer", "german")
+                .field("analyzer", "standard")
                 .endObject()
                 .endObject()
                 .endObject()
@@ -583,4 +605,33 @@ private void assertHits(SearchHits hits, String... ids) {
         }
         assertThat(hitIds, containsInAnyOrder(ids));
     }
+
+    public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin {
+
+        public final class MockSnowBall extends TokenFilter {
+            private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+
+            /** Sole constructor. */
+            MockSnowBall(TokenStream in) {
+                super(in);
+            }
+
+            @Override
+            public boolean incrementToken() throws IOException {
+                if (input.incrementToken()) {
+                    char[] buffer = termAtt.buffer();
+                    if (buffer[termAtt.length() - 1] == 's') {
+                        termAtt.setLength(termAtt.length() - 1);
+                    }
+                    return true;
+                } else
+                    return false;
+            }
+        }
+
+        @Override
+        public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
+            return singletonList(PreConfiguredTokenFilter.singleton("mock_snowball", false, MockSnowBall::new));
+        }
+    }
 }
diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 34501ba8a1b02..8b3aff90e8d4c 100644
--- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -87,7 +87,7 @@ public void testExplainValidateQueryTwoNodes() throws IOException {
                 .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
                         .startObject("foo").field("type", "text").endObject()
                         .startObject("bar").field("type", "integer").endObject()
-                        .startObject("baz").field("type", "text").field("analyzer", "snowball").endObject()
+                        .startObject("baz").field("type", "text").field("analyzer", "standard").endObject()
                         .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject()
                         .endObject().endObject().endObject())
                 .execute().actionGet();

From 8d4f09f7f20dc044fec89b80deb8bdac32c09d62 Mon Sep 17 00:00:00 2001
From: David Turner
Date: Fri, 8 Jun 2018 13:36:19 +0100
Subject: [PATCH 04/20] [DOCS] Add note about long-lived idle connections
 (#30990)

Clarify that we expect to have idle inter-node connections within the
cluster, and that the network needs to be configured not to disrupt
these.
---
 docs/reference/modules/transport.asciidoc | 21 ++++++++++++++++++++-
 1 file changed, 20 insertions(+), 1 deletion(-)

diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc
index b7a65d98592cc..046d82cc507eb 100644
--- a/docs/reference/modules/transport.asciidoc
+++ b/docs/reference/modules/transport.asciidoc
@@ -44,7 +44,12 @@ time setting format). Defaults to `30s`.
 |`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`)
 between all nodes. Defaults to `false`.

-|`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere.
+|`transport.ping_schedule` | Schedule a regular application-level ping message
+to ensure that transport connections between nodes are kept alive. Defaults to
+`5s` in the transport client and `-1` (disabled) elsewhere. It is preferable to
+correctly configure TCP keep-alives instead of using this feature, because TCP
+keep-alives apply to all kinds of long-lived connection and not just to
+transport connections.

 |=======================================================================

@@ -80,6 +85,20 @@ The following parameters can be configured like that

 * `tcp_send_buffer_size`: Configures the send buffer size of the socket
 * `tcp_receive_buffer_size`: Configures the receive buffer size of the socket

+[float]
+==== Long-lived idle connections
+
+Elasticsearch opens a number of long-lived TCP connections between each pair of
+nodes in the cluster, and some of these connections may be idle for an extended
+period of time.
Nonetheless, Elasticsearch requires these connections to remain +open, and it can disrupt the operation of the cluster if any inter-node +connections are closed by an external influence such as a firewall. It is +important to configure your network to preserve long-lived idle connections +between Elasticsearch nodes, for instance by leaving `tcp_keep_alive` enabled +and ensuring that the keepalive interval is shorter than any timeout that might +cause idle connections to be closed, or by setting `transport.ping_schedule` if +keepalives cannot be configured. + [float] === Transport Tracer From e467e67fd460af8e63d6ea01424fb377b0273181 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 8 Jun 2018 08:55:10 -0400 Subject: [PATCH 05/20] Enhance license detection for various licenses (#31198) This commit enhances the license detection that we have for various licenses. Here we improve the detection for all licenses (especially the Apache 2.0 License), the BSD 2-clause license, the MIT (with attribution) license, and we add detection for the BSD 3-clause license. One way that we achieved this improvement is by changing how the license files are read so that rather than reading them as a multi-line string which ended up represented as "[line1, line2, line3, ...]" internally, we read the full bytes of the license text and replace all whitespace with a single space so the license text is now loaded as "line1 line2 line3". For the MIT license we add the actual license text and remove the "MIT" string as not all copies of the license clearly indicate that the text is the MIT license. We take a similar strategy for the BSD-2 and BSD-3 clause licenses. With this change, we reduce the number of "custom" licenses in the codebase from 31 to 2. The two remaining appear to be truly custom licenses, not carrying licenses identifiable by SPDX. A follow-up will address "unknown" licenses. --- .../gradle/DependenciesInfoTask.groovy | 83 ++++++++++++++++++- 1 file changed, 79 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy index eb82b4675f287..b42e6cc8e3caa 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy @@ -109,7 +109,8 @@ public class DependenciesInfoTask extends DefaultTask { } if (license) { - final String content = license.readLines("UTF-8").toString() + // replace * because they are sometimes used at the beginning lines as if the license was a multi-line comment + final String content = new String(license.readBytes(), "UTF-8").replaceAll("\\s+", " ").replaceAll("\\*", " ") final String spdx = checkSPDXLicense(content) if (spdx == null) { // License has not be identified as SPDX. @@ -133,13 +134,84 @@ public class DependenciesInfoTask extends DefaultTask { private String checkSPDXLicense(final String licenseText) { String spdx = null - final String APACHE_2_0 = "Apache.*License.*(v|V)ersion 2.0" - final String BSD_2 = "BSD 2-clause.*License" + final String APACHE_2_0 = "Apache.*License.*(v|V)ersion.*2\\.0" + + final String BSD_2 = """ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1\\. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer\\. + 2\\. 
Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution\\. + +THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. +IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\. +""".replaceAll("\\s+", "\\\\s*") + + final String BSD_3 = """ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + (1\\.)? Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer\\. + (2\\.)? Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution\\. + ((3\\.)? The name of .+ may not be used to endorse or promote products + derived from this software without specific prior written permission\\.| + (3\\.)? Neither the name of .+ nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission\\.) + +THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. +IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\. +""".replaceAll("\\s+", "\\\\s*") + final String CDDL_1_0 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.0" final String CDDL_1_1 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1" final String ICU = "ICU License - ICU 1.8.1 and later" final String LGPL_3 = "GNU LESSER GENERAL PUBLIC LICENSE.*Version 3" - final String MIT = "MIT License" + + final String MIT = """ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files \\(the "Software"\\), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software\\. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE\\. +""".replaceAll("\\s+", "\\\\s*") + final String MOZILLA_1_1 = "Mozilla Public License.*Version 1.1" switch (licenseText) { @@ -152,6 +224,9 @@ public class DependenciesInfoTask extends DefaultTask { case ~/.*${BSD_2}.*/: spdx = 'BSD-2-Clause' break + case ~/.*${BSD_3}.*/: + spdx = 'BSD-3-Clause' + break case ~/.*${LGPL_3}.*/: spdx = 'LGPL-3.0' break From 01140a3ad83cd13b20968e20118c5e373975d888 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Fri, 8 Jun 2018 10:15:28 -0400 Subject: [PATCH 06/20] SQL: Make a single JDBC driver jar (#31012) Replaces zip archive containing multiple jars with a single JDBC driver jar that shades all external dependencies. Closes #29856 --- x-pack/plugin/sql/jdbc/build.gradle | 125 +++++++++++++--------------- x-pack/qa/sql/build.gradle | 6 +- 2 files changed, 62 insertions(+), 69 deletions(-) diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 26cf913aa2790..e383e71cd4c76 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,74 +1,40 @@ + +buildscript { + repositories { + maven { + url 'https://plugins.gradle.org/m2/' + } + } + dependencies { + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2' + } +} + apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' +apply plugin: 'com.github.johnrengelman.shadow' description = 'JDBC driver for Elasticsearch' +archivesBaseName = "x-pack-sql-jdbc" forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] } -/* - * Bundle as many of our dependencies as we can get away with into the jar. - * We can't currently bundle *all* dependencies into the jar, but we'd like - * to avoid publishing the sql shared libraries if possible. This allows that. - * - * It is possible to use configure this bundling in a bunch of different ways - * but this particular way generates a pom that doesn't declare the bundled - * dependencies as dependencies. Which is a good thing because we don't publish - * them and we don't want consumers to get two copies of them. - * - * We'd *like* to shade these dependencies, at least ones like jackson which we - * know that we can't remove entirely. But for now something like this is - * simpler. - */ -configurations { - bundled -} -sourceSets { - main { - compileClasspath += configurations.bundled - } - test { - compileClasspath += configurations.bundled - } -} -javadoc { - classpath += configurations.bundled -} -jar { - from({configurations.bundled.collect { it.isDirectory() ? it : zipTree(it) }}) { - // We don't need the META-INF from the things we bundle. For now. 
-        exclude 'META-INF/*'
-    }
-}
-
 dependencies {
-
-    // Eclipse doesn't know how to deal with these bundled deependencies so make them compile
-    // dependencies if we are running in Eclipse
-    if (isEclipse) {
-        compile (xpackProject('plugin:sql:sql-shared-client')) {
-            transitive = false
-        }
-        compile (xpackProject('plugin:sql:sql-shared-proto')) {
-            transitive = false
-        }
-    } else {
-        bundled (xpackProject('plugin:sql:sql-shared-client')) {
-            transitive = false
-        }
-        bundled (xpackProject('plugin:sql:sql-shared-proto')) {
-            transitive = false
-        }
+    compile (xpackProject('plugin:sql:sql-shared-client')) {
+        transitive = false
+    }
+    compile (xpackProject('plugin:sql:sql-shared-proto')) {
+        transitive = false
     }
     compile (project(':libs:x-content')) {
         transitive = false
     }
     compile project(':libs:core')
     runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
-
     testCompile "org.elasticsearch.test:framework:${version}"
 }

@@ -82,23 +48,48 @@ dependencyLicenses {
     ignoreSha 'elasticsearch'
 }

-/*
- * Temporary zip file to make the jdbc driver more usable during the 6.3
- * release. We'd like to remove this in future releases when the jdbc driver
- * bundles or shades all of its dependencies. But for now this should help
- * non-maven jdbc users, specifically those folks using BI tools.
- */
-task zipWithDependencies(type: Zip) {
-    from configurations.runtime
-    from configurations.runtime.artifacts.files
-    baseName 'elasticsearch-jdbc-with-dependencies'
-    into "elasticsearch-jdbc-with-dependencies-$version"
+shadowJar {
+    classifier = null
+    relocate 'com.fasterxml', 'org.elasticsearch.fasterxml'
+}
+
+// We don't need the normal jar; we use the shadow jar instead
+jar.enabled = false
+
+// We need a no-dependencies jar though for qa testing so it doesn't conflict with the CLI
+configurations {
+    nodeps
+}
+
+task nodepsJar(type: Jar) {
+    appendix 'nodeps'
+    from sourceSets.main.output
 }
-
-assemble.dependsOn zipWithDependencies
+
+artifacts  {
+    nodeps nodepsJar
+    archives shadowJar
+}
+
+publishing {
+    publications {
+        nebula(MavenPublication) {
+            artifact shadowJar
+            pom.withXml {
+                // Nebula is mistakenly including all dependencies that are already shadowed into the shadow jar
+                asNode().remove(asNode().dependencies)
+            }
+        }
+    }
+}
+
+assemble.dependsOn shadowJar

 // Use the jar for testing so the tests are more "real"
 test {
     classpath -= compileJava.outputs.files
-    classpath += jar.outputs.files
-    dependsOn jar
+    classpath -= configurations.compile
+    classpath -= configurations.runtime
+    classpath += shadowJar.outputs.files
+    dependsOn shadowJar
 }
diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle
index 8f77e1608d6d0..a3c147bbc04fc 100644
--- a/x-pack/qa/sql/build.gradle
+++ b/x-pack/qa/sql/build.gradle
@@ -9,7 +9,7 @@ dependencies {
     compile "org.elasticsearch.test:framework:${version}"

     // JDBC testing dependencies
-    compile xpackProject('plugin:sql:jdbc')
+    compile project(path: xpackModule('sql:jdbc'), configuration: 'nodeps')
     compile "net.sourceforge.csvjdbc:csvjdbc:1.0.34"

     // CLI testing dependencies
@@ -87,7 +87,9 @@ subprojects {
         // JDBC testing dependencies
         testRuntime "net.sourceforge.csvjdbc:csvjdbc:1.0.34"
         testRuntime "com.h2database:h2:1.4.197"
-        testRuntime xpackProject('plugin:sql:jdbc')
+        testRuntime project(path: xpackModule('sql:jdbc'), configuration: 'nodeps')
+        testRuntime xpackProject('plugin:sql:sql-shared-client')
+
         // TODO check if needed
         testRuntime("org.antlr:antlr4-runtime:4.5.3") {

From a86c0f8c25fb146793c4992f2a18caffeecdfca6 Mon Sep 17 00:00:00 2001
From:
Vladimir Dolzhenko Date: Fri, 8 Jun 2018 09:16:38 -0700 Subject: [PATCH 07/20] Allow to trim all ops above a certain seq# with a term lower than X (#30176) Allow to trim all ops above a certain seq# with a term lower than X Relates to #10708 --- .../resync/ResyncReplicationRequest.java | 24 +- .../TransportResyncReplicationAction.java | 3 + .../elasticsearch/index/engine/Engine.java | 8 +- .../index/engine/InternalEngine.java | 20 +- .../index/seqno/SequenceNumbers.java | 2 +- .../elasticsearch/index/shard/IndexShard.java | 6 +- .../index/shard/PrimaryReplicaSyncer.java | 21 +- .../index/translog/Checkpoint.java | 92 ++++++-- .../index/translog/MultiSnapshot.java | 9 + .../index/translog/Translog.java | 42 ++++ .../index/translog/TranslogReader.java | 38 +++ .../index/translog/TranslogSnapshot.java | 17 +- .../index/translog/TranslogWriter.java | 24 +- .../recovery/RecoverySourceHandler.java | 4 +- .../resync/ResyncReplicationRequestTests.java | 2 +- .../IndexLevelReplicationTests.java | 10 +- .../RecoveryDuringReplicationTests.java | 71 +++++- .../shard/PrimaryReplicaSyncerTests.java | 36 ++- .../index/translog/TranslogHeaderTests.java | 3 +- .../index/translog/TranslogTests.java | 216 +++++++++++++++++- 20 files changed, 579 insertions(+), 69 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java index 07dcdf7db116a..7be3406659e62 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; @@ -33,17 +34,24 @@ */ public final class ResyncReplicationRequest extends ReplicatedWriteRequest { + private long trimAboveSeqNo; private Translog.Operation[] operations; ResyncReplicationRequest() { super(); } - public ResyncReplicationRequest(final ShardId shardId, final Translog.Operation[] operations) { + public ResyncReplicationRequest(final ShardId shardId, final long trimAboveSeqNo, + final Translog.Operation[] operations) { super(shardId); + this.trimAboveSeqNo = trimAboveSeqNo; this.operations = operations; } + public long getTrimAboveSeqNo() { + return trimAboveSeqNo; + } + public Translog.Operation[] getOperations() { return operations; } @@ -60,12 +68,20 @@ public void readFrom(final StreamInput in) throws IOException { throw new IllegalStateException("resync replication request serialization is broken in 6.0.0"); } super.readFrom(in); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + trimAboveSeqNo = in.readZLong(); + } else { + trimAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + } operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeZLong(trimAboveSeqNo); + } out.writeArray(Translog.Operation::writeOperation, operations); } @@ -74,12 +90,13 @@ public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return 
false; final ResyncReplicationRequest that = (ResyncReplicationRequest) o; - return Arrays.equals(operations, that.operations); + return trimAboveSeqNo == that.trimAboveSeqNo + && Arrays.equals(operations, that.operations); } @Override public int hashCode() { - return Arrays.hashCode(operations); + return Long.hashCode(trimAboveSeqNo) + 31 * Arrays.hashCode(operations); } @Override @@ -88,6 +105,7 @@ public String toString() { "shardId=" + shardId + ", timeout=" + timeout + ", index='" + index + '\'' + + ", trimAboveSeqNo=" + trimAboveSeqNo + ", ops=" + operations.length + "}"; } diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 3dd2bd4df580f..78c1e835d4087 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -135,6 +135,9 @@ public static Translog.Location performOnReplica(ResyncReplicationRequest reques } } } + if (request.getTrimAboveSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + replica.trimOperationOfPreviousPrimaryTerms(request.getTrimAboveSeqNo()); + } return location; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 314eeffd7aa6a..5283975be7b12 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -236,6 +236,12 @@ boolean isThrottled() { */ public abstract boolean isThrottled(); + /** + * Trims translog for terms below belowTerm and seq# above aboveSeqNo + * @see Translog#trimOperations(long, long) + */ + public abstract void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException; + /** A Lock implementation that always allows the lock to be acquired */ protected static final class NoOpLock implements Lock { @@ -904,7 +910,7 @@ public final boolean refreshNeeded() { * checks and removes translog files that no longer need to be retained. See * {@link org.elasticsearch.index.translog.TranslogDeletionPolicy} for details */ - public abstract void trimTranslog() throws EngineException; + public abstract void trimUnreferencedTranslogFiles() throws EngineException; /** * Tests whether or not the translog generation should be rolled to a new generation. 
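The Engine hunk above introduces two similarly named trim operations that do very different things, and they are easy to conflate. A minimal caller-side sketch, purely for orientation; the `shard` and `resyncRequest` objects here are assumed placeholders, not code from the patch:

    // File-level trimming: delete whole translog generations that the deletion
    // policy no longer needs (IndexShard#trimTranslog() delegates to the renamed
    // Engine#trimUnreferencedTranslogFiles()).
    shard.trimTranslog();

    // Operation-level trimming: after a primary failover, hide every operation whose
    // seq# is above trimAboveSeqNo and whose primary term is older than the current one.
    if (resyncRequest.getTrimAboveSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) {
        shard.trimOperationOfPreviousPrimaryTerms(resyncRequest.getTrimAboveSeqNo());
    }

This mirrors how TransportResyncReplicationAction applies the trim on the replica in the hunk above.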
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index bca84f81a29c4..88e7160845266 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1552,7 +1552,7 @@ public void rollTranslogGeneration() throws EngineException { } @Override - public void trimTranslog() throws EngineException { + public void trimUnreferencedTranslogFiles() throws EngineException { try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); translog.trimUnreferencedReaders(); @@ -1569,6 +1569,24 @@ public void trimTranslog() throws EngineException { } } + @Override + public void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException { + try (ReleasableLock lock = readLock.acquire()) { + ensureOpen(); + translog.trimOperations(belowTerm, aboveSeqNo); + } catch (AlreadyClosedException e) { + failOnTragicEvent(e); + throw e; + } catch (Exception e) { + try { + failEngine("translog operations trimming failed", e); + } catch (Exception inner) { + e.addSuppressed(inner); + } + throw new EngineException(shardId, "failed to trim translog operations", e); + } + } + private void pruneDeletedTombstones() { /* * We need to deploy two different trimming strategies for GC deletes on primary and replicas. Delete operations on primary diff --git a/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java b/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java index 0c071f4b2d422..7cffc8c1ac911 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java @@ -37,7 +37,7 @@ public class SequenceNumbers { */ public static final long UNASSIGNED_SEQ_NO = -2L; /** - * Represents no operations have been performed on the shard. + * Represents no operations have been performed on the shard. Initial value of a sequence number. 
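+ * (A freshly created shard, for example, reports maxSeqNo == NO_OPS_PERFORMED == -1; the first operation indexed on it is then assigned seq# 0.)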
  */
 public static final long NO_OPS_PERFORMED = -1L;

diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
index c76ce128763b3..8583b6b4c9b33 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java
@@ -992,7 +992,7 @@ public Engine.CommitId flush(FlushRequest request) {
     public void trimTranslog() {
         verifyNotClosed();
         final Engine engine = getEngine();
-        engine.trimTranslog();
+        engine.trimUnreferencedTranslogFiles();
     }

     /**
@@ -1194,6 +1194,10 @@ public void prepareForIndexRecovery() {
         assert currentEngineReference.get() == null;
     }

+    public void trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) {
+        getEngine().trimOperationsFromTranslog(primaryTerm, aboveSeqNo);
+    }
+
     public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine.Operation.Origin origin) throws IOException {
         final Engine.Result result;
         switch (operation.opType()) {
diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java
index af8c9bdd0272f..8e05e7bf08efa 100644
--- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java
+++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.seqno.SeqNoStats;
 import org.elasticsearch.index.seqno.SequenceNumbers;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.tasks.Task;
@@ -84,6 +85,7 @@ public void resync(final IndexShard indexShard, final ActionListener<ResyncTask> listener) {
         try {
             final long startingSeqNo = indexShard.getGlobalCheckpoint() + 1;
             Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo);
+            final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo();
             resyncListener = new ActionListener<ResyncTask>() {
                 @Override
                 public void onResponse(final ResyncTask resyncTask) {
@@ -135,7 +137,7 @@ public synchronized Translog.Operation next() throws IOException {
                 }
             };
             resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot,
-                startingSeqNo, resyncListener);
+                startingSeqNo, maxSeqNo, resyncListener);
         } catch (Exception e) {
             if (resyncListener != null) {
                 resyncListener.onFailure(e);
@@ -146,7 +148,7 @@ public synchronized Translog.Operation next() throws IOException {
     }

     private void resync(final ShardId shardId, final String primaryAllocationId, final long primaryTerm, final Translog.Snapshot snapshot,
-                        long startingSeqNo, ActionListener<ResyncTask> listener) {
+                        long startingSeqNo, long maxSeqNo, ActionListener<ResyncTask> listener) {
         ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId);
         ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-)
         ActionListener<ResyncTask> wrappedListener = new ActionListener<ResyncTask>() {
             @Override
@@ -166,7 +168,7 @@ public void onFailure(Exception e) {
         };
         try {
             new SnapshotSender(logger, syncAction, resyncTask, shardId, primaryAllocationId, primaryTerm, snapshot, chunkSize.bytesAsInt(),
-                startingSeqNo, wrappedListener).run();
+                startingSeqNo, maxSeqNo, wrappedListener).run();
         } catch (Exception e) {
             wrappedListener.onFailure(e);
         }
@@ -186,14 +188,16 @@ static class SnapshotSender extends AbstractRunnable implements ActionListener<ResyncReplicationResponse> {
         private final long startingSeqNo;
+        private final long maxSeqNo;
         private final ActionListener<ResyncTask> listener;
+        private final AtomicBoolean firstMessage = new AtomicBoolean(true);
         private final AtomicInteger totalSentOps = new AtomicInteger();
         private final AtomicInteger totalSkippedOps = new AtomicInteger();
         private AtomicBoolean closed = new AtomicBoolean();

         SnapshotSender(Logger logger, SyncAction syncAction, ResyncTask task, ShardId shardId, String primaryAllocationId, long primaryTerm,
-                       Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, ActionListener<ResyncTask> listener) {
+                       Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, ActionListener<ResyncTask> listener) {
             this.logger = logger;
             this.syncAction = syncAction;
             this.task = task;
@@ -203,6 +207,7 @@ static class SnapshotSender extends AbstractRunnable implements ActionListener<ResyncReplicationResponse> {
             this.snapshot = snapshot;
             this.chunkSizeInBytes = chunkSizeInBytes;
             this.startingSeqNo = startingSeqNo;
+            this.maxSeqNo = maxSeqNo;
             this.listener = listener;
             task.setTotalOperations(snapshot.totalOperations());
         }
diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java
--- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java
+++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java
+    /**
+     * Trims translog for terms below belowTerm and seq# above aboveSeqNo.
+     * Effectively it moves the max visible seq# {@link Checkpoint#trimmedAboveSeqNo}, therefore {@link TranslogSnapshot} skips those operations.
+     */
+    public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException {
+        assert aboveSeqNo >= SequenceNumbers.NO_OPS_PERFORMED : "aboveSeqNo has to be a valid sequence number";
+
+        try (ReleasableLock lock = writeLock.acquire()) {
+            ensureOpen();
+            if (current.getPrimaryTerm() < belowTerm) {
+                throw new IllegalArgumentException("Trimming the translog can only be done for terms lower than the current one. " +
+                    "Trim requested for term [ " + belowTerm + " ] , current is [ " + current.getPrimaryTerm() + " ]");
+            }
+            // we assume that the current translog generation doesn't have trimmable ops. Verify that.
+            assert current.assertNoSeqAbove(belowTerm, aboveSeqNo);
+            // update all existing readers (if necessary) as checkpoint and reader are immutable
+            final List<TranslogReader> newReaders = new ArrayList<>(readers.size());
+            try {
+                for (TranslogReader reader : readers) {
+                    final TranslogReader newReader =
+                        reader.getPrimaryTerm() < belowTerm
+                            ? reader.closeIntoTrimmedReader(aboveSeqNo, getChannelFactory())
+                            : reader;
+                    newReaders.add(newReader);
+                }
+            } catch (IOException e) {
+                IOUtils.closeWhileHandlingException(newReaders);
+                close();
+                throw e;
+            }
+
+            this.readers.clear();
+            this.readers.addAll(newReaders);
+        }
+    }

     /**
      * Ensures that the given location has been synced / written to the underlying storage.
@@ -845,6 +880,13 @@ public interface Snapshot extends Closeable {
          */
         int totalOperations();

+        /**
+         * The number of operations that have been skipped (overridden or trimmed) in the snapshot so far.
+         */
+        default int skippedOperations() {
+            return 0;
+        }
+
         /**
          * The number of operations that have been overridden (e.g. superseded) in the snapshot so far.
          * If two operations have the same sequence number, the operation with a lower term will be overridden by the operation
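Across Checkpoint, Translog, and the snapshot classes, the trimming machinery reduces to a single visibility rule. The sketch below restates it for orientation; `isVisible` is an invented name for this illustration, while the real check lives in TranslogSnapshot#next() in the hunks that follow:

    // An operation stays visible unless its reader was trimmed and the operation's
    // seq# lies above the trim point recorded in the checkpoint.
    static boolean isVisible(Translog.Operation op, Checkpoint checkpoint) {
        return checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO // nothing trimmed
            || op.seqNo() <= checkpoint.trimmedAboveSeqNo;                       // at or below the trim point
    }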
* If two operations have the same sequence number, the operation with a lower term will be overridden by the operation diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index 29e30bd25dd37..4091fa45762e1 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -21,6 +21,8 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.seqno.SequenceNumbers; import java.io.Closeable; import java.io.EOFException; @@ -28,8 +30,11 @@ import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.index.translog.Translog.getCommitCheckpointFileName; + /** * an immutable translog filereader */ @@ -70,6 +75,39 @@ public static TranslogReader open( return new TranslogReader(checkpoint, channel, path, header); } + /** + * Closes current reader and creates new one with new checkoint and same file channel + */ + TranslogReader closeIntoTrimmedReader(long aboveSeqNo, ChannelFactory channelFactory) throws IOException { + if (closed.compareAndSet(false, true)) { + Closeable toCloseOnFailure = channel; + final TranslogReader newReader; + try { + if (aboveSeqNo < checkpoint.trimmedAboveSeqNo + || aboveSeqNo < checkpoint.maxSeqNo && checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + final Path checkpointFile = path.getParent().resolve(getCommitCheckpointFileName(checkpoint.generation)); + final Checkpoint newCheckpoint = new Checkpoint(checkpoint.offset, checkpoint.numOps, + checkpoint.generation, checkpoint.minSeqNo, checkpoint.maxSeqNo, + checkpoint.globalCheckpoint, checkpoint.minTranslogGeneration, aboveSeqNo); + Checkpoint.write(channelFactory, checkpointFile, newCheckpoint, StandardOpenOption.WRITE); + + IOUtils.fsync(checkpointFile, false); + IOUtils.fsync(checkpointFile.getParent(), true); + + newReader = new TranslogReader(newCheckpoint, channel, path, header); + } else { + newReader = new TranslogReader(checkpoint, channel, path, header); + } + toCloseOnFailure = null; + return newReader; + } finally { + IOUtils.close(toCloseOnFailure); + } + } else { + throw new AlreadyClosedException(toString() + " is already closed"); + } + } + public long sizeInBytes() { return length; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index a966720353297..8fe92bba0097c 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.translog; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.index.seqno.SequenceNumbers; import java.io.EOFException; import java.io.IOException; @@ -32,6 +33,7 @@ final class TranslogSnapshot extends BaseTranslogReader { private final ByteBuffer reusableBuffer; private long position; + private int skippedOperations; private int readOperations; private BufferedChecksumStreamInput reuse; @@ -54,17 +56,24 @@ public int totalOperations() { return totalOperations; } + int 
skippedOperations(){ + return skippedOperations; + } + @Override Checkpoint getCheckpoint() { return checkpoint; } public Translog.Operation next() throws IOException { - if (readOperations < totalOperations) { - return readOperation(); - } else { - return null; + while (readOperations < totalOperations) { + final Translog.Operation operation = readOperation(); + if (operation.seqNo() <= checkpoint.trimmedAboveSeqNo || checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + return operation; + } + skippedOperations++; } + return null; } protected Translog.Operation readOperation() throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index cae6578886534..b89b21c52588a 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.store.OutputStreamDataOutput; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Assertions; import org.elasticsearch.common.bytes.BytesArray; @@ -92,6 +91,7 @@ private TranslogWriter( this.minSeqNo = initialCheckpoint.minSeqNo; assert initialCheckpoint.maxSeqNo == SequenceNumbers.NO_OPS_PERFORMED : initialCheckpoint.maxSeqNo; this.maxSeqNo = initialCheckpoint.maxSeqNo; + assert initialCheckpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO : initialCheckpoint.trimmedAboveSeqNo; this.globalCheckpointSupplier = globalCheckpointSupplier; this.seenSequenceNumbers = Assertions.ENABLED ? new HashMap<>() : null; } @@ -213,6 +213,25 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc return true; } + synchronized boolean assertNoSeqAbove(long belowTerm, long aboveSeqNo) { + seenSequenceNumbers.entrySet().stream().filter(e -> e.getKey().longValue() > aboveSeqNo) + .forEach(e -> { + final Translog.Operation op; + try { + op = Translog.readOperation(new BufferedChecksumStreamInput(e.getValue().v1().streamInput())); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + long seqNo = op.seqNo(); + long primaryTerm = op.primaryTerm(); + if (primaryTerm < belowTerm) { + throw new AssertionError("current should not have any operations with seq#:primaryTerm [" + + seqNo + ":" + primaryTerm + "] > " + aboveSeqNo + ":" + belowTerm); + } + }); + return true; + } + /** * write all buffered ops to disk and fsync file. 
* @@ -241,7 +260,8 @@ public int totalOperations() { @Override synchronized Checkpoint getCheckpoint() { return new Checkpoint(totalOffset, operationCounter, generation, minSeqNo, maxSeqNo, - globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong()); + globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong(), + SequenceNumbers.UNASSIGNED_SEQ_NO); } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 4c543aeeb22d4..72a6fcb6ba329 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -615,9 +615,9 @@ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long require cancellableThreads.executeIO(sendBatch); } - assert expectedTotalOps == snapshot.overriddenOperations() + skippedOps + totalSentOps + assert expectedTotalOps == snapshot.skippedOperations() + skippedOps + totalSentOps : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", - expectedTotalOps, snapshot.overriddenOperations(), skippedOps, totalSentOps); + expectedTotalOps, snapshot.skippedOperations(), skippedOps, totalSentOps); if (requiredOpsTracker.getCheckpoint() < endingSeqNo) { throw new IllegalStateException("translog replay failed to cover required sequence numbers" + diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index d5ad3941a5e8f..914c2b87422db 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -40,7 +40,7 @@ public void testSerialization() throws IOException { final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); - final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, new Translog.Operation[]{index}); + final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); final BytesStreamOutput out = new BytesStreamOutput(); before.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 736dc40e6867d..018548be9629f 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -340,9 +340,10 @@ public void testSeqNoCollision() throws Exception { op1 = snapshot.next(); assertThat(op1, notNullValue()); assertThat(snapshot.next(), nullValue()); - assertThat(snapshot.overriddenOperations(), equalTo(0)); + assertThat(snapshot.skippedOperations(), equalTo(0)); } - // Make sure that replica2 receives translog ops (eg. op2) from replica1 and overwrites its stale operation (op1). + // Make sure that replica2 receives translog ops (eg. op2) from replica1 + // and does not overwrite its stale operation (op1) as it is trimmed. 
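+ // For orientation, the trim/skip rule these assertions exercise boils down to the
+ // predicate used by InMemoryTranslog.trimOperations() later in this patch; a minimal
+ // hedged sketch (the helper name isTrimmed is hypothetical, the accessors are real):
+ //
+ //     static boolean isTrimmed(Translog.Operation op, long belowTerm, long aboveSeqNo) {
+ //         // an operation is dropped/skipped when it sits above the trim point
+ //         // but was written under an older primary term
+ //         return op.primaryTerm() < belowTerm && op.seqNo() > aboveSeqNo;
+ //     }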
logger.info("--> Promote replica1 as the primary"); shards.promoteReplicaToPrimary(replica1).get(); // wait until resync completed. shards.index(new IndexRequest(index.getName(), "type", "d2").source("{}", XContentType.JSON)); @@ -353,7 +354,8 @@ public void testSeqNoCollision() throws Exception { assertThat(op2.seqNo(), equalTo(op1.seqNo())); assertThat(op2.primaryTerm(), greaterThan(op1.primaryTerm())); assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations)); - assertThat(snapshot.overriddenOperations(), equalTo(1)); + assertThat(snapshot.overriddenOperations(), equalTo(0)); + assertThat(snapshot.skippedOperations(), equalTo(1)); } // Make sure that peer-recovery transfers all but non-overridden operations. @@ -366,7 +368,7 @@ public void testSeqNoCollision() throws Exception { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); assertThat(snapshot.next(), equalTo(op2)); assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations)); - assertThat("Peer-recovery should not send overridden operations", snapshot.overriddenOperations(), equalTo(0)); + assertThat("Peer-recovery should not send overridden operations", snapshot.skippedOperations(), equalTo(0)); } // TODO: We should assert the content of shards in the ReplicationGroup. // Without rollback replicas(current implementation), we don't have the same content across shards: diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 21be1da3845b6..ee97ba14fe09e 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -53,8 +53,12 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; @@ -65,6 +69,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -353,10 +358,19 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { @TestLogging("org.elasticsearch.index.shard:TRACE,org.elasticsearch.action.resync:TRACE") public void testResyncAfterPrimaryPromotion() throws Exception { - // TODO: check translog trimming functionality once it's implemented - try (ReplicationGroup shards = createGroup(2)) { + // TODO: check translog trimming functionality once rollback is implemented in Lucene (ES trimming is done) + Map mappings = + Collections.singletonMap("type", "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"); + try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(2, mappings))) { shards.startAll(); - int initialDocs = shards.indexDocs(randomInt(10)); + int initialDocs = randomInt(10); + + for (int i = 0; i < initialDocs; i++) { + final IndexRequest indexRequest = new 
IndexRequest(index.getName(), "type", "initial_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); + shards.index(indexRequest); + } + boolean syncedGlobalCheckPoint = randomBoolean(); if (syncedGlobalCheckPoint) { shards.syncGlobalCheckpoint(); @@ -364,16 +378,30 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final IndexShard oldPrimary = shards.getPrimary(); final IndexShard newPrimary = shards.getReplicas().get(0); + final IndexShard justReplica = shards.getReplicas().get(1); // simulate docs that were inflight when primary failed - final int extraDocs = randomIntBetween(0, 5); + final int extraDocs = randomInt(5); logger.info("--> indexing {} extra docs", extraDocs); for (int i = 0; i < extraDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i) - .source("{}", XContentType.JSON); + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); indexOnReplica(bulkShardRequest, shards, newPrimary); } + + final int extraDocsToBeTrimmed = randomIntBetween(0, 10); + logger.info("--> indexing {} extra docs to be trimmed", extraDocsToBeTrimmed); + for (int i = 0; i < extraDocsToBeTrimmed; i++) { + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_trimmed_" + i) + .source("{ \"f\": \"trimmed\"}", XContentType.JSON); + final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); + // replicate to the other replica (not newPrimary) - this is the copy whose translog is to be trimmed + indexOnReplica(bulkShardRequest, shards, justReplica); + } + + logger.info("--> seqNo primary {} replica {}", oldPrimary.seqNoStats(), newPrimary.seqNoStats()); + logger.info("--> resyncing replicas"); PrimaryReplicaSyncer.ResyncTask task = shards.promoteReplicaToPrimary(newPrimary).get(); if (syncedGlobalCheckPoint) { @@ -381,7 +409,36 @@ public void testResyncAfterPrimaryPromotion() throws Exception { } else { assertThat(task.getResyncedOperations(), greaterThanOrEqualTo(extraDocs)); } - shards.assertAllEqual(initialDocs + extraDocs); + List<IndexShard> replicas = shards.getReplicas(); + + // check all docs on primary are available on replica + Set<String> primaryIds = getShardDocUIDs(newPrimary); + assertThat(primaryIds.size(), equalTo(initialDocs + extraDocs)); + for (IndexShard replica : replicas) { + Set<String> replicaIds = getShardDocUIDs(replica); + Set<String> temp = new HashSet<>(primaryIds); + temp.removeAll(replicaIds); + assertThat(replica.routingEntry() + " is missing docs", temp, empty()); + temp = new HashSet<>(replicaIds); + temp.removeAll(primaryIds); + // the replica may hold extra docs, as there is no Lucene rollback on it + assertThat(replica.routingEntry() + " has to have extra docs", temp, + extraDocsToBeTrimmed > 0 ?
not(empty()) : empty()); + } + + // check translog on replica is trimmed + int translogOperations = 0; + try(Translog.Snapshot snapshot = getTranslog(justReplica).newSnapshot()) { + Translog.Operation next; + while ((next = snapshot.next()) != null) { + translogOperations++; + assertThat("unexpected op: " + next, (int)next.seqNo(), lessThan(initialDocs + extraDocs)); + assertThat("unexpected primaryTerm: " + next.primaryTerm(), next.primaryTerm(), is(oldPrimary.getPrimaryTerm())); + final Translog.Source source = next.getSource(); + assertThat(source.source.utf8ToString(), is("{ \"f\": \"normal\"}")); + } + } + assertThat(translogOperations, is(initialDocs + extraDocs)); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 1257aea3d14fa..b290f4d45597b 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.resync.ResyncReplicationRequest; import org.elasticsearch.action.resync.ResyncReplicationResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -36,15 +37,20 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.tasks.TaskManager; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; public class PrimaryReplicaSyncerTests extends IndexShardTestCase { @@ -53,15 +59,17 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { IndexShard shard = newStartedShard(true); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); AtomicBoolean syncActionCalled = new AtomicBoolean(); + List resyncRequests = new ArrayList<>(); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); + resyncRequests.add(request); assertThat(parentTask, instanceOf(PrimaryReplicaSyncer.ResyncTask.class)); listener.onResponse(new ResyncReplicationResponse()); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, taskManager, syncAction); - syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 100))); + syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 10))); int numDocs = randomInt(10); for (int i = 0; i < numDocs; i++) { @@ -72,7 +80,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { } long globalCheckPoint = numDocs > 0 ? 
randomIntBetween(0, numDocs - 1) : 0; - boolean syncNeeded = numDocs > 0 && globalCheckPoint < numDocs - 1; + boolean syncNeeded = numDocs > 0; String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), @@ -84,19 +92,29 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { PlainActionFuture fut = new PlainActionFuture<>(); syncer.resync(shard, fut); - fut.get(); + PrimaryReplicaSyncer.ResyncTask resyncTask = fut.get(); if (syncNeeded) { assertTrue("Sync action was not called", syncActionCalled.get()); + ResyncReplicationRequest resyncRequest = resyncRequests.remove(0); + assertThat(resyncRequest.getTrimAboveSeqNo(), equalTo(numDocs - 1L)); + + assertThat("trimAboveSeqNo has to be specified in request #0 only", resyncRequests.stream() + .mapToLong(ResyncReplicationRequest::getTrimAboveSeqNo) + .filter(seqNo -> seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) + .findFirst() + .isPresent(), + is(false)); } - assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, fut.get().getTotalOperations()); - if (syncNeeded) { + + assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, resyncTask.getTotalOperations()); + if (syncNeeded && globalCheckPoint < numDocs - 1) { long skippedOps = globalCheckPoint + 1; // everything up to global checkpoint included - assertEquals(skippedOps, fut.get().getSkippedOperations()); - assertEquals(numDocs - skippedOps, fut.get().getResyncedOperations()); + assertEquals(skippedOps, resyncTask.getSkippedOperations()); + assertEquals(numDocs - skippedOps, resyncTask.getResyncedOperations()); } else { - assertEquals(0, fut.get().getSkippedOperations()); - assertEquals(0, fut.get().getResyncedOperations()); + assertEquals(0, resyncTask.getSkippedOperations()); + assertEquals(0, resyncTask.getResyncedOperations()); } closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java index 0dc404767de3c..99e21d4760463 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java @@ -118,7 +118,8 @@ private void checkFailsToOpen(String file, Class expect assertThat("test file [" + translogFile + "] should exist", Files.exists(translogFile), equalTo(true)); final E error = expectThrows(expectedErrorType, () -> { final Checkpoint checkpoint = new Checkpoint(Files.size(translogFile), 1, 1, - SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, 1); + SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, + SequenceNumbers.NO_OPS_PERFORMED, 1, SequenceNumbers.NO_OPS_PERFORMED); try (FileChannel channel = FileChannel.open(translogFile, StandardOpenOption.READ)) { TranslogReader.open(channel, translogFile, checkpoint, null); } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index b3b9fca886e17..cf6e753684676 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -107,7 +107,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; @@ -120,8 +122,11 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.stub; @@ -1474,8 +1479,8 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { fail("corrupted"); } catch (IllegalStateException ex) { assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " + - "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1} but got: Checkpoint{offset=0, numOps=0, " + - "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0}", ex.getMessage()); + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " + + "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage()); } Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) { @@ -1507,6 +1512,191 @@ public void testSnapshotFromStreamInput() throws IOException { assertEquals(ops, readOperations); } + public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() throws Exception { + int extraDocs = randomIntBetween(10, 15); + + // increment primaryTerm to avoid potential negative numbers + primaryTerm.addAndGet(extraDocs); + translog.rollGeneration(); + + for (int op = 0; op < extraDocs; op++) { + String ascii = randomAlphaOfLengthBetween(1, 50); + Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, + ascii.getBytes("UTF-8")); + translog.add(operation); + } + + AssertionError error = expectThrows(AssertionError.class, () -> translog.trimOperations(primaryTerm.get(), 0)); + assertThat(error.getMessage(), is("current should not have any operations with seq#:primaryTerm " + + "[1:" + (primaryTerm.get() - 1) + "] > 0:" + primaryTerm.get())); + + primaryTerm.incrementAndGet(); + translog.rollGeneration(); + + // add a single operation to current with seq# > trimmed seq# but higher primary term + Translog.Index operation = new Translog.Index("test", "" + 1, 1L, primaryTerm.get(), + randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")); + translog.add(operation); + + // it is possible to trim after generation rollover + translog.trimOperations(primaryTerm.get(), 0); + } + + public void testSnapshotTrimmedOperations() throws Exception { + final InMemoryTranslog inMemoryTranslog = new InMemoryTranslog(); + final List<Translog.Operation> allOperations = new ArrayList<>(); + + for (int attempt = 0, maxAttempts = randomIntBetween(3, 10); attempt < maxAttempts; attempt++) { + List<Long> ops = LongStream.range(0, allOperations.size() + randomIntBetween(10, 15)) + .boxed().collect(Collectors.toList()); + Randomness.shuffle(ops); + + AtomicReference<String> source = new AtomicReference<>(); + for (final long op : ops) { + source.set(randomAlphaOfLengthBetween(1, 50)); + + // have to use exactly the same source for same seq# if primaryTerm is not changed + if (primaryTerm.get() == translog.getCurrent().getPrimaryTerm()) { + // use the latest source of op with the same seq# - therefore no break + allOperations + .stream() + .filter(allOp -> allOp instanceof Translog.Index && allOp.seqNo() == op) + .map(allOp -> ((Translog.Index)allOp).source().utf8ToString()) + .reduce((a, b) -> b) + .ifPresent(source::set); + } + + // use the ongoing primaryTerm - or the same one as before + Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), + source.get().getBytes("UTF-8")); + translog.add(operation); + inMemoryTranslog.add(operation); + allOperations.add(operation); + } + + if (randomBoolean()) { + primaryTerm.incrementAndGet(); + translog.rollGeneration(); + } + + long maxTrimmedSeqNo = randomInt(allOperations.size()); + + translog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + inMemoryTranslog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + translog.sync(); + + Collection<Translog.Operation> effectiveOperations = inMemoryTranslog.operations(); + + try (Translog.Snapshot snapshot = translog.newSnapshot()) { + assertThat(snapshot, containsOperationsInAnyOrder(effectiveOperations)); + assertThat(snapshot.totalOperations(), is(allOperations.size())); + assertThat(snapshot.skippedOperations(), is(allOperations.size() - effectiveOperations.size())); + } + } + } + + /** + * this class mimics the behaviour of the original {@link Translog} + */ + static class InMemoryTranslog { + private final Map<Long, Translog.Operation> operations = new HashMap<>(); + + void add(Translog.Operation operation) { + final Translog.Operation old = operations.put(operation.seqNo(), operation); + assert old == null || old.primaryTerm() <= operation.primaryTerm(); + } + + void trimOperations(long belowTerm, long aboveSeqNo) { + for (final Iterator<Map.Entry<Long, Translog.Operation>> it = operations.entrySet().iterator(); it.hasNext(); ) { + final Map.Entry<Long, Translog.Operation> next = it.next(); + Translog.Operation op = next.getValue(); + boolean drop = op.primaryTerm() < belowTerm && op.seqNo() > aboveSeqNo; + if (drop) { + it.remove(); + } + } + } + + Collection<Translog.Operation> operations() { + return operations.values(); + } + } + + public void testRandomExceptionsOnTrimOperations() throws Exception { + Path tempDir = createTempDir(); + final FailSwitch fail = new FailSwitch(); + fail.failNever(); + TranslogConfig config = getTranslogConfig(tempDir); + List<FileChannel> fileChannels = new ArrayList<>(); + final Translog failableTLog = + getFailableTranslog(fail, config, randomBoolean(), false, null, createTranslogDeletionPolicy(), fileChannels); + + IOException expectedException = null; + int translogOperations = 0; + final int maxAttempts = 10; + for (int attempt = 0; attempt < maxAttempts; attempt++) { + int maxTrimmedSeqNo; + fail.failNever(); + int extraTranslogOperations = randomIntBetween(10, 100); + + List<Integer> ops = IntStream.range(translogOperations, translogOperations + extraTranslogOperations) + .boxed().collect(Collectors.toList()); + Randomness.shuffle(ops); + for (int op : ops) { + String ascii = randomAlphaOfLengthBetween(1, 50); + Translog.Index operation = new Translog.Index("test", "" + op, op, + primaryTerm.get(), ascii.getBytes("UTF-8")); + + failableTLog.add(operation); + } + + translogOperations +=
extraTranslogOperations; + + // at least one generation rollover + primary term increment has to happen - otherwise no trimming would take place at all + // on the last attempt we have to roll over as well - otherwise trimming could be skipped as everything has been trimmed already + boolean rollover = attempt == 0 || attempt == maxAttempts - 1 || randomBoolean(); + if (rollover) { + primaryTerm.incrementAndGet(); + failableTLog.rollGeneration(); + } + + maxTrimmedSeqNo = rollover ? translogOperations - randomIntBetween(4, 8) : translogOperations + 1; + + // if we manage to reach the max attempts - always fail + fail.failRate(attempt < maxAttempts - 1 ? 25 : 100); + try { + failableTLog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + } catch (IOException e) { + expectedException = e; + break; + } + } + + assertThat(expectedException, is(not(nullValue()))); + + assertThat(fileChannels, is(not(empty()))); + assertThat("all file channels have to be closed", + fileChannels.stream().filter(f -> f.isOpen()).findFirst().isPresent(), is(false)); + + assertThat(failableTLog.isOpen(), is(false)); + final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, () -> failableTLog.newSnapshot()); + assertThat(alreadyClosedException.getMessage(), + is("translog is already closed")); + + fail.failNever(); + + // check that despite the IO exception the translog is not corrupted + try (Translog reopenedTranslog = openTranslog(config, failableTLog.getTranslogUUID())) { + try (Translog.Snapshot snapshot = reopenedTranslog.newSnapshot()) { + assertThat(snapshot.totalOperations(), greaterThan(0)); + Translog.Operation operation; + for (int i = 0; (operation = snapshot.next()) != null; i++) { + assertNotNull("operation " + i + " must be non-null", operation); + } + } + } + } + public void testLocationHashCodeEquals() throws IOException { List<Translog.Location> locations = new ArrayList<>(); List<Translog.Location> locations2 = new ArrayList<>(); @@ -2007,7 +2197,8 @@ private static class FailSwitch { private volatile boolean onceFailedFailAlways = false; public boolean fail() { - boolean fail = randomIntBetween(1, 100) <= failRate; + final int rnd = randomIntBetween(1, 100); + boolean fail = rnd <= failRate; if (fail && onceFailedFailAlways) { failAlways(); } @@ -2026,17 +2217,30 @@ public void failRandomly() { failRate = randomIntBetween(1, 100); } + public void failRate(int rate) { + failRate = rate; + } + public void onceFailedFailAlways() { onceFailedFailAlways = true; } } - private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, final boolean throwUnknownException, String translogUUID, final TranslogDeletionPolicy deletionPolicy) throws IOException { + private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, + final boolean throwUnknownException, String translogUUID, + final TranslogDeletionPolicy deletionPolicy) throws IOException { + return getFailableTranslog(fail, config, partialWrites, throwUnknownException, translogUUID, deletionPolicy, null); + } + + private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, + final boolean throwUnknownException, String translogUUID, + final TranslogDeletionPolicy deletionPolicy, + final List<FileChannel> fileChannels) throws IOException { final ChannelFactory channelFactory = (file, openOption) -> { FileChannel channel = FileChannel.open(file, openOption); + if (fileChannels != null) { + fileChannels.add(channel); + } boolean success = false; try { final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints we rely on the fact that the bytes are written as an atomic operation @@ -2393,7 +2597,7 @@ private Checkpoint
randomCheckpoint() { } final long generation = randomNonNegativeLong(); return new Checkpoint(randomLong(), randomInt(), generation, minSeqNo, maxSeqNo, randomNonNegativeLong(), - randomLongBetween(1, generation)); + randomLongBetween(1, generation), maxSeqNo); } public void testCheckpointOnDiskFull() throws IOException { @@ -2617,7 +2821,7 @@ public void testMinSeqNoBasedAPI() throws IOException { assertThat(Tuple.tuple(op.seqNo(), op.primaryTerm()), isIn(seenSeqNos)); readFromSnapshot++; } - readFromSnapshot += snapshot.overriddenOperations(); + readFromSnapshot += snapshot.skippedOperations(); } assertThat(readFromSnapshot, equalTo(expectedSnapshotOps)); final long seqNoLowerBound = seqNo; From 2378fa19b8cf31399434815d067765917825924a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 8 Jun 2018 09:23:46 -0700 Subject: [PATCH 08/20] Remove extraneous references to 'tokenized' in the mapper code. (#31010) These are likely left over from when there were three options for the index mapping ('no', 'analyzed', 'not_analyzed'). --- .../index/mapper/BooleanFieldMapper.java | 8 -------- .../elasticsearch/index/mapper/FieldMapper.java | 15 ++------------- .../index/mapper/MappedFieldType.java | 2 +- .../index/mapper/MultiFieldTests.java | 2 +- 4 files changed, 4 insertions(+), 23 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 52b9a0d46e55d..c50a7d18113bf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -84,14 +84,6 @@ public Builder(String name) { this.builder = this; } - @Override - public Builder tokenized(boolean tokenized) { - if (tokenized) { - throw new IllegalArgumentException("bool field can't be tokenized"); - } - return super.tokenized(tokenized); - } - @Override public BooleanFieldMapper build(BuilderContext context) { setupFieldType(context); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index f23a8d0ce96aa..977b930c41e5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -147,11 +147,6 @@ public T storeTermVectorPayloads(boolean termVectorPayloads) { return builder; } - public T tokenized(boolean tokenized) { - this.fieldType.setTokenized(tokenized); - return builder; - } - public T boost(float boost) { this.fieldType.setBoost(boost); return builder; @@ -376,9 +371,8 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE; - if (includeDefaults || indexed != defaultIndexed || - fieldType().tokenized() != defaultFieldType.tokenized()) { - builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized())); + if (includeDefaults || indexed != defaultIndexed) { + builder.field("index", indexed); } if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) { builder.field("store", fieldType().stored()); @@ -474,11 +468,6 @@ public static String termVectorOptionsToString(FieldType fieldType) { } } - /* Only protected so that string can override it */ - protected Object indexTokenizeOption(boolean indexed, boolean 
tokenized) { - return indexed; - } - protected abstract String contentType(); public static class MultiFields { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 71450e69948fb..24f27aa9ea7f3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -165,7 +165,7 @@ public void checkCompatibility(MappedFieldType other, List conflicts) { boolean indexed = indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) - if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) { + if (indexed != mergeWithIndexed) { conflicts.add("mapper [" + name() + "] has different [index] values"); } if (stored() != other.stored()) { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 0d5b4ca154a5a..45bb8090206bf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -122,7 +122,7 @@ public void testBuildThenParse() throws Exception { DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add( new TextFieldMapper.Builder("name").store(true) - .addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true)) + .addMultiField(new TextFieldMapper.Builder("indexed").index(true)) .addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true)) ), indexService.mapperService()).build(indexService.mapperService()); From 8f607071b6e34990d4e42f235d014d02cd680e8b Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 8 Jun 2018 09:24:09 -0700 Subject: [PATCH 09/20] Remove DocumentFieldMappers#smartNameFieldMapper, as it is no longer needed. 
(#31018) --- .../index/mapper/TokenCountFieldMapperTests.java | 4 ++-- .../get/TransportGetFieldMappingsIndexAction.java | 2 +- .../elasticsearch/index/get/ShardGetService.java | 2 +- .../index/mapper/DocumentFieldMappers.java | 13 ------------- .../index/search/QueryParserHelper.java | 2 +- .../index/mapper/BinaryFieldMapperTests.java | 4 ++-- .../index/mapper/DocumentMapperMergeTests.java | 8 ++++---- .../index/mapper/DoubleIndexingDocTests.java | 14 +++++++------- .../index/mapper/DynamicMappingTests.java | 14 +++++++------- .../index/mapper/DynamicTemplatesTests.java | 8 ++++---- 10 files changed, 29 insertions(+), 42 deletions(-) diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index e906f5755de9b..34ad1934a3e09 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -79,9 +79,9 @@ public void testMerge() throws IOException { new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); // previous mapper has not been modified - assertThat(((TokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); + assertThat(((TokenCountFieldMapper) stage1.mappers().getMapper("tc")).analyzer(), equalTo("keyword")); // but the new one has the change - assertThat(((TokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); + assertThat(((TokenCountFieldMapper) stage2.mappers().getMapper("tc")).analyzer(), equalTo("standard")); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 54079592224a0..07ae513351e1b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -186,7 +186,7 @@ private static Map findFieldMappingsByType(Predica } } else { // not a pattern - FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field); + FieldMapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); } else if (request.probablySingleFieldRequest()) { diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 13f6e58cd79ec..a759f6a676714 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -202,7 +202,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[] if (gFields != null && gFields.length > 0) { for (String field : gFields) { - FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field); + FieldMapper fieldMapper = docMapper.mappers().getMapper(field); if (fieldMapper == null) { if (docMapper.objectMappers().get(field) != null) { // Only fail if we know it is a object field, missing paths / fields shouldn't fail. 
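(For call sites touched by this patch the migration is mechanical; below is a minimal hedged sketch - the resolve helper and the docMapper variable are hypothetical, while the two lookup methods are the ones shown in the surrounding hunks:

    static FieldMapper resolve(DocumentMapper docMapper, String field) {
        // was: docMapper.mappers().smartNameFieldMapper(field), which fell back to
        // scanning every FieldMapper when the lookup by full field name missed;
        // a plain map lookup by full field name is now sufficient
        return docMapper.mappers().getMapper(field);
    })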
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index 2a19cb3f8bd40..ea242aca68f44 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -80,19 +80,6 @@ public Collection simpleMatchToFullName(String pattern) { return fields; } - public FieldMapper smartNameFieldMapper(String name) { - FieldMapper fieldMapper = getMapper(name); - if (fieldMapper != null) { - return fieldMapper; - } - for (FieldMapper otherFieldMapper : this) { - if (otherFieldMapper.fieldType().name().equals(name)) { - return otherFieldMapper; - } - } - return null; - } - /** * A smart analyzer used for indexing that takes into account specific analyzers configured * per {@link FieldMapper}. diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index c3a695beff083..b3751afbc9c5d 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -91,7 +91,7 @@ public static Map parseFieldsAndWeights(List fields) { public static FieldMapper getFieldMapper(MapperService mapperService, String field) { DocumentMapper mapper = mapperService.documentMapper(); if (mapper != null) { - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(field); + FieldMapper fieldMapper = mapper.mappers().getMapper(field); if (fieldMapper != null) { return fieldMapper; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index e4cd5731daafa..6e9cb6c0b5980 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -59,7 +59,7 @@ public void testDefaultMapping() throws Exception { DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field"); + FieldMapper fieldMapper = mapper.mappers().getMapper("field"); assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class)); assertThat(fieldMapper.fieldType().stored(), equalTo(false)); } @@ -94,7 +94,7 @@ public void testStoredValue() throws IOException { XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field"); + FieldMapper fieldMapper = mapper.mappers().getMapper("field"); Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue); assertEquals(new BytesArray(value), originalValue); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index e69d48b7b44b7..0234fcb681d82 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -56,11 +56,11 @@ public void test1Merge() throws Exception { DocumentMapper merged = stage1.merge(stage2.mapping()); 
// stage1 mapping should not have been modified - assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue()); - assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue()); + assertThat(stage1.mappers().getMapper("age"), nullValue()); + assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue()); // but merged should - assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue()); - assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue()); + assertThat(merged.mappers().getMapper("age"), notNullValue()); + assertThat(merged.mappers().getMapper("obj1.prop1"), notNullValue()); } public void testMergeObjectDynamic() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 6c83f31f93fe6..c50320900923c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -69,25 +69,25 @@ public void testDoubleIndexingSameDoc() throws Exception { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", context), 10); + TopDocs topDocs = searcher.search(mapper.mappers().getMapper("field1").fieldType().termQuery("value1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field2").fieldType().termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field3").fieldType().termQuery("1.1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field4").fieldType().termQuery("2010-01-01", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("2", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("3", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); writer.close(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index 30923b5a6559b..0d7dde415aaa8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -625,10 +625,10 @@ public void testNumericDetectionEnabled() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper.fieldType().typeName(), equalTo("long")); - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + mapper = defaultMapper.mappers().getMapper("s_double"); assertThat(mapper.fieldType().typeName(), equalTo("float")); } @@ -652,10 +652,10 @@ public void testNumericDetectionDefault() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get()); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper, instanceOf(TextFieldMapper.class)); - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + mapper = defaultMapper.mappers().getMapper("s_double"); assertThat(mapper, instanceOf(TextFieldMapper.class)); } @@ -703,9 +703,9 @@ public void testDateDetectionInheritsFormat() throws Exception { defaultMapper = index.mapperService().documentMapper("type"); - DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date1"); - DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date2"); - DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date3"); + DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().getMapper("date1"); + DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().getMapper("date2"); + DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().getMapper("date3"); // inherited from dynamic date format assertEquals("yyyy-MM-dd", dateMapper1.fieldType().dateTimeFormatter().format()); // inherited from dynamic date format since the mapping in the template did not specify a format diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 64927103e6d1d..d8e8c8e0e3da5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -56,11 +56,11 @@ public void testMatchTypeOnly() throws Exception { docMapper = index.mapperService().documentMapper("person"); DocumentFieldMappers mappers = docMapper.mappers(); - assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue()); - assertEquals(IndexOptions.NONE, mappers.smartNameFieldMapper("s").fieldType().indexOptions()); + assertThat(mappers.getMapper("s"), Matchers.notNullValue()); + assertEquals(IndexOptions.NONE, mappers.getMapper("s").fieldType().indexOptions()); - assertThat(mappers.smartNameFieldMapper("l"), Matchers.notNullValue()); - assertNotSame(IndexOptions.NONE, mappers.smartNameFieldMapper("l").fieldType().indexOptions()); + assertThat(mappers.getMapper("l"), Matchers.notNullValue()); + assertNotSame(IndexOptions.NONE, mappers.getMapper("l").fieldType().indexOptions()); } From b26aae3915e0402ab70504cdc4b8b1bb2b8f10ae Mon Sep 17 00:00:00 2001 
From: Ioannis Kakavas Date: Fri, 8 Jun 2018 20:36:31 +0300 Subject: [PATCH 10/20] Compliant SAML Response destination check (#31175) Make the SAML Response Destination check compliant. Only validate the Destination element of an incoming SAML Response if the Destination is present and the SAML Response is signed. The standard ([1] section 3.5.5.2 and [2] section 3.2.2) does mention that the Destination element is optional and should only be verified when the SAML Response is signed. Some Identity Provider implementations are known not to set a Destination XML Attribute in their SAML responses when those are not signed, so this change also aims to enhance interoperability. [1] https://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf [2] https://docs.oasis-open.org/security/saml/v2.0/saml-core-2.0-os.pdf --- .../authc/saml/SamlAuthenticator.java | 4 +- .../authc/saml/SamlAuthenticatorTests.java | 48 ++++++++++++++++++- 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java index f8826bebcac71..61e451150cd08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java @@ -159,8 +159,10 @@ private String getSessionIndex(Assertion assertion) { private void checkResponseDestination(Response response) { final String asc = getSpConfiguration().getAscUrl(); if (asc.equals(response.getDestination()) == false) { - throw samlException("SAML response " + response.getID() + " is for destination " + response.getDestination() + if (response.isSigned() || Strings.hasText(response.getDestination())) { + throw samlException("SAML response " + response.getID() + " is for destination " + response.getDestination() + " but this realm uses " + asc); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 8bb9890151ff0..913258cf45c5d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -523,13 +523,59 @@ public void testIncorrectDestinationIsRejected() throws Exception { "" + "" + ""; - SamlToken token = token(signDoc(xml)); + SamlToken token = randomBoolean() ?
token(signDoc(xml)) : token(signAssertions(xml, idpSigningCertificatePair)); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(token)); assertThat(exception.getMessage(), containsString("destination")); assertThat(exception.getCause(), nullValue()); assertThat(SamlUtils.isSamlException(exception), is(true)); } + public void testMissingDestinationIsNotRejectedForNotSignedResponse() throws Exception { + Instant now = clock.instant(); + Instant validUntil = now.plusSeconds(30); + String sessionindex = randomId(); + final String xml = "\n" + + "" + + "" + IDP_ENTITY_ID + "" + + "" + + "" + + "" + IDP_ENTITY_ID + "" + + "" + + "randomopaquestring" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + PASSWORD_AUTHN_CTX + "" + + "" + + "" + + "" + + "daredevil" + + "" + + "" + + ""; + SamlToken token = token(signAssertions(xml, idpSigningCertificatePair)); + final SamlAttributes attributes = authenticator.authenticate(token); + assertThat(attributes, notNullValue()); + assertThat(attributes.attributes(), iterableWithSize(1)); + final List uid = attributes.getAttributeValues("urn:oid:0.9.2342.19200300.100.1.1"); + assertThat(uid, contains("daredevil")); + assertThat(uid, iterableWithSize(1)); + assertThat(attributes.name(), notNullValue()); + assertThat(attributes.name().format, equalTo(TRANSIENT)); + } + public void testIncorrectRequestIdIsRejected() throws Exception { Instant now = clock.instant(); Instant validUntil = now.plusSeconds(30); From 12fa0f437a3ea5e26a9d2783fae54622ddea4826 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 8 Jun 2018 12:11:20 -0700 Subject: [PATCH 11/20] Allow to trim all ops above a certain seq# with a term lower than X, post backport fix (#31211) --- .../elasticsearch/action/resync/ResyncReplicationRequest.java | 4 ++-- .../java/org/elasticsearch/index/translog/Checkpoint.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java index 7be3406659e62..d4e2c652fa875 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java @@ -68,7 +68,7 @@ public void readFrom(final StreamInput in) throws IOException { throw new IllegalStateException("resync replication request serialization is broken in 6.0.0"); } super.readFrom(in); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { trimAboveSeqNo = in.readZLong(); } else { trimAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; @@ -79,7 +79,7 @@ public void readFrom(final StreamInput in) throws IOException { @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeZLong(trimAboveSeqNo); } out.writeArray(Translog.Operation::writeOperation, operations); diff --git a/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index 21a400f9f65dc..364203f898cc1 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -53,7 +53,7 @@ final class Checkpoint { private static 
final String CHECKPOINT_CODEC = "ckp"; - // size of 7.0.0 checkpoint + // size of 6.4.0 checkpoint static final int V3_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops From 00b0e1006320cf8d11d2aaf2f4233dab1bcc1be3 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Fri, 8 Jun 2018 13:53:35 -0700 Subject: [PATCH 12/20] Remove DocumentFieldMappers#simpleMatchToFullName. (#31041) * Remove DocumentFieldMappers#simpleMatchToFullName, as it is duplicative of MapperService#simpleMatchToIndexNames. * Rename MapperService#simpleMatchToIndexNames -> simpleMatchToFullName for consistency. * Simplify EsIntegTestCase#assertConcreteMappingsOnAll to accept concrete fields instead of wildcard patterns. --- .../TransportFieldCapabilitiesIndexAction.java | 2 +- .../index/mapper/DocumentFieldMappers.java | 13 ------------- .../elasticsearch/index/mapper/MapperService.java | 2 +- .../index/query/QueryShardContext.java | 2 +- .../index/termvectors/TermVectorsService.java | 2 +- .../fetch/subphase/highlight/HighlightPhase.java | 3 +-- .../index/mapper/FieldNamesFieldTypeTests.java | 2 +- .../org/elasticsearch/test/ESIntegTestCase.java | 12 ++++++------ ...ecurityIndexSearcherWrapperIntegrationTests.java | 2 +- 9 files changed, 13 insertions(+), 27 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index b24dc685df6d0..f1a1dc451406e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -76,7 +76,7 @@ protected FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesI MapperService mapperService = indicesService.indexServiceSafe(shardId.getIndex()).mapperService(); Set fieldNames = new HashSet<>(); for (String field : request.fields()) { - fieldNames.addAll(mapperService.simpleMatchToIndexNames(field)); + fieldNames.addAll(mapperService.simpleMatchToFullName(field)); } Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); Map responseMap = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index ea242aca68f44..9193ca209ba23 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -20,16 +20,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; -import java.util.Set; public final class DocumentFieldMappers implements Iterable { @@ -70,16 +67,6 @@ public FieldMapper getMapper(String field) { return fieldMappers.get(field); } - public Collection simpleMatchToFullName(String pattern) { - Set fields = new HashSet<>(); - for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { - fields.add(fieldMapper.fieldType().name()); - } - } - return fields; - } - /** * A smart analyzer used for indexing that takes into 
account specific analyzers configured * per {@link FieldMapper}. diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index a06288b67e3bd..8988238d9277e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -721,7 +721,7 @@ public MappedFieldType fullName(String fullName) { * Returns all the fields that match the given pattern. If the pattern is prefixed with a type * then the fields will be returned with a type prefix. */ - public Collection simpleMatchToIndexNames(String pattern) { + public Collection simpleMatchToFullName(String pattern) { if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return Collections.singletonList(pattern); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 6bb69c0cab990..598a6f38a2ef8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -198,7 +198,7 @@ public void setIsFilter(boolean isFilter) { * type then the fields will be returned with a type prefix. */ public Collection simpleMatchToIndexNames(String pattern) { - return mapperService.simpleMatchToIndexNames(pattern); + return mapperService.simpleMatchToFullName(pattern); } public MappedFieldType fieldMapper(String name) { diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 0148ab44d1b3e..c13c56beb5a20 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -155,7 +155,7 @@ else if (docIdAndVersion != null) { private static void handleFieldWildcards(IndexShard indexShard, TermVectorsRequest request) { Set fieldNames = new HashSet<>(); for (String pattern : request.selectedFields()) { - fieldNames.addAll(indexShard.mapperService().simpleMatchToIndexNames(pattern)); + fieldNames.addAll(indexShard.mapperService().simpleMatchToFullName(pattern)); } request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY)); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 16cc6a50e8c7e..4343a1ebca564 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -53,8 +53,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { for (SearchContextHighlight.Field field : context.highlight().fields()) { Collection fieldNamesToHighlight; if (Regex.isSimpleMatchPattern(field.field())) { - DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().getType()); - fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field()); + fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field()); } else { fieldNamesToHighlight = Collections.singletonList(field.field()); } diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 945407fc39492..4ae03138764d7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -63,7 +63,7 @@ public void testTermQuery() { MapperService mapperService = mock(MapperService.class); when(mapperService.fullName("_field_names")).thenReturn(fieldNamesFieldType); when(mapperService.fullName("field_name")).thenReturn(fieldType); - when(mapperService.simpleMatchToIndexNames("field_name")).thenReturn(Collections.singletonList("field_name")); + when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singletonList("field_name")); QueryShardContext queryShardContext = new QueryShardContext(0, indexSettings, null, null, mapperService, null, null, null, null, null, null, () -> 0L, null); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 8b58cea4d0a54..979bfccdb64b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -126,7 +126,8 @@ import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MockFieldFilterPlugin; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -823,7 +824,7 @@ public void waitNoPendingTasksOnAll() throws Exception { } /** - * Waits till a (pattern) field name mappings concretely exists on all nodes. Note, this waits for the current + * Waits until mappings for the provided fields exist on all nodes. Note, this waits for the current * started shards and checks for concrete mappings. */ public void assertConcreteMappingsOnAll(final String index, final String type, final String... 
fieldNames) throws Exception {
@@ -833,11 +834,10 @@ public void assertConcreteMappingsOnAll(final String index, final String type, f
             IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node);
             IndexService indexService = indicesService.indexService(resolveIndex(index));
             assertThat("index service doesn't exists on " + node, indexService, notNullValue());
-            DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
-            assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue());
+            MapperService mapperService = indexService.mapperService();
             for (String fieldName : fieldNames) {
-                Collection<String> matches = documentMapper.mappers().simpleMatchToFullName(fieldName);
-                assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable()));
+                MappedFieldType fieldType = mapperService.fullName(fieldName);
+                assertNotNull("field " + fieldName + " doesn't exist on " + node, fieldType);
             }
         }
         assertMappingOnMaster(index, type, fieldNames);
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java
index 08c1d010d99dd..9abaaf0ecf0e2 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java
@@ -59,7 +59,7 @@ public void testDLS() throws Exception {
         MapperService mapperService = mock(MapperService.class);
         ScriptService scriptService = mock(ScriptService.class);
         when(mapperService.documentMapper()).thenReturn(null);
-        when(mapperService.simpleMatchToIndexNames(anyString()))
+        when(mapperService.simpleMatchToFullName(anyString()))
             .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0]));
         ThreadContext threadContext = new ThreadContext(Settings.EMPTY);

From f6d69c9d40ea401a2347a2dd6594ffdb1ae046b3 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 8 Jun 2018 17:15:28 -0400
Subject: [PATCH 13/20] Add licenses for transport-nio (#31218)

We have a direct dependency on Netty here so we should be attaching the
licenses. This is needed for accurate dependency-license reporting.
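The substantive build change below is the `dependencyLicenses` mapping, which
lets every `netty-*` jar be checked against a single shared `netty-LICENSE.txt`
and `netty-NOTICE.txt` pair instead of requiring one license file per jar. A
small Java sketch of those mapping semantics (illustrative only; the real
resolution lives in Elasticsearch's Gradle `dependencyLicenses` task, and the
fallback rule here is an assumption for the sketch):

    import java.util.regex.Pattern;

    // Mirrors the `mapping from: /netty-.*/, to: 'netty'` rule in the
    // build.gradle hunk below: any jar whose name matches the regex is
    // checked against the shared 'netty' license/notice files.
    final class LicenseMappingSketch {
        private static final Pattern NETTY = Pattern.compile("netty-.*");

        static String licenseBaseName(String jarName) {
            if (NETTY.matcher(jarName).matches()) {
                return "netty";
            }
            // Assumed default for this sketch: per-jar license files keyed by
            // the jar name with its version suffix stripped.
            return jarName.replaceAll("-\\d.*$", "");
        }

        public static void main(String[] args) {
            System.out.println(licenseBaseName("netty-buffer-4.1.16.Final"));    // netty
            System.out.println(licenseBaseName("netty-transport-4.1.16.Final")); // netty
        }
    }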
--- plugins/transport-nio/build.gradle | 6 +- .../transport-nio/licenses/netty-LICENSE.txt | 202 ++++++++++++++++++ .../transport-nio/licenses/netty-NOTICE.txt | 116 ++++++++++ .../netty-buffer-4.1.16.Final.jar.sha1 | 1 + .../netty-codec-4.1.16.Final.jar.sha1 | 1 + .../netty-codec-http-4.1.16.Final.jar.sha1 | 1 + .../netty-common-4.1.16.Final.jar.sha1 | 1 + .../netty-handler-4.1.16.Final.jar.sha1 | 1 + .../netty-resolver-4.1.16.Final.jar.sha1 | 1 + .../netty-transport-4.1.16.Final.jar.sha1 | 1 + 10 files changed, 329 insertions(+), 2 deletions(-) create mode 100644 plugins/transport-nio/licenses/netty-LICENSE.txt create mode 100644 plugins/transport-nio/licenses/netty-NOTICE.txt create mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1 diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index e278ebf47983e..856a8552b8e5a 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -22,8 +22,6 @@ esplugin { classname 'org.elasticsearch.transport.nio.NioTransportPlugin' } -dependencyLicenses.enabled = false - compileJava.options.compilerArgs << "-Xlint:-try" compileTestJava.options.compilerArgs << "-Xlint:-rawtypes,-unchecked" @@ -40,6 +38,10 @@ dependencies { compile "io.netty:netty-transport:4.1.16.Final" } +dependencyLicenses { + mapping from: /netty-.*/, to: 'netty' +} + thirdPartyAudit.excludes = [ // classes are missing diff --git a/plugins/transport-nio/licenses/netty-LICENSE.txt b/plugins/transport-nio/licenses/netty-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/transport-nio/licenses/netty-NOTICE.txt b/plugins/transport-nio/licenses/netty-NOTICE.txt new file mode 100644 index 0000000000000..5bbf91a14de23 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-NOTICE.txt @@ -0,0 +1,116 @@ + + The Netty Project + ================= + +Please visit the Netty web site for more information: + + * http://netty.io/ + +Copyright 2011 The Netty Project + +The Netty Project licenses this file to you under the Apache License, +version 2.0 (the "License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +License for the specific language governing permissions and limitations +under the License. 
+
+Also, please refer to each LICENSE.<component>.txt file, which is located in
+the 'license' directory of the distribution file, for the license terms of the
+components that this product depends on.
+
+-------------------------------------------------------------------------------
+This product contains the extensions to Java Collections Framework which has
+been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene:
+
+  * LICENSE:
+    * license/LICENSE.jsr166y.txt (Public Domain)
+  * HOMEPAGE:
+    * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/
+    * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/
+
+This product contains a modified version of Robert Harder's Public Domain
+Base64 Encoder and Decoder, which can be obtained at:
+
+  * LICENSE:
+    * license/LICENSE.base64.txt (Public Domain)
+  * HOMEPAGE:
+    * http://iharder.sourceforge.net/current/java/base64/
+
+This product contains a modified version of 'JZlib', a re-implementation of
+zlib in pure Java, which can be obtained at:
+
+  * LICENSE:
+    * license/LICENSE.jzlib.txt (BSD Style License)
+  * HOMEPAGE:
+    * http://www.jcraft.com/jzlib/
+
+This product contains a modified version of 'Webbit', a Java event based
+WebSocket and HTTP server:
+
+  * LICENSE:
+    * license/LICENSE.webbit.txt (BSD License)
+  * HOMEPAGE:
+    * https://github.com/joewalnes/webbit
+
+This product optionally depends on 'Protocol Buffers', Google's data
+interchange format, which can be obtained at:
+
+  * LICENSE:
+    * license/LICENSE.protobuf.txt (New BSD License)
+  * HOMEPAGE:
+    * http://code.google.com/p/protobuf/
+
+This product optionally depends on 'Bouncy Castle Crypto APIs' to generate
+a temporary self-signed X.509 certificate when the JVM does not provide the
+equivalent functionality.
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..c546222971985 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +63b5fa95c74785e16f2c30ce268bc222e35c8cb5 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..1e6c241ea0b17 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +d84a1f21768b7309c2954521cf5a1f46c2309eb1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..71c33af1c5fc2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +d64312378b438dfdad84267c599a053327c6f02a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..3edf5fcea59b3 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +177a6b30cca92f6f5f9873c9befd681377a4c328 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..cba27387268d1 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +fec0e63e7dd7f4eeef7ea8dc47a1ff32dfc7ebc2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..3571d2ecfdc48 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +f6eb553b53fb3a90a8ac1170697093fed82eae28 \ No newline at end of file diff --git 
a/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1
new file mode 100644
index 0000000000000..e502d4c77084c
--- /dev/null
+++ b/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1
@@ -0,0 +1 @@
+3c8ee2c4d4a1cbb947a5c184c7aeb2204260958b
\ No newline at end of file

From cdb486ae70053450e6a07ea45b5322741240e6c9 Mon Sep 17 00:00:00 2001
From: Sue Gallagher <36747279+Sue-Gallagher@users.noreply.github.com>
Date: Fri, 8 Jun 2018 14:41:01 -0700
Subject: [PATCH 14/20] [DOCS] Added 'fail_on_unsupported_field' param to MLT. Closes #28008 (#31160)

* [DOCS] Added 'fail_on_unsupported_field' param to MLT. Closes #28008

---
 docs/reference/query-dsl/mlt-query.asciidoc | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc
index 6cb984263e6f7..bd66c7f071cd4 100644
--- a/docs/reference/query-dsl/mlt-query.asciidoc
+++ b/docs/reference/query-dsl/mlt-query.asciidoc
@@ -241,6 +241,13 @@ number of terms that must match. The syntax is the same as the
 <<query-dsl-minimum-should-match,minimum should match>>. (Defaults to `"30%"`).
 
+`fail_on_unsupported_field`::
+Controls whether the query should fail (throw an exception) if any of the
+specified fields are not of the supported types
+(`text` or `keyword`). Set this to `false` to ignore the field and continue
+processing. Defaults to
+`true`.
+
 `boost_terms`::
 Each term in the formed query could be further boosted by its tf-idf score.
 This sets the boost factor to use when using this feature. Defaults to

From bdb0fb25555381299d80fed7efcbcab97a9d172f Mon Sep 17 00:00:00 2001
From: Lee Hinman
Date: Fri, 8 Jun 2018 17:19:41 -0600
Subject: [PATCH 15/20] Fully encapsulate LocalCheckpointTracker inside of the engine (#31213)

* Fully encapsulate LocalCheckpointTracker inside of the engine

This changes the Engine interface so that it no longer exposes the
`LocalCheckpointTracker`, instead exposing the pieces callers need (like
retrieving the local checkpoint) as individual methods.
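As a rough illustration of that shape (class and method names here are
simplified stand-ins based on the diff below, not the actual Elasticsearch
types), the tracker becomes a private field and callers go through narrow
engine-level methods:

    // Simplified stand-in for LocalCheckpointTracker; the real class tracks
    // individual sequence numbers, this sketch keeps only the checkpoint.
    final class TrackerSketch {
        private long checkpoint = -1; // -1 standing in for "no ops performed"

        synchronized long getCheckpoint() {
            return checkpoint;
        }

        synchronized void resetCheckpoint(long newCheckpoint) {
            checkpoint = newCheckpoint;
        }
    }

    // Before this patch callers wrote engine.getLocalCheckpointTracker().getCheckpoint();
    // after it, the tracker never escapes the engine.
    final class EngineSketch {
        private final TrackerSketch tracker = new TrackerSketch();

        long getLocalCheckpoint() {
            return tracker.getCheckpoint();
        }

        void resetLocalCheckpoint(long localCheckpoint) {
            tracker.resetCheckpoint(localCheckpoint);
        }
    }

The one component that still genuinely needs the tracker (tests) keeps a
package-private accessor, which is the compromise the InternalEngine hunk
below makes.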
--- .../elasticsearch/index/engine/Engine.java | 25 +++- .../index/engine/InternalEngine.java | 24 +++- .../elasticsearch/index/shard/IndexShard.java | 22 ++-- .../index/shard/LocalShardSnapshot.java | 2 +- .../cluster/routing/PrimaryAllocationIT.java | 3 +- .../index/engine/InternalEngineTests.java | 117 +++++++++--------- .../index/shard/IndexShardTests.java | 3 +- .../SharedClusterSnapshotRestoreIT.java | 3 +- .../index/engine/EngineTestCase.java | 9 ++ .../index/shard/IndexShardTestCase.java | 2 +- 10 files changed, 129 insertions(+), 81 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 5283975be7b12..0b6eea26e0357 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -62,7 +62,7 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; @@ -635,11 +635,28 @@ public CommitStats commitStats() { } /** - * The sequence number service for this engine. + * @return the local checkpoint for this Engine + */ + public abstract long getLocalCheckpoint(); + + /** + * Waits for all operations up to the provided sequence number to complete. * - * @return the sequence number service + * @param seqNo the sequence number that the checkpoint must advance to before this method returns + * @throws InterruptedException if the thread was interrupted while blocking on the condition + */ + public abstract void waitForOpsToComplete(long seqNo) throws InterruptedException; + + /** + * Reset the local checkpoint in the tracker to the given local checkpoint + * @param localCheckpoint the new checkpoint to be set + */ + public abstract void resetLocalCheckpoint(long localCheckpoint); + + /** + * @return a {@link SeqNoStats} object, using local state and the supplied global checkpoint */ - public abstract LocalCheckpointTracker getLocalCheckpointTracker(); + public abstract SeqNoStats getSeqNoStats(long globalCheckpoint); /** * Returns the latest global checkpoint value that has been persisted in the underlying storage (i.e. translog's checkpoint) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 88e7160845266..53f209ccf6306 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -65,6 +65,7 @@ import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.OnGoingMerge; import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; @@ -2185,10 +2186,31 @@ public MergeStats getMergeStats() { return mergeScheduler.stats(); } - public final LocalCheckpointTracker getLocalCheckpointTracker() { + // Used only for testing! 
Package private to prevent anyone else from using it + LocalCheckpointTracker getLocalCheckpointTracker() { return localCheckpointTracker; } + @Override + public long getLocalCheckpoint() { + return localCheckpointTracker.getCheckpoint(); + } + + @Override + public void waitForOpsToComplete(long seqNo) throws InterruptedException { + localCheckpointTracker.waitForOpsToComplete(seqNo); + } + + @Override + public void resetLocalCheckpoint(long localCheckpoint) { + localCheckpointTracker.resetCheckpoint(localCheckpoint); + } + + @Override + public SeqNoStats getSeqNoStats(long globalCheckpoint) { + return localCheckpointTracker.getStats(globalCheckpoint); + } + /** * Returns the number of times a version was looked up either from the index. * Note this is only available if assertions are enabled diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 8583b6b4c9b33..34230be14cb7e 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -405,7 +405,7 @@ public void updateShardState(final ShardRouting newRouting, assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { - replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); } changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); @@ -479,8 +479,7 @@ public void updateShardState(final ShardRouting newRouting, */ engine.rollTranslogGeneration(); engine.fillSeqNoGaps(newPrimaryTerm); - replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), - getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), getLocalCheckpoint()); primaryReplicaSyncer.accept(this, new ActionListener() { @Override public void onResponse(ResyncTask resyncTask) { @@ -506,7 +505,7 @@ public void onFailure(Exception e) { } }, e -> failShard("exception during primary term transition", e)); - replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); primaryTerm = newPrimaryTerm; } } @@ -873,7 +872,7 @@ public CommitStats commitStats() { @Nullable public SeqNoStats seqNoStats() { Engine engine = getEngineOrNull(); - return engine == null ? null : engine.getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint()); + return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } public IndexingStats indexingStats(String... 
types) { @@ -1707,7 +1706,7 @@ public void updateGlobalCheckpointForShard(final String allocationId, final long * @throws InterruptedException if the thread was interrupted while blocking on the condition */ public void waitForOpsToComplete(final long seqNo) throws InterruptedException { - getEngine().getLocalCheckpointTracker().waitForOpsToComplete(seqNo); + getEngine().waitForOpsToComplete(seqNo); } /** @@ -1740,7 +1739,7 @@ public void markAllocationIdAsInSync(final String allocationId, final long local * @return the local checkpoint */ public long getLocalCheckpoint() { - return getEngine().getLocalCheckpointTracker().getCheckpoint(); + return getEngine().getLocalCheckpoint(); } /** @@ -1781,7 +1780,7 @@ public void maybeSyncGlobalCheckpoint(final String reason) { return; } // only sync if there are not operations in flight - final SeqNoStats stats = getEngine().getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint()); + final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) { final ObjectLongMap globalCheckpoints = getInSyncGlobalCheckpoints(); final String allocationId = routingEntry().allocationId().getId(); @@ -1818,7 +1817,7 @@ public ReplicationGroup getReplicationGroup() { */ public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) { verifyReplicationTarget(); - final long localCheckpoint = getEngine().getLocalCheckpointTracker().getCheckpoint(); + final long localCheckpoint = getLocalCheckpoint(); if (globalCheckpoint > localCheckpoint) { /* * This can happen during recovery when the shard has started its engine but recovery is not finalized and is receiving global @@ -1847,8 +1846,7 @@ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext p verifyPrimary(); assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting; assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) && - getEngine().getLocalCheckpointTracker().getCheckpoint() == - primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); + getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex } @@ -2234,7 +2232,7 @@ public void acquireReplicaOperationPermit(final long operationPrimaryTerm, final operationPrimaryTerm, getLocalCheckpoint(), localCheckpoint); - getEngine().getLocalCheckpointTracker().resetCheckpoint(localCheckpoint); + getEngine().resetLocalCheckpoint(localCheckpoint); getEngine().rollTranslogGeneration(); }); globalCheckpointUpdated = true; diff --git a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java index d7105c0c14d38..09391c9bc9643 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java @@ -62,7 +62,7 @@ Index getIndex() { } long maxSeqNo() { - return shard.getEngine().getLocalCheckpointTracker().getMaxSeqNo(); + return shard.getEngine().getSeqNoStats(-1).getMaxSeqNo(); } long maxUnsafeAutoIdTimestamp() { diff --git 
a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 407212936d1d6..90173455c3be3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; @@ -350,7 +351,7 @@ public void testPrimaryReplicaResyncFailed() throws Exception { assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); } final IndexShard oldPrimaryShard = internalCluster().getInstance(IndicesService.class, oldPrimary).getShardOrNull(shardId); - IndexShardTestCase.getEngine(oldPrimaryShard).getLocalCheckpointTracker().generateSeqNo(); // Make gap in seqno. + EngineTestCase.generateNewSeqNo(IndexShardTestCase.getEngine(oldPrimaryShard)); // Make gap in seqno. long moreDocs = scaledRandomIntBetween(1, 10); for (int i = 0; i < moreDocs; i++) { IndexResponse indexResult = index("test", "doc", Long.toString(numDocs + i)); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 26da424460ef2..d67148dbff2fc 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -225,7 +225,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { new BytesArray("{}".getBytes(Charset.defaultCharset())), null); operation = randomBoolean() ? appendOnlyPrimary(doc, false, 1) - : appendOnlyReplica(doc, false, 1, engine.getLocalCheckpointTracker().generateSeqNo()); + : appendOnlyReplica(doc, false, 1, generateNewSeqNo(engine)); engine.index(operation); assertTrue("safe access should be required", engine.isSafeAccessRequired()); assertEquals(1, engine.getVersionMapSize()); // now we add this to the map @@ -1018,7 +1018,7 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { engine.index(indexForDoc(doc)); boolean inSync = randomBoolean(); if (inSync) { - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); } engine.flush(); @@ -1036,7 +1036,7 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 
4L : 1L)); assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(4L)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.flush(true, true); assertThat(engine.getTranslog().currentFileGeneration(), equalTo(5L)); assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(5L)); @@ -2058,12 +2058,12 @@ public void testSeqNoAndCheckpoints() throws IOException { final Engine.DeleteResult result = initialEngine.delete(delete); if (result.getResultType() == Engine.Result.Type.SUCCESS) { assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1)); indexedIds.remove(id); primarySeqNo++; } else { assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); } } else { // index a document @@ -2076,12 +2076,12 @@ public void testSeqNoAndCheckpoints() throws IOException { final Engine.IndexResult result = initialEngine.index(index); if (result.getResultType() == Engine.Result.Type.SUCCESS) { assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1)); indexedIds.add(id); primarySeqNo++; } else { assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); } } @@ -2090,7 +2090,7 @@ public void testSeqNoAndCheckpoints() throws IOException { replicaLocalCheckpoint = randomIntBetween(Math.toIntExact(replicaLocalCheckpoint), Math.toIntExact(primarySeqNo)); } gcpTracker.updateLocalCheckpoint(primary.allocationId().getId(), - initialEngine.getLocalCheckpointTracker().getCheckpoint()); + initialEngine.getLocalCheckpoint()); gcpTracker.updateLocalCheckpoint(replica.allocationId().getId(), replicaLocalCheckpoint); if (rarely()) { @@ -2103,8 +2103,8 @@ public void testSeqNoAndCheckpoints() throws IOException { logger.info("localcheckpoint {}, global {}", replicaLocalCheckpoint, primarySeqNo); globalCheckpoint = gcpTracker.getGlobalCheckpoint(); - assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(primarySeqNo, initialEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(primarySeqNo, initialEngine.getLocalCheckpoint()); assertThat(globalCheckpoint, equalTo(replicaLocalCheckpoint)); assertThat( @@ -2126,7 +2126,7 @@ public void testSeqNoAndCheckpoints() throws IOException { try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){ recoveringEngine.recoverFromTranslog(); - assertEquals(primarySeqNo, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); + assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertThat( Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), 
equalTo(primarySeqNo)); @@ -2139,9 +2139,9 @@ public void testSeqNoAndCheckpoints() throws IOException { // that the committed max seq no is equivalent to what the current primary seq no is, as all data // we have assigned sequence numbers to should be in the commit equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(recoveringEngine.getLocalCheckpoint(), equalTo(primarySeqNo)); + assertThat(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(generateNewSeqNo(recoveringEngine), equalTo(primarySeqNo + 1)); } } @@ -2444,7 +2444,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { try (InternalEngine engine = createEngine(config)) { engine.index(firstIndexRequest); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); @@ -2607,7 +2607,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s engine.recoverFromTranslog(); final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc1)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); throwErrorOnCommit.set(true); FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush); assertThat(e.getCause().getMessage(), equalTo("power's out")); @@ -2665,7 +2665,7 @@ private Path[] filterExtraFSFiles(Path[] files) { } public void testTranslogReplay() throws IOException { - final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpointTracker().getCheckpoint(); + final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpoint(); final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); @@ -3600,7 +3600,7 @@ private ToLongBiFunction getStallingSeqNoGenerator( final AtomicBoolean stall, final AtomicLong expectedLocalCheckpoint) { return (engine, operation) -> { - final long seqNo = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqNo = generateNewSeqNo(engine); final CountDownLatch latch = latchReference.get(); if (stall.get()) { try { @@ -3652,8 +3652,8 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro } } - assertThat(initialEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint.get())); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo((long) (docs - 1))); + assertThat(initialEngine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint.get())); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo((long) (docs - 1))); initialEngine.flush(true, true); latchReference.get().countDown(); @@ -3667,7 +3667,7 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro try (Engine recoveringEngine = new 
InternalEngine(initialEngine.config())) { recoveringEngine.recoverFromTranslog(); recoveringEngine.fillSeqNoGaps(2); - assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); + assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); } } @@ -3746,7 +3746,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio expectedLocalCheckpoint = numberOfOperations - 1; } - assertThat(engine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint)); + assertThat(engine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); try (Engine.GetResult result = engine.get(new Engine.Get(true, false, "type", "2", uid), searcherFactory)) { assertThat(result.exists(), equalTo(exists)); } @@ -3776,11 +3776,11 @@ protected long doGenerateSeqNoForOperation(Operation operation) { final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get()); final String reason = randomAlphaOfLength(16); noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason)); - assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1))); + assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 1))); assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled)); noOpEngine.noOp( new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(), randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason)); - assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 2))); + assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 2))); assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1)); // skip to the op that we added to the translog Translog.Operation op; @@ -3933,17 +3933,17 @@ public void markSeqNoAsCompleted(long seqNo) { actualEngine.rollTranslogGeneration(); } } - final long currentLocalCheckpoint = actualEngine.getLocalCheckpointTracker().getCheckpoint(); + final long currentLocalCheckpoint = actualEngine.getLocalCheckpoint(); final long resetLocalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(currentLocalCheckpoint)); - actualEngine.getLocalCheckpointTracker().resetCheckpoint(resetLocalCheckpoint); + actualEngine.resetLocalCheckpoint(resetLocalCheckpoint); completedSeqNos.clear(); actualEngine.restoreLocalCheckpointFromTranslog(); final Set intersection = new HashSet<>(expectedCompletedSeqNos); intersection.retainAll(LongStream.range(resetLocalCheckpoint + 1, operations).boxed().collect(Collectors.toSet())); assertThat(completedSeqNos, equalTo(intersection)); - assertThat(actualEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(currentLocalCheckpoint)); - assertThat(actualEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo((long) operations)); + assertThat(actualEngine.getLocalCheckpoint(), equalTo(currentLocalCheckpoint)); + assertThat(generateNewSeqNo(actualEngine), equalTo((long) operations)); } finally { IOUtils.close(actualEngine); } @@ -3967,7 +3967,7 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { replicaEngine.index(replicaIndexForDoc(doc, 1, indexResult.getSeqNo(), false)); } } - checkpointOnReplica = replicaEngine.getLocalCheckpointTracker().getCheckpoint(); + checkpointOnReplica = replicaEngine.getLocalCheckpoint(); } finally { IOUtils.close(replicaEngine); } 
@@ -3977,16 +3977,16 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); Engine recoveringEngine = null; try { - assertEquals(docs - 1, engine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(docs - 1, engine.getLocalCheckpointTracker().getCheckpoint()); - assertEquals(maxSeqIDOnReplica, replicaEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(docs - 1, engine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(docs - 1, engine.getLocalCheckpoint()); + assertEquals(maxSeqIDOnReplica, replicaEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpoint()); trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get)); recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); assertEquals(numDocsOnReplica, recoveringEngine.getTranslog().stats().getUncommittedOperations()); recoveringEngine.recoverFromTranslog(); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2)); // now snapshot the tlog and ensure the primary term is updated @@ -4001,10 +4001,10 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { } } - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); if ((flushed = randomBoolean())) { - globalCheckpoint.set(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); + globalCheckpoint.set(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); recoveringEngine.getTranslog().sync(); recoveringEngine.flush(true, true); } @@ -4021,11 +4021,11 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { assertThat(recoveringEngine.getTranslog().stats().getUncommittedOperations(), equalTo(0)); } recoveringEngine.recoverFromTranslog(); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals(0, recoveringEngine.fillSeqNoGaps(3)); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); } finally { IOUtils.close(recoveringEngine); } @@ -4208,7 +4208,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s // Advance the global checkpoint during the flush to create 
a lag between a persisted global checkpoint in the translog // (this value is visible to the deletion policy) and an in memory global checkpoint in the SequenceNumbersService. if (rarely()) { - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpointTracker().getCheckpoint())); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpoint())); } super.commitIndexWriter(writer, translog, syncId); } @@ -4220,7 +4220,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); engine.index(indexForDoc(testParsedDocument(Integer.toString(docId), null, document, B_1, null))); if (frequently()) { - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint())); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint())); engine.getTranslog().sync(); } if (frequently()) { @@ -4354,11 +4354,11 @@ public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception { engine.flush(false, randomBoolean()); List commits = DirectoryReader.listCommits(store.directory()); // Global checkpoint advanced but not enough - all commits are kept. - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint() - 1)); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint() - 1)); engine.syncTranslog(); assertThat(DirectoryReader.listCommits(store.directory()), equalTo(commits)); // Global checkpoint advanced enough - only the last commit is kept. - globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpointTracker().getCheckpoint(), Long.MAX_VALUE)); + globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpoint(), Long.MAX_VALUE)); engine.syncTranslog(); assertThat(DirectoryReader.listCommits(store.directory()), contains(commits.get(commits.size() - 1))); } @@ -4382,7 +4382,7 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { for (int i = 0; i < numSnapshots; i++) { snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit. } - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.syncTranslog(); final List commits = DirectoryReader.listCommits(store.directory()); for (int i = 0; i < numSnapshots - 1; i++) { @@ -4432,13 +4432,13 @@ public void testShouldPeriodicallyFlush() throws Exception { assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(0)); // If the new index commit still points to the same translog generation as the current index commit, // we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes. 
- engine.getLocalCheckpointTracker().generateSeqNo(); // create a gap here + generateNewSeqNo(engine); // create a gap here for (int id = 0; id < numDocs; id++) { if (randomBoolean()) { translog.rollGeneration(); } final ParsedDocument doc = testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null); - engine.index(replicaIndexForDoc(doc, 2L, engine.getLocalCheckpointTracker().generateSeqNo(), false)); + engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false)); if (engine.shouldPeriodicallyFlush()) { engine.flush(); assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo))); @@ -4459,7 +4459,7 @@ public void testStressShouldPeriodicallyFlush() throws Exception { engine.onSettingsChanged(); final int numOps = scaledRandomIntBetween(100, 10_000); for (int i = 0; i < numOps; i++) { - final long localCheckPoint = engine.getLocalCheckpointTracker().getCheckpoint(); + final long localCheckPoint = engine.getLocalCheckpoint(); final long seqno = randomLongBetween(Math.max(0, localCheckPoint), localCheckPoint + 5); final ParsedDocument doc = testParsedDocument(Long.toString(seqno), null, testDocumentWithTextField(), SOURCE, null); engine.index(replicaIndexForDoc(doc, 1L, seqno, false)); @@ -4546,9 +4546,9 @@ public void testPruneOnlyDeletesAtMostLocalCheckpoint() throws Exception { } final long deleteBatch = between(10, 20); final long gapSeqNo = randomLongBetween( - engine.getLocalCheckpointTracker().getMaxSeqNo() + 1, engine.getLocalCheckpointTracker().getMaxSeqNo() + deleteBatch); + engine.getSeqNoStats(-1).getMaxSeqNo() + 1, engine.getSeqNoStats(-1).getMaxSeqNo() + deleteBatch); for (int i = 0; i < deleteBatch; i++) { - final long seqno = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqno = generateNewSeqNo(engine); if (seqno != gapSeqNo) { if (randomBoolean()) { clock.incrementAndGet(); @@ -4595,7 +4595,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument("append-only" + i, null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { - engine.index(appendOnlyReplica(doc, randomBoolean(), 1, engine.getLocalCheckpointTracker().generateSeqNo())); + engine.index(appendOnlyReplica(doc, randomBoolean(), 1, generateNewSeqNo(engine))); } else { engine.index(appendOnlyPrimary(doc, randomBoolean(), randomNonNegativeLong())); } @@ -4612,7 +4612,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { for (int i = 0; i < numOps; i++) { ParsedDocument parsedDocument = testParsedDocument(Integer.toString(i), null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { // On replica - update max_seqno for non-append-only operations - final long seqno = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqno = generateNewSeqNo(engine); final Engine.Index doc = replicaIndexForDoc(parsedDocument, 1, seqno, randomBoolean()); if (randomBoolean()) { engine.index(doc); @@ -4631,7 +4631,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { } appendOnlyIndexer.join(120_000); assertThat(engine.getMaxSeqNoOfNonAppendOnlyOperations(), equalTo(maxSeqNoOfNonAppendOnly)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.syncTranslog(); engine.flush(); } @@ -4643,15 +4643,14 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { public void 
testSkipOptimizeForExposedAppendOnlyOperations() throws Exception { long lookupTimes = 0L; - final LocalCheckpointTracker localCheckpointTracker = engine.getLocalCheckpointTracker(); final int initDocs = between(0, 10); for (int i = 0; i < initDocs; i++) { index(engine, i); lookupTimes++; } // doc1 is delayed and arrived after a non-append-only op. - final long seqNoAppendOnly1 = localCheckpointTracker.generateSeqNo(); - final long seqnoNormalOp = localCheckpointTracker.generateSeqNo(); + final long seqNoAppendOnly1 = generateNewSeqNo(engine); + final long seqnoNormalOp = generateNewSeqNo(engine); if (randomBoolean()) { engine.index(replicaIndexForDoc( testParsedDocument("d", null, testDocumentWithTextField(), SOURCE, null), 1, seqnoNormalOp, false)); @@ -4670,7 +4669,7 @@ public void testSkipOptimizeForExposedAppendOnlyOperations() throws Exception { // optimize for other append-only 2 (its seqno > max_seqno of non-append-only) - do not look up in version map. engine.index(appendOnlyReplica(testParsedDocument("append-only-2", null, testDocumentWithTextField(), SOURCE, null), - false, randomNonNegativeLong(), localCheckpointTracker.generateSeqNo())); + false, randomNonNegativeLong(), generateNewSeqNo(engine))); assertThat(engine.getNumVersionLookups(), equalTo(lookupTimes)); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2f420dcbc4d12..31afb5ed42fc0 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -75,6 +75,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.Segment; @@ -847,7 +848,7 @@ public void testGlobalCheckpointSync() throws IOException { recoverReplica(replicaShard, primaryShard); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { - primaryShard.getEngine().getLocalCheckpointTracker().generateSeqNo(); + EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); } final long checkpoint = rarely() ? 
maxSeqNo - scaledRandomIntBetween(0, maxSeqNo) : maxSeqNo; diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 7e5f9dbb820ec..06499aa544e9f 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -77,6 +77,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -3334,7 +3335,7 @@ public void testSnapshottingWithMissingSequenceNumbers() { final Index index = resolveIndex(indexName); final IndexShard primary = internalCluster().getInstance(IndicesService.class, dataNode).getShardOrNull(new ShardId(index, 0)); // create a gap in the sequence numbers - getEngineFromShard(primary).getLocalCheckpointTracker().generateSeqNo(); + EngineTestCase.generateNewSeqNo(getEngineFromShard(primary)); for (int i = 5; i < 10; i++) { index(indexName, "_doc", Integer.toString(i), "foo", "bar" + i); diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 8fff17900b072..0d5e693d62da6 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -386,6 +386,15 @@ public interface IndexWriterFactory { IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException; } + /** + * Generate a new sequence number and return it. 
Only works on InternalEngines + */ + public static long generateNewSeqNo(final Engine engine) { + assert engine instanceof InternalEngine : "expected InternalEngine, got: " + engine.getClass(); + InternalEngine internalEngine = (InternalEngine) engine; + return internalEngine.getLocalCheckpointTracker().generateSeqNo(); + } + public static InternalEngine createInternalEngine( @Nullable final IndexWriterFactory indexWriterFactory, @Nullable final BiFunction localCheckpointTrackerSupplier, diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 655112a0646e2..9b21af713701a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -570,7 +570,7 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); } shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), - shard.getEngine().getLocalCheckpointTracker().getCheckpoint()); + shard.getLocalCheckpoint()); } else { result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); From 3db1fe7afe8e53081fa38867b76fe818a9094ede Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 8 Jun 2018 21:19:16 -0400 Subject: [PATCH 16/20] Remove version from license file name for GCS SDK (#31221) Most of our license file names strip the version off the artifact name when deducing the license filename. However, the version on the GCS SDK (google-api-services-storage) does not match the usual format and instead starts with a vee. This means that the license filename for this license ended up carrying the version and we should not do that. This commit adjusts the regex the deduces the license filename to account for this case, and adjusts the google-api-services-storage license files accordingly. --- .../gradle/precommit/DependencyLicensesTask.groovy | 2 +- ...v115-LICENSE.txt => google-api-services-storage-LICENSE.txt} | 0 ...rev115-NOTICE.txt => google-api-services-storage-NOTICE.txt} | 0 3 files changed, 1 insertion(+), 1 deletion(-) rename plugins/repository-gcs/licenses/{google-api-services-storage-v1-rev115-LICENSE.txt => google-api-services-storage-LICENSE.txt} (100%) rename plugins/repository-gcs/licenses/{google-api-services-storage-v1-rev115-NOTICE.txt => google-api-services-storage-NOTICE.txt} (100%) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index 4d292d87ec39c..df30326a59d79 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -151,7 +151,7 @@ public class DependencyLicensesTask extends DefaultTask { for (File dependency : dependencies) { String jarName = dependency.getName() - String depName = jarName - ~/\-\d+.*/ + String depName = jarName - ~/\-v?\d+.*/ if (ignoreShas.contains(depName)) { // local deps should not have sha files! 
if (getShaFile(jarName).exists()) { diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt rename to plugins/repository-gcs/licenses/google-api-services-storage-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt rename to plugins/repository-gcs/licenses/google-api-services-storage-NOTICE.txt From 65c107b47d6afaae8851e07b3d239c9cabb709e3 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 9 Jun 2018 07:28:41 -0400 Subject: [PATCH 17/20] Fix unknown licenses (#31223) The goal of this commit is to address unknown licenses when producing the dependencies info report. We have two different checks that we run on licenses. The first check is whether or not we have stashed a copy of the license text for a dependency in the repository. The second is to map every dependency to a license type (e.g., BSD 3-clause). The problem here is that the way we were handling licenses in the second check differs from how we handle licenses in the first check. The first check works by finding a license file with the name of the artifact followed by the text -LICENSE.txt. Yet in some cases we allow mapping an artifact name to another name used to check for the license (e.g., we map lucene-.* to lucene, and opensaml-.* to shibboleth). The second check understood the first way of looking for a license file but not the second way. So in this commit we teach the second check about the mappings from artifact names to license names. We do this by copying the configuration from the dependencyLicenses task to the dependenciesInfo task and then reusing the code from the first check in the second check. There were some other challenges here though. For example, dependenciesInfo was checking too many dependencies. For now, we should only be checking direct dependencies and leaving transitive dependencies from another org.elasticsearch artifact to that artifact (we want to do this differently in a follow-up). We also want to disable dependenciesInfo for projects that we do not publish; users only care about licenses they might be exposed to if they use our assembled products. With all of the changes in this commit we have eliminated all unknown licenses. A follow-up will enforce that when we add a new dependency it does not get mapped to unknown; these will be forbidden in the future. Therefore, with this change and earlier changes we are left with no unknown licenses and two custom licenses; custom here means it does not map to an SPDX license type. Those two licenses are xz and ldapsdk. A future change will not allow additional custom licenses unless they are explicitly whitelisted. This ensures that if a new dependency is added it is mapped to an SPDX license or mapped to custom because it does not have an SPDX license.
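To make the two-step filename deduction concrete, here is a minimal Groovy sketch of the version stripping (from the previous patch) and the name mapping described above; the jar name and the mappings are illustrative examples only, the real logic lives in DependencyLicensesTask:

    import java.util.regex.Matcher
    import java.util.regex.Pattern

    // strip the version suffix from the jar name, tolerating a leading "v"
    String jarName = 'google-api-services-storage-v1-rev115.jar'
    String depName = jarName - ~/\-v?\d+.*/
    assert depName == 'google-api-services-storage'

    // map an artifact name to the name used for license lookups, as
    // DependencyLicensesTask.getDependencyName does with configured mappings
    LinkedHashMap<String, String> mappings = ['lucene-.*': 'lucene', 'opensaml-.*': 'shibboleth']
    Pattern pattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
    Matcher match = pattern.matcher('lucene-core')
    if (match.matches()) {
        int i = 0
        while (i < match.groupCount() && match.group(i + 1) == null) ++i
        // the index of the first non-null group selects the mapped name,
        // so the license file to look for is lucene-LICENSE.txt
        assert new ArrayList<>(mappings.values()).get(i) == 'lucene'
    }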
--- build.gradle | 7 ++- .../elasticsearch/gradle/BuildPlugin.groovy | 6 ++- .../gradle/DependenciesInfoTask.groovy | 47 +++++++++++++++---- .../precommit/DependencyLicensesTask.groovy | 35 ++++++++------ .../build.gradle | 3 ++ distribution/build.gradle | 5 ++ test/fixtures/example-fixture/build.gradle | 2 + x-pack/qa/build.gradle | 4 ++ x-pack/qa/sql/build.gradle | 1 + x-pack/test/feature-aware/build.gradle | 1 + 10 files changed, 87 insertions(+), 24 deletions(-) diff --git a/build.gradle b/build.gradle index 015db80d32599..6517d0292bad8 100644 --- a/build.gradle +++ b/build.gradle @@ -543,7 +543,7 @@ subprojects { project -> } } -/* Remove assemble on all qa projects because we don't need to publish +/* Remove assemble/dependenciesInfo on all qa projects because we don't need to publish * artifacts for them. */ gradle.projectsEvaluated { subprojects { @@ -553,6 +553,11 @@ gradle.projectsEvaluated { project.tasks.remove(assemble) project.build.dependsOn.remove('assemble') } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + project.tasks.remove(dependenciesInfo) + project.precommit.dependsOn.remove('dependenciesInfo') + } } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 85fe712fd8d85..eb3cd1dc8c6da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -762,6 +762,10 @@ class BuildPlugin implements Plugin { private static configureDependenciesInfo(Project project) { Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class) - deps.dependencies = project.configurations.compile.allDependencies + deps.runtimeConfiguration = project.configurations.runtime + deps.compileOnlyConfiguration = project.configurations.compileOnly + project.afterEvaluate { + deps.mappings = project.dependencyLicenses.mappings + } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy index b42e6cc8e3caa..e62fe4db954c5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy @@ -19,14 +19,19 @@ package org.elasticsearch.gradle +import org.elasticsearch.gradle.precommit.DependencyLicensesTask import org.gradle.api.DefaultTask +import org.gradle.api.artifacts.Configuration import org.gradle.api.artifacts.Dependency +import org.gradle.api.artifacts.DependencyResolutionListener import org.gradle.api.artifacts.DependencySet import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory import org.gradle.api.tasks.OutputFile import org.gradle.api.tasks.TaskAction +import java.util.regex.Matcher +import java.util.regex.Pattern /** * A task to gather information about the dependencies and export them into a csv file. @@ -44,7 +49,14 @@ public class DependenciesInfoTask extends DefaultTask { /** Dependencies to gather information from. */ @Input - public DependencySet dependencies + public Configuration runtimeConfiguration + + /** We subtract compile-only dependencies. 
*/ + @Input + public Configuration compileOnlyConfiguration + + @Input + public LinkedHashMap mappings /** Directory to read license files */ @InputDirectory @@ -59,15 +71,34 @@ public class DependenciesInfoTask extends DefaultTask { @TaskAction public void generateDependenciesInfo() { + + final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies() + // we have to resolve the transitive dependencies and create a group:artifactId:version map + final Set compileOnlyArtifacts = + compileOnlyConfiguration + .getResolvedConfiguration() + .resolvedArtifacts + .collect { it -> "${it.moduleVersion.id.group}:${it.moduleVersion.id.name}:${it.moduleVersion.id.version}" } + final StringBuilder output = new StringBuilder() - for (Dependency dependency : dependencies) { - // Only external dependencies are checked - if (dependency.group != null && dependency.group.contains("elasticsearch") == false) { - final String url = createURL(dependency.group, dependency.name, dependency.version) - final String licenseType = getLicenseType(dependency.group, dependency.name) - output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n") + for (final Dependency dependency : runtimeDependencies) { + // we do not need compile-only dependencies here + if (compileOnlyArtifacts.contains("${dependency.group}:${dependency.name}:${dependency.version}")) { + continue + } + // only external dependencies are checked + if (dependency.group != null && dependency.group.contains("org.elasticsearch")) { + continue } + + final String url = createURL(dependency.group, dependency.name, dependency.version) + final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, dependency.name) + logger.info("mapped dependency ${dependency.group}:${dependency.name} to ${dependencyName} for license info") + + final String licenseType = getLicenseType(dependency.group, dependencyName) + output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n") + } outputFile.setText(output.toString(), 'UTF-8') } @@ -173,7 +204,7 @@ are met: derived from this software without specific prior written permission\\.| (3\\.)? Neither the name of .+ nor the names of its contributors may be used to endorse or promote products derived from - this software without specific prior written permission\\.) + this software without specific prior written permission\\.) THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index df30326a59d79..04fb023e2051a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -109,6 +109,10 @@ public class DependencyLicensesTask extends DefaultTask { mappings.put(from, to) } + public LinkedHashMap getMappings() { + return new LinkedHashMap<>(mappings) + } + /** * Add a rule which will skip SHA checking for the given dependency name. This should be used for * locally build dependencies, which cause the sha to change constantly. 
@@ -129,10 +133,6 @@ public class DependencyLicensesTask extends DefaultTask { throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies") } - - // order is the same for keys and values iteration since we use a linked hashmap - List mapped = new ArrayList<>(mappings.values()) - Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')') Map licenses = new HashMap<>() Map notices = new HashMap<>() Set shaFiles = new HashSet() @@ -162,16 +162,10 @@ public class DependencyLicensesTask extends DefaultTask { checkSha(dependency, jarName, shaFiles) } - logger.info("Checking license/notice for " + depName) - Matcher match = mappingsPattern.matcher(depName) - if (match.matches()) { - int i = 0 - while (i < match.groupCount() && match.group(i + 1) == null) ++i; - logger.info("Mapped dependency name ${depName} to ${mapped.get(i)} for license check") - depName = mapped.get(i) - } - checkFile(depName, jarName, licenses, 'LICENSE') - checkFile(depName, jarName, notices, 'NOTICE') + final String dependencyName = getDependencyName(mappings, depName) + logger.info("mapped dependency name ${depName} to ${dependencyName} for license/notice check") + checkFile(dependencyName, jarName, licenses, 'LICENSE') + checkFile(dependencyName, jarName, notices, 'NOTICE') } licenses.each { license, count -> @@ -189,6 +183,19 @@ public class DependencyLicensesTask extends DefaultTask { } } + public static String getDependencyName(final LinkedHashMap mappings, final String dependencyName) { + // order is the same for keys and values iteration since we use a linked hashmap + List mapped = new ArrayList<>(mappings.values()) + Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')') + Matcher match = mappingsPattern.matcher(dependencyName) + if (match.matches()) { + int i = 0 + while (i < match.groupCount() && match.group(i + 1) == null) ++i; + return mapped.get(i) + } + return dependencyName + } + private File getShaFile(String jarName) { return new File(licensesDir, jarName + SHA_EXTENSION) } diff --git a/client/client-benchmark-noop-api-plugin/build.gradle b/client/client-benchmark-noop-api-plugin/build.gradle index bee41034c3ce5..cc84207d90d22 100644 --- a/client/client-benchmark-noop-api-plugin/build.gradle +++ b/client/client-benchmark-noop-api-plugin/build.gradle @@ -31,6 +31,9 @@ esplugin { tasks.remove(assemble) build.dependsOn.remove('assemble') +dependencyLicenses.enabled = false +dependenciesInfo.enabled = false + compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" // no unit tests diff --git a/distribution/build.gradle b/distribution/build.gradle index fa62513a54069..7f08c244f456d 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -32,6 +32,11 @@ Collection distributions = project('archives').subprojects + project('packages') // Concatenates the dependencies CSV files into a single file task generateDependenciesReport(type: ConcatFilesTask) { + project.rootProject.allprojects { + afterEvaluate { + if (it.tasks.findByName("dependenciesInfo")) dependsOn it.tasks.dependenciesInfo + } + } files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' ) headerLine = "name,version,url,license" target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv") diff --git a/test/fixtures/example-fixture/build.gradle b/test/fixtures/example-fixture/build.gradle index 225a2cf9deba6..ce562e89abb7f 100644 --- 
a/test/fixtures/example-fixture/build.gradle +++ b/test/fixtures/example-fixture/build.gradle @@ -22,3 +22,5 @@ test.enabled = false // Not published so no need to assemble tasks.remove(assemble) build.dependsOn.remove('assemble') + +dependenciesInfo.enabled = false diff --git a/x-pack/qa/build.gradle b/x-pack/qa/build.gradle index 1570b218592fe..24b6618b7d8f6 100644 --- a/x-pack/qa/build.gradle +++ b/x-pack/qa/build.gradle @@ -28,5 +28,9 @@ gradle.projectsEvaluated { project.tasks.remove(assemble) project.build.dependsOn.remove('assemble') } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + project.precommit.dependsOn.remove('dependenciesInfo') + } } } diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle index a3c147bbc04fc..0bea3a9364b71 100644 --- a/x-pack/qa/sql/build.gradle +++ b/x-pack/qa/sql/build.gradle @@ -22,6 +22,7 @@ dependencies { test.enabled = false dependencyLicenses.enabled = false +dependenciesInfo.enabled = false // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { diff --git a/x-pack/test/feature-aware/build.gradle b/x-pack/test/feature-aware/build.gradle index 217ed25a2d4b1..11b0e67183c8f 100644 --- a/x-pack/test/feature-aware/build.gradle +++ b/x-pack/test/feature-aware/build.gradle @@ -10,6 +10,7 @@ dependencies { forbiddenApisMain.enabled = true dependencyLicenses.enabled = false +dependenciesInfo.enabled = false jarHell.enabled = false From 1082ea57c819301afff2e6cb0192c552112db4ff Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 9 Jun 2018 09:46:57 -0400 Subject: [PATCH 18/20] Add recognition of MPL 2.0 (#31226) This commit adds the ability for the dependencies info check to recognize version 2.0 of the Mozilla Public License. --- .../org/elasticsearch/gradle/DependenciesInfoTask.groovy | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy index e62fe4db954c5..13e457c031706 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy @@ -245,6 +245,8 @@ SOFTWARE\\. final String MOZILLA_1_1 = "Mozilla Public License.*Version 1.1" + final String MOZILLA_2_0 = "Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0" + switch (licenseText) { case ~/.*${APACHE_2_0}.*/: spdx = 'Apache-2.0' break @@ -273,6 +275,9 @@ SOFTWARE\\. case ~/.*${MOZILLA_1_1}.*/: spdx = 'MPL-1.1' break + case ~/.*${MOZILLA_2_0}.*/: + spdx = 'MPL-2.0' + break default: break } From 1d840f934824641380395095d41dcf0671e93764 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 9 Jun 2018 09:50:24 -0400 Subject: [PATCH 19/20] Remove dependencies report task dependencies (#31227) A previous commit tried to add task dependencies for the :distribution:generateDependenciesReport task so that a user did not have to run "dependenciesInfo :distribution:generateDependenciesReport". However, this method did not reliably add all task dependencies due to task ordering issues in previous versions of Gradle and our build. This commit removes this for now, and a user will continue to have to run "dependenciesInfo :distribution:generateDependenciesReport".
--- distribution/build.gradle | 5 ----- 1 file changed, 5 deletions(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index 7f08c244f456d..fa62513a54069 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -32,11 +32,6 @@ Collection distributions = project('archives').subprojects + project('packages') // Concatenates the dependencies CSV files into a single file task generateDependenciesReport(type: ConcatFilesTask) { - project.rootProject.allprojects { - afterEvaluate { - if (it.tasks.findByName("dependenciesInfo")) dependsOn it.tasks.dependenciesInfo - } - } files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' ) headerLine = "name,version,url,license" target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv") From aa8aa0d9e0275ba30c58cc58b14ca39adcf7eff7 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sat, 9 Jun 2018 09:50:36 -0400 Subject: [PATCH 20/20] Move default location of dependencies report (#31228) This commit moves the default location of the full dependencies report to be under the reports directory to align it with the location for the dependenciesInfo task output. --- distribution/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/build.gradle b/distribution/build.gradle index fa62513a54069..068c8da480f11 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -34,7 +34,7 @@ Collection distributions = project('archives').subprojects + project('packages') task generateDependenciesReport(type: ConcatFilesTask) { files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' ) headerLine = "name,version,url,license" - target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv") + target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv") } /*****************************************************************************