diff --git a/build.gradle b/build.gradle index 015db80d32599..6517d0292bad8 100644 --- a/build.gradle +++ b/build.gradle @@ -543,7 +543,7 @@ subprojects { project -> } } -/* Remove assemble on all qa projects because we don't need to publish +/* Remove assemble/dependenciesInfo on all qa projects because we don't need to publish * artifacts for them. */ gradle.projectsEvaluated { subprojects { @@ -553,6 +553,11 @@ gradle.projectsEvaluated { project.tasks.remove(assemble) project.build.dependsOn.remove('assemble') } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + project.tasks.remove(dependenciesInfo) + project.precommit.dependsOn.remove('dependenciesInfo') + } } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 85fe712fd8d85..eb3cd1dc8c6da 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -762,6 +762,10 @@ class BuildPlugin implements Plugin { private static configureDependenciesInfo(Project project) { Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class) - deps.dependencies = project.configurations.compile.allDependencies + deps.runtimeConfiguration = project.configurations.runtime + deps.compileOnlyConfiguration = project.configurations.compileOnly + project.afterEvaluate { + deps.mappings = project.dependencyLicenses.mappings + } } } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy index eb82b4675f287..13e457c031706 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/DependenciesInfoTask.groovy @@ -19,14 +19,19 @@ package org.elasticsearch.gradle +import org.elasticsearch.gradle.precommit.DependencyLicensesTask import org.gradle.api.DefaultTask +import org.gradle.api.artifacts.Configuration import org.gradle.api.artifacts.Dependency +import org.gradle.api.artifacts.DependencyResolutionListener import org.gradle.api.artifacts.DependencySet import org.gradle.api.tasks.Input import org.gradle.api.tasks.InputDirectory import org.gradle.api.tasks.OutputFile import org.gradle.api.tasks.TaskAction +import java.util.regex.Matcher +import java.util.regex.Pattern /** * A task to gather information about the dependencies and export them into a csv file. @@ -44,7 +49,14 @@ public class DependenciesInfoTask extends DefaultTask { /** Dependencies to gather information from. */ @Input - public DependencySet dependencies + public Configuration runtimeConfiguration + + /** We subtract compile-only dependencies. 
*/ + @Input + public Configuration compileOnlyConfiguration + + @Input + public LinkedHashMap mappings /** Directory to read license files */ @InputDirectory @@ -59,15 +71,34 @@ public class DependenciesInfoTask extends DefaultTask { @TaskAction public void generateDependenciesInfo() { + + final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies() + // we have to resolve the transitive dependencies and create a group:artifactId:version map + final Set compileOnlyArtifacts = + compileOnlyConfiguration + .getResolvedConfiguration() + .resolvedArtifacts + .collect { it -> "${it.moduleVersion.id.group}:${it.moduleVersion.id.name}:${it.moduleVersion.id.version}" } + final StringBuilder output = new StringBuilder() - for (Dependency dependency : dependencies) { - // Only external dependencies are checked - if (dependency.group != null && dependency.group.contains("elasticsearch") == false) { - final String url = createURL(dependency.group, dependency.name, dependency.version) - final String licenseType = getLicenseType(dependency.group, dependency.name) - output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n") + for (final Dependency dependency : runtimeDependencies) { + // we do not need compile-only dependencies here + if (compileOnlyArtifacts.contains("${dependency.group}:${dependency.name}:${dependency.version}")) { + continue } + // only external dependencies are checked + if (dependency.group != null && dependency.group.contains("org.elasticsearch")) { + continue + } + + final String url = createURL(dependency.group, dependency.name, dependency.version) + final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, dependency.name) + logger.info("mapped dependency ${dependency.group}:${dependency.name} to ${dependencyName} for license info") + + final String licenseType = getLicenseType(dependency.group, dependencyName) + output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n") + } outputFile.setText(output.toString(), 'UTF-8') } @@ -109,7 +140,8 @@ public class DependenciesInfoTask extends DefaultTask { } if (license) { - final String content = license.readLines("UTF-8").toString() + // replace * because they are sometimes used at the beginning of lines as if the license were a multi-line comment + final String content = new String(license.readBytes(), "UTF-8").replaceAll("\\s+", " ").replaceAll("\\*", " ") final String spdx = checkSPDXLicense(content) if (spdx == null) { // License has not be identified as SPDX. @@ -133,15 +165,88 @@ public class DependenciesInfoTask extends DefaultTask { private String checkSPDXLicense(final String licenseText) { String spdx = null - final String APACHE_2_0 = "Apache.*License.*(v|V)ersion 2.0" - final String BSD_2 = "BSD 2-clause.*License" + final String APACHE_2_0 = "Apache.*License.*(v|V)ersion.*2\\.0" + + final String BSD_2 = """ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1\\. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer\\. + 2\\. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution\\.
+ +THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. +IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\. +""".replaceAll("\\s+", "\\\\s*") + + final String BSD_3 = """ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + (1\\.)? Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer\\. + (2\\.)? Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution\\. + ((3\\.)? The name of .+ may not be used to endorse or promote products + derived from this software without specific prior written permission\\.| + (3\\.)? Neither the name of .+ nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission\\.) + +THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\. +IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\. +""".replaceAll("\\s+", "\\\\s*") + final String CDDL_1_0 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.0" final String CDDL_1_1 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1" final String ICU = "ICU License - ICU 1.8.1 and later" final String LGPL_3 = "GNU LESSER GENERAL PUBLIC LICENSE.*Version 3" - final String MIT = "MIT License" + + final String MIT = """ +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files \\(the "Software"\\), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software\\. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE\\. +""".replaceAll("\\s+", "\\\\s*") + final String MOZILLA_1_1 = "Mozilla Public License.*Version 1.1" + final String MOZILLA_2_0 = "Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0" + switch (licenseText) { case ~/.*${APACHE_2_0}.*/: spdx = 'Apache-2.0' @@ -152,6 +257,9 @@ public class DependenciesInfoTask extends DefaultTask { case ~/.*${BSD_2}.*/: spdx = 'BSD-2-Clause' break + case ~/.*${BSD_3}.*/: + spdx = 'BSD-3-Clause' + break case ~/.*${LGPL_3}.*/: spdx = 'LGPL-3.0' break @@ -167,6 +275,9 @@ public class DependenciesInfoTask extends DefaultTask { case ~/.*${MOZILLA_1_1}.*/: spdx = 'MPL-1.1' break + case ~/.*${MOZILLA_2_0}.*/: + spdx = 'MPL-2.0' + break default: break } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy index 4d292d87ec39c..04fb023e2051a 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/DependencyLicensesTask.groovy @@ -109,6 +109,10 @@ public class DependencyLicensesTask extends DefaultTask { mappings.put(from, to) } + public LinkedHashMap getMappings() { + return new LinkedHashMap<>(mappings) + } + /** * Add a rule which will skip SHA checking for the given dependency name. This should be used for * locally build dependencies, which cause the sha to change constantly. @@ -129,10 +133,6 @@ public class DependencyLicensesTask extends DefaultTask { throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies") } - - // order is the same for keys and values iteration since we use a linked hashmap - List mapped = new ArrayList<>(mappings.values()) - Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')') Map licenses = new HashMap<>() Map notices = new HashMap<>() Set shaFiles = new HashSet() @@ -151,7 +151,7 @@ public class DependencyLicensesTask extends DefaultTask { for (File dependency : dependencies) { String jarName = dependency.getName() - String depName = jarName - ~/\-\d+.*/ + String depName = jarName - ~/\-v?\d+.*/ if (ignoreShas.contains(depName)) { // local deps should not have sha files! 
if (getShaFile(jarName).exists()) { @@ -162,16 +162,10 @@ public class DependencyLicensesTask extends DefaultTask { checkSha(dependency, jarName, shaFiles) } - logger.info("Checking license/notice for " + depName) - Matcher match = mappingsPattern.matcher(depName) - if (match.matches()) { - int i = 0 - while (i < match.groupCount() && match.group(i + 1) == null) ++i; - logger.info("Mapped dependency name ${depName} to ${mapped.get(i)} for license check") - depName = mapped.get(i) - } - checkFile(depName, jarName, licenses, 'LICENSE') - checkFile(depName, jarName, notices, 'NOTICE') + final String dependencyName = getDependencyName(mappings, depName) + logger.info("mapped dependency name ${depName} to ${dependencyName} for license/notice check") + checkFile(dependencyName, jarName, licenses, 'LICENSE') + checkFile(dependencyName, jarName, notices, 'NOTICE') } licenses.each { license, count -> @@ -189,6 +183,19 @@ public class DependencyLicensesTask extends DefaultTask { } } + public static String getDependencyName(final LinkedHashMap mappings, final String dependencyName) { + // order is the same for keys and values iteration since we use a linked hashmap + List mapped = new ArrayList<>(mappings.values()) + Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')') + Matcher match = mappingsPattern.matcher(dependencyName) + if (match.matches()) { + int i = 0 + while (i < match.groupCount() && match.group(i + 1) == null) ++i; + return mapped.get(i) + } + return dependencyName + } + private File getShaFile(String jarName) { return new File(licensesDir, jarName + SHA_EXTENSION) } diff --git a/client/client-benchmark-noop-api-plugin/build.gradle b/client/client-benchmark-noop-api-plugin/build.gradle index bee41034c3ce5..cc84207d90d22 100644 --- a/client/client-benchmark-noop-api-plugin/build.gradle +++ b/client/client-benchmark-noop-api-plugin/build.gradle @@ -31,6 +31,9 @@ esplugin { tasks.remove(assemble) build.dependsOn.remove('assemble') +dependencyLicenses.enabled = false +dependenciesInfo.enabled = false + compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" // no unit tests diff --git a/distribution/build.gradle b/distribution/build.gradle index fa62513a54069..068c8da480f11 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -34,7 +34,7 @@ Collection distributions = project('archives').subprojects + project('packages') task generateDependenciesReport(type: ConcatFilesTask) { files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' ) headerLine = "name,version,url,license" - target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv") + target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv") } /***************************************************************************** diff --git a/docs/reference/modules/transport.asciidoc b/docs/reference/modules/transport.asciidoc index b7a65d98592cc..046d82cc507eb 100644 --- a/docs/reference/modules/transport.asciidoc +++ b/docs/reference/modules/transport.asciidoc @@ -44,7 +44,12 @@ time setting format). Defaults to `30s`. |`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`) between all nodes. Defaults to `false`. -|`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere. 
+|`transport.ping_schedule` | Schedule a regular application-level ping message +to ensure that transport connections between nodes are kept alive. Defaults to +`5s` in the transport client and `-1` (disabled) elsewhere. It is preferable to +correctly configure TCP keep-alives instead of using this feature, because TCP +keep-alives apply to all kinds of long-lived connections and not just to +transport connections. |======================================================================= @@ -80,6 +85,20 @@ The following parameters can be configured like that * `tcp_send_buffer_size`: Configures the send buffer size of the socket * `tcp_receive_buffer_size`: Configures the receive buffer size of the socket +[float] +==== Long-lived idle connections + +Elasticsearch opens a number of long-lived TCP connections between each pair of +nodes in the cluster, and some of these connections may be idle for an extended +period of time. Nonetheless, Elasticsearch requires these connections to remain +open, and it can disrupt the operation of the cluster if any inter-node +connections are closed by an external influence such as a firewall. It is +important to configure your network to preserve long-lived idle connections +between Elasticsearch nodes, for instance by leaving `tcp_keep_alive` enabled +and ensuring that the keepalive interval is shorter than any timeout that might +cause idle connections to be closed, or by setting `transport.ping_schedule` if +keepalives cannot be configured. + [float] === Transport Tracer diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index 6cb984263e6f7..bd66c7f071cd4 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -241,6 +241,13 @@ number of terms that must match. The syntax is the same as the <>. (Defaults to `"30%"`). +`fail_on_unsupported_field`:: +Controls whether the query should fail (throw an exception) if any of the +specified fields are not of the supported types +(`text` or `keyword`). Set this to `false` to ignore the field and continue +processing. Defaults to +`true`. + `boost_terms`:: Each term in the formed query could be further boosted by their tf-idf score. This sets the boost factor to use when using this feature. Defaults to diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java index 8dcc6cc907569..f5e1d882d2b8d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ArabicAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArabicAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License.
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ArabicAnalyzer arabicAnalyzer; - public ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); arabicAnalyzer = new ArabicAnalyzer( Analysis.parseStopWords(env, settings, ArabicAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java index ba9f55f331f2b..d066aed14eeaf 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ArmenianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ArmenianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ArmenianAnalyzer analyzer; - public ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ArmenianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ArmenianAnalyzer( Analysis.parseStopWords(env, settings, ArmenianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java index 45ff947c61eac..8fe32a697f756 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BasqueAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BasqueAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BasqueAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BasqueAnalyzer analyzer; - public BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BasqueAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BasqueAnalyzer( Analysis.parseStopWords(env, settings, BasqueAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java index 41931833301cf..5696d0a604555 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BengaliAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BengaliAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BengaliAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BengaliAnalyzer analyzer; - public BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BengaliAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BengaliAnalyzer( Analysis.parseStopWords(env, settings, BengaliAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java index 36b13e67bf4ee..07399a2263dcc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BrazilianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BrazilianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.br.BrazilianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BrazilianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BrazilianAnalyzer analyzer; - public BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BrazilianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BrazilianAnalyzer( Analysis.parseStopWords(env, settings, BrazilianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java index 26e82cbfb2f70..a6b1cb97a1b5c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/BulgarianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/BulgarianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.bg.BulgarianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class BulgarianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final BulgarianAnalyzer analyzer; - public BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + BulgarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new BulgarianAnalyzer( Analysis.parseStopWords(env, settings, BulgarianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java index 94babaa52f83d..db229ffb4924a 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CatalanAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CatalanAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CatalanAnalyzer analyzer; - public CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CatalanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new CatalanAnalyzer( Analysis.parseStopWords(env, settings, CatalanAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java index 10e6f0dc42f1e..01b529188c6f0 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ChineseAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java @@ -17,12 +17,13 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; /** * Only for old indexes @@ -31,16 +32,16 @@ public class ChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CJKAnalyzer analyzer; - public CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CjkAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); CharArraySet stopWords = Analysis.parseStopWords(env, settings, CJKAnalyzer.getDefaultStopSet()); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 433bef902c1a1..24dce7abcf370 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -24,11 +24,17 @@ import org.apache.lucene.analysis.LowerCaseFilter; import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.ar.ArabicAnalyzer; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; import org.apache.lucene.analysis.ar.ArabicStemFilter; +import org.apache.lucene.analysis.bg.BulgarianAnalyzer; +import org.apache.lucene.analysis.bn.BengaliAnalyzer; import org.apache.lucene.analysis.bn.BengaliNormalizationFilter; +import org.apache.lucene.analysis.br.BrazilianAnalyzer; import 
org.apache.lucene.analysis.br.BrazilianStemFilter; +import org.apache.lucene.analysis.ca.CatalanAnalyzer; import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter; +import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; @@ -40,14 +46,22 @@ import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.UpperCaseFilter; import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.cz.CzechAnalyzer; import org.apache.lucene.analysis.cz.CzechStemFilter; +import org.apache.lucene.analysis.da.DanishAnalyzer; +import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; +import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.apache.lucene.analysis.fr.FrenchAnalyzer; +import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.apache.lucene.analysis.hi.HindiNormalizationFilter; +import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.apache.lucene.analysis.in.IndicNormalizationFilter; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; @@ -64,6 +78,7 @@ import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer; import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenizer; +import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.apache.lucene.analysis.path.PathHierarchyTokenizer; import org.apache.lucene.analysis.pattern.PatternTokenizer; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; @@ -73,6 +88,7 @@ import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.ClassicTokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; import org.apache.lucene.analysis.th.ThaiTokenizer; import org.apache.lucene.analysis.tr.ApostropheFilter; @@ -113,6 +129,24 @@ public Map>> getAn analyzers.put("fingerprint", FingerprintAnalyzerProvider::new); analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); + analyzers.put("snowball", SnowballAnalyzerProvider::new); + analyzers.put("arabic", ArabicAnalyzerProvider::new); + analyzers.put("armenian", ArmenianAnalyzerProvider::new); + analyzers.put("basque", BasqueAnalyzerProvider::new); + analyzers.put("bengali", BengaliAnalyzerProvider::new); + analyzers.put("brazilian", BrazilianAnalyzerProvider::new); + analyzers.put("bulgarian", BulgarianAnalyzerProvider::new); + analyzers.put("catalan", CatalanAnalyzerProvider::new); + analyzers.put("chinese", ChineseAnalyzerProvider::new); + analyzers.put("cjk", CjkAnalyzerProvider::new); + analyzers.put("czech", CzechAnalyzerProvider::new); + analyzers.put("danish", DanishAnalyzerProvider::new); + analyzers.put("dutch", DutchAnalyzerProvider::new); + 
analyzers.put("english", EnglishAnalyzerProvider::new); + analyzers.put("finnish", FinnishAnalyzerProvider::new); + analyzers.put("french", FrenchAnalyzerProvider::new); + analyzers.put("galician", GalicianAnalyzerProvider::new); + analyzers.put("german", GermanAnalyzerProvider::new); return analyzers; } @@ -213,10 +247,108 @@ public Map> getTokenizers() { @Override public List getPreBuiltAnalyzerProviderFactories() { List analyzers = new ArrayList<>(); - analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, - version -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET))); - analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version -> - new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET))); + analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, version -> { + Analyzer a = new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, version -> { + Analyzer a = new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, + CharArraySet.EMPTY_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, version -> { + Analyzer a = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, version -> { + Analyzer a = new ArabicAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, version -> { + Analyzer a = new ArmenianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, version -> { + Analyzer a = new BasqueAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("bengali", CachingStrategy.LUCENE, version -> { + Analyzer a = new BengaliAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("brazilian", CachingStrategy.LUCENE, version -> { + Analyzer a = new BrazilianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, version -> { + Analyzer a = new BulgarianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, version -> { + Analyzer a = new CatalanAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, version -> { + // only for old indices, best effort + Analyzer a = new StandardAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, version -> { + Analyzer a = new CJKAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, version -> { + Analyzer a = new 
CzechAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, version -> { + Analyzer a = new DanishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("dutch", CachingStrategy.LUCENE, version -> { + Analyzer a = new DutchAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("english", CachingStrategy.LUCENE, version -> { + Analyzer a = new EnglishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("finnish", CachingStrategy.LUCENE, version -> { + Analyzer a = new FinnishAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, version -> { + Analyzer a = new FrenchAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, version -> { + Analyzer a = new GalicianAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); + analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, version -> { + Analyzer a = new GermanAnalyzer(); + a.setVersion(version.luceneVersion); + return a; + })); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java index 12d2349d9bac5..c14c2d57a3afa 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/CzechAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CzechAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.cz.CzechAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class CzechAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final CzechAnalyzer analyzer; - public CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + CzechAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new CzechAnalyzer( Analysis.parseStopWords(env, settings, CzechAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java index 01021cbfd8e19..e3f024693c2d7 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/DanishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DanishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.da.DanishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class DanishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final DanishAnalyzer analyzer; - public DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + DanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new DanishAnalyzer( Analysis.parseStopWords(env, settings, DanishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java index e215a89241e9b..70ab2a5ea62f1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/DutchAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/DutchAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class DutchAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final DutchAnalyzer analyzer; - public DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + DutchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new DutchAnalyzer( Analysis.parseStopWords(env, settings, DutchAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java index 300381ef14754..b14a83dbf7c4f 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/EnglishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EnglishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class EnglishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final EnglishAnalyzer analyzer; - public EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + EnglishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new EnglishAnalyzer( Analysis.parseStopWords(env, settings, EnglishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java index 95f0819293aae..ec18a71a12541 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/FinnishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FinnishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class FinnishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final FinnishAnalyzer analyzer; - public FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + FinnishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new FinnishAnalyzer( Analysis.parseStopWords(env, settings, FinnishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java index eac40c375caac..caee3618afbfc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/FrenchAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/FrenchAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class FrenchAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final FrenchAnalyzer analyzer; - public FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + FrenchAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new FrenchAnalyzer( Analysis.parseStopWords(env, settings, FrenchAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java index 57550594dc053..23f8da7b8dded 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GalicianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GalicianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GalicianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GalicianAnalyzer analyzer; - public GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GalicianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GalicianAnalyzer( Analysis.parseStopWords(env, settings, GalicianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java index 7951f17b79db9..cf96f50845f22 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GermanAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GermanAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.de.GermanAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GermanAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GermanAnalyzer analyzer; - public GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GermanAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GermanAnalyzer( Analysis.parseStopWords(env, settings, GermanAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java similarity index 95% rename from server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java index 1a096b8fa4b9f..5dbe902fe1500 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzer.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java @@ -1,4 +1,4 @@ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; /* * Licensed to Elasticsearch under one or more contributor @@ -48,12 +48,12 @@ public final class SnowballAnalyzer extends Analyzer { private CharArraySet stopSet; /** Builds the named analyzer with no stop words. 
*/ - public SnowballAnalyzer(String name) { + SnowballAnalyzer(String name) { this.name = name; } /** Builds the named analyzer with the given stop words. */ - public SnowballAnalyzer(String name, CharArraySet stopWords) { + SnowballAnalyzer(String name, CharArraySet stopWords) { this(name); stopSet = CharArraySet.unmodifiableSet(CharArraySet.copy(stopWords)); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java similarity index 92% rename from server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java index 84f1931633100..0f213df9ad722 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SnowballAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.core.StopAnalyzer; @@ -26,6 +26,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; import java.util.HashMap; import java.util.Map; @@ -60,7 +62,7 @@ public class SnowballAnalyzerProvider extends AbstractIndexAnalyzerProvider.txt file, which is located in +the 'license' directory of the distribution file, for the license terms of the +components that this product depends on. + +------------------------------------------------------------------------------- +This product contains the extensions to Java Collections Framework which has +been derived from the works by JSR-166 EG, Doug Lea, and Jason T. Greene: + + * LICENSE: + * license/LICENSE.jsr166y.txt (Public Domain) + * HOMEPAGE: + * http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/ + * http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ + +This product contains a modified version of Robert Harder's Public Domain +Base64 Encoder and Decoder, which can be obtained at: + + * LICENSE: + * license/LICENSE.base64.txt (Public Domain) + * HOMEPAGE: + * http://iharder.sourceforge.net/current/java/base64/ + +This product contains a modified version of 'JZlib', a re-implementation of +zlib in pure Java, which can be obtained at: + + * LICENSE: + * license/LICENSE.jzlib.txt (BSD Style License) + * HOMEPAGE: + * http://www.jcraft.com/jzlib/ + +This product contains a modified version of 'Webbit', a Java event based +WebSocket and HTTP server: + + * LICENSE: + * license/LICENSE.webbit.txt (BSD License) + * HOMEPAGE: + * https://github.com/joewalnes/webbit + +This product optionally depends on 'Protocol Buffers', Google's data +interchange format, which can be obtained at: + + * LICENSE: + * license/LICENSE.protobuf.txt (New BSD License) + * HOMEPAGE: + * http://code.google.com/p/protobuf/ + +This product optionally depends on 'Bouncy Castle Crypto APIs' to generate +a temporary self-signed X.509 certificate when the JVM does not provide the +equivalent functionality. 
It can be obtained at: + + * LICENSE: + * license/LICENSE.bouncycastle.txt (MIT License) + * HOMEPAGE: + * http://www.bouncycastle.org/ + +This product optionally depends on 'SLF4J', a simple logging facade for Java, +which can be obtained at: + + * LICENSE: + * license/LICENSE.slf4j.txt (MIT License) + * HOMEPAGE: + * http://www.slf4j.org/ + +This product optionally depends on 'Apache Commons Logging', a logging +framework, which can be obtained at: + + * LICENSE: + * license/LICENSE.commons-logging.txt (Apache License 2.0) + * HOMEPAGE: + * http://commons.apache.org/logging/ + +This product optionally depends on 'Apache Log4J', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.log4j.txt (Apache License 2.0) + * HOMEPAGE: + * http://logging.apache.org/log4j/ + +This product optionally depends on 'JBoss Logging', a logging framework, +which can be obtained at: + + * LICENSE: + * license/LICENSE.jboss-logging.txt (GNU LGPL 2.1) + * HOMEPAGE: + * http://anonsvn.jboss.org/repos/common/common-logging-spi/ + +This product optionally depends on 'Apache Felix', an open source OSGi +framework implementation, which can be obtained at: + + * LICENSE: + * license/LICENSE.felix.txt (Apache License 2.0) + * HOMEPAGE: + * http://felix.apache.org/ diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..c546222971985 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +63b5fa95c74785e16f2c30ce268bc222e35c8cb5 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..1e6c241ea0b17 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +d84a1f21768b7309c2954521cf5a1f46c2309eb1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..71c33af1c5fc2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +d64312378b438dfdad84267c599a053327c6f02a \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..3edf5fcea59b3 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +177a6b30cca92f6f5f9873c9befd681377a4c328 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..cba27387268d1 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +fec0e63e7dd7f4eeef7ea8dc47a1ff32dfc7ebc2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..3571d2ecfdc48 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +f6eb553b53fb3a90a8ac1170697093fed82eae28 \ No newline at end of file diff --git 
a/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1 new file mode 100644 index 0000000000000..e502d4c77084c --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.16.Final.jar.sha1 @@ -0,0 +1 @@ +3c8ee2c4d4a1cbb947a5c184c7aeb2204260958b \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/count/20_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/count/20_query_string.yml index 2c8965f5c6fd5..bf644d9a5f1c5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/count/20_query_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/count/20_query_string.yml @@ -44,14 +44,6 @@ - match: {count : 0} - - do: - count: - index: test - q: field:bars - analyzer: snowball - - - match: {count : 1} - - do: count: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml index 5ccf97891af48..ea298f2092342 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/30_query_string.yml @@ -50,16 +50,6 @@ - is_false: matched - - do: - explain: - index: test - type: test - id: 1 - q: field:bars - analyzer: snowball - - - is_true: matched - - do: explain: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string.yml index 3b5418169aa21..29ff59fc96af1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string.yml @@ -35,14 +35,6 @@ - is_true: valid - - do: - indices.validate_query: - index: test - q: field:bars - analyzer: snowball - - - is_true: valid - - do: indices.validate_query: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/60_query_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/60_query_string.yml index fa9ff925d2ee2..3e7c46e7476e3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/60_query_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/60_query_string.yml @@ -44,14 +44,6 @@ - match: {hits.total: 0} - - do: - search: - index: test - q: field:bars - analyzer: snowball - - - match: {hits.total: 1} - - do: search: index: test diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java index 54079592224a0..07ae513351e1b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsIndexAction.java @@ -186,7 +186,7 @@ private static Map findFieldMappingsByType(Predica } } else { // not a pattern - FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field); + FieldMapper fieldMapper = allFieldMappers.getMapper(field); if (fieldMapper != null) { addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); } 
else if (request.probablySingleFieldRequest()) { diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index b24dc685df6d0..f1a1dc451406e 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -76,7 +76,7 @@ protected FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesI MapperService mapperService = indicesService.indexServiceSafe(shardId.getIndex()).mapperService(); Set<String> fieldNames = new HashSet<>(); for (String field : request.fields()) { - fieldNames.addAll(mapperService.simpleMatchToIndexNames(field)); + fieldNames.addAll(mapperService.simpleMatchToFullName(field)); } Predicate<String> fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); Map<String, FieldCapabilities> responseMap = new HashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java index 07dcdf7db116a..d4e2c652fa875 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java +++ b/server/src/main/java/org/elasticsearch/action/resync/ResyncReplicationRequest.java @@ -22,6 +22,7 @@ import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; @@ -33,17 +34,24 @@ */ public final class ResyncReplicationRequest extends ReplicatedWriteRequest<ResyncReplicationRequest> { + private long trimAboveSeqNo; private Translog.Operation[] operations; ResyncReplicationRequest() { super(); } - public ResyncReplicationRequest(final ShardId shardId, final Translog.Operation[] operations) { + public ResyncReplicationRequest(final ShardId shardId, final long trimAboveSeqNo, + final Translog.Operation[] operations) { super(shardId); + this.trimAboveSeqNo = trimAboveSeqNo; this.operations = operations; } + public long getTrimAboveSeqNo() { + return trimAboveSeqNo; + } + public Translog.Operation[] getOperations() { return operations; } @@ -60,12 +68,20 @@ public void readFrom(final StreamInput in) throws IOException { throw new IllegalStateException("resync replication request serialization is broken in 6.0.0"); } super.readFrom(in); + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + trimAboveSeqNo = in.readZLong(); + } else { + trimAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + } operations = in.readArray(Translog.Operation::readOperation, Translog.Operation[]::new); } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeZLong(trimAboveSeqNo); + } out.writeArray(Translog.Operation::writeOperation, operations); } @@ -74,12 +90,13 @@ public boolean equals(final Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; final ResyncReplicationRequest that = (ResyncReplicationRequest) o; - return Arrays.equals(operations, that.operations); + return trimAboveSeqNo == that.trimAboveSeqNo + && Arrays.equals(operations, that.operations); } @Override public int
hashCode() { - return Arrays.hashCode(operations); + return Long.hashCode(trimAboveSeqNo) + 31 * Arrays.hashCode(operations); } @Override @@ -88,6 +105,7 @@ public String toString() { "shardId=" + shardId + ", timeout=" + timeout + ", index='" + index + '\'' + + ", trimAboveSeqNo=" + trimAboveSeqNo + ", ops=" + operations.length + "}"; } diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 3dd2bd4df580f..78c1e835d4087 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -135,6 +135,9 @@ public static Translog.Location performOnReplica(ResyncReplicationRequest reques } } } + if (request.getTrimAboveSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { + replica.trimOperationOfPreviousPrimaryTerms(request.getTrimAboveSeqNo()); + } return location; } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 46207b94c3af4..ac9248ef98d41 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -346,7 +346,8 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea * it sane. A single search request that fans out to lots of shards should not hit a cluster too hard while 256 is already a * lot. */ - searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount)); + // we use nodeCount * 5 as we used to default this to the default number of shards, which used to be 5. + searchRequest.setMaxConcurrentShardRequests(Math.min(256, nodeCount * 5)); } boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators); searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(), diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 874f03ae434de..ff51ae4332541 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -63,7 +63,7 @@ import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.merge.MergeStats; -import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; @@ -237,6 +237,12 @@ boolean isThrottled() { */ public abstract boolean isThrottled(); + /** + * Trims translog for terms below belowTerm and seq# above aboveSeqNo + * @see Translog#trimOperations(long, long) + */ + public abstract void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException; + /** A Lock implementation that always allows the lock to be acquired */ protected static final class NoOpLock implements Lock { @@ -636,11 +642,28 @@ public CommitStats commitStats() { } /** - * The sequence number service for this engine.
+ * @return the local checkpoint for this Engine + */ + public abstract long getLocalCheckpoint(); + + /** + * Waits for all operations up to the provided sequence number to complete. * - * @return the sequence number service + * @param seqNo the sequence number that the checkpoint must advance to before this method returns + * @throws InterruptedException if the thread was interrupted while blocking on the condition + */ + public abstract void waitForOpsToComplete(long seqNo) throws InterruptedException; + + /** + * Reset the local checkpoint in the tracker to the given local checkpoint + * @param localCheckpoint the new checkpoint to be set + */ + public abstract void resetLocalCheckpoint(long localCheckpoint); + + /** + * @return a {@link SeqNoStats} object, using local state and the supplied global checkpoint */ - public abstract LocalCheckpointTracker getLocalCheckpointTracker(); + public abstract SeqNoStats getSeqNoStats(long globalCheckpoint); /** * Returns the latest global checkpoint value that has been persisted in the underlying storage (i.e. translog's checkpoint) @@ -911,7 +934,7 @@ public final boolean refreshNeeded() { * checks and removes translog files that no longer need to be retained. See * {@link org.elasticsearch.index.translog.TranslogDeletionPolicy} for details */ - public abstract void trimTranslog() throws EngineException; + public abstract void trimUnreferencedTranslogFiles() throws EngineException; /** * Tests whether or not the translog generation should be rolled to a new generation. diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 900c8e0ea753f..c8abdac79db30 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -76,6 +76,7 @@ import org.elasticsearch.index.merge.MergeStats; import org.elasticsearch.index.merge.OnGoingMerge; import org.elasticsearch.index.seqno.LocalCheckpointTracker; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ElasticsearchMergePolicy; import org.elasticsearch.index.shard.ShardId; @@ -1652,7 +1653,7 @@ public void rollTranslogGeneration() throws EngineException { } @Override - public void trimTranslog() throws EngineException { + public void trimUnreferencedTranslogFiles() throws EngineException { try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); translog.trimUnreferencedReaders(); @@ -1669,6 +1670,24 @@ public void trimTranslog() throws EngineException { } } + @Override + public void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) throws EngineException { + try (ReleasableLock lock = readLock.acquire()) { + ensureOpen(); + translog.trimOperations(belowTerm, aboveSeqNo); + } catch (AlreadyClosedException e) { + failOnTragicEvent(e); + throw e; + } catch (Exception e) { + try { + failEngine("translog operations trimming failed", e); + } catch (Exception inner) { + e.addSuppressed(inner); + } + throw new EngineException(shardId, "failed to trim translog operations", e); + } + } + private void pruneDeletedTombstones() { /* * We need to deploy two different trimming strategies for GC deletes on primary and replicas. 
Delete operations on primary @@ -2289,10 +2308,31 @@ public MergeStats getMergeStats() { return mergeScheduler.stats(); } - public final LocalCheckpointTracker getLocalCheckpointTracker() { + // Used only for testing! Package private to prevent anyone else from using it + LocalCheckpointTracker getLocalCheckpointTracker() { return localCheckpointTracker; } + @Override + public long getLocalCheckpoint() { + return localCheckpointTracker.getCheckpoint(); + } + + @Override + public void waitForOpsToComplete(long seqNo) throws InterruptedException { + localCheckpointTracker.waitForOpsToComplete(seqNo); + } + + @Override + public void resetLocalCheckpoint(long localCheckpoint) { + localCheckpointTracker.resetCheckpoint(localCheckpoint); + } + + @Override + public SeqNoStats getSeqNoStats(long globalCheckpoint) { + return localCheckpointTracker.getStats(globalCheckpoint); + } + /** * Returns the number of times a version was looked up from the index. Note this is only available if assertions are enabled diff --git a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java index 13f6e58cd79ec..a759f6a676714 100644 --- a/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java +++ b/server/src/main/java/org/elasticsearch/index/get/ShardGetService.java @@ -202,7 +202,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[] if (gFields != null && gFields.length > 0) { for (String field : gFields) { - FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field); + FieldMapper fieldMapper = docMapper.mappers().getMapper(field); if (fieldMapper == null) { if (docMapper.objectMappers().get(field) != null) { // Only fail if we know it is an object field, missing paths / fields shouldn't fail.
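The hunks above narrow Engine's seq-no surface: call sites that used to reach through getLocalCheckpointTracker() now go through dedicated accessors, and the tracker itself becomes a test-only detail. A minimal sketch of a caller after this change (illustrative only; it uses nothing beyond the methods shown in the hunks above):

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.seqno.SeqNoStats;

// Illustrative caller; the LocalCheckpointTracker is no longer reachable from here.
final class SeqNoAccessSketch {
    static SeqNoStats snapshotStats(Engine engine, long globalCheckpoint) throws InterruptedException {
        // was: engine.getLocalCheckpointTracker().getCheckpoint()
        final long localCheckpoint = engine.getLocalCheckpoint();
        // was: engine.getLocalCheckpointTracker().waitForOpsToComplete(seqNo)
        engine.waitForOpsToComplete(localCheckpoint);
        // was: engine.getLocalCheckpointTracker().getStats(globalCheckpoint)
        return engine.getSeqNoStats(globalCheckpoint);
    }
}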
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 52b9a0d46e55d..c50a7d18113bf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -84,14 +84,6 @@ public Builder(String name) { this.builder = this; } - @Override - public Builder tokenized(boolean tokenized) { - if (tokenized) { - throw new IllegalArgumentException("bool field can't be tokenized"); - } - return super.tokenized(tokenized); - } - @Override public BooleanFieldMapper build(BuilderContext context) { setupFieldType(context); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java index 2a19cb3f8bd40..9193ca209ba23 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentFieldMappers.java @@ -20,16 +20,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.analysis.FieldNameAnalyzer; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.Map; -import java.util.Set; public final class DocumentFieldMappers implements Iterable<FieldMapper> { @@ -70,29 +67,6 @@ public FieldMapper getMapper(String field) { return fieldMappers.get(field); } - public Collection<String> simpleMatchToFullName(String pattern) { - Set<String> fields = new HashSet<>(); - for (FieldMapper fieldMapper : this) { - if (Regex.simpleMatch(pattern, fieldMapper.fieldType().name())) { - fields.add(fieldMapper.fieldType().name()); - } - } - return fields; - } - - public FieldMapper smartNameFieldMapper(String name) { - FieldMapper fieldMapper = getMapper(name); - if (fieldMapper != null) { - return fieldMapper; - } - for (FieldMapper otherFieldMapper : this) { - if (otherFieldMapper.fieldType().name().equals(name)) { - return otherFieldMapper; - } - } - return null; - } - /** * A smart analyzer used for indexing that takes into account specific analyzers configured per {@link FieldMapper}.
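With smartNameFieldMapper and the per-document simpleMatchToFullName removed above, lookups split cleanly in two: exact field names go through the O(1) map behind getMapper, while wildcard patterns are expanded by MapperService (renamed to simpleMatchToFullName further below). A short sketch of the resulting call pattern (the helper class is hypothetical; the individual calls are the ones used throughout this diff):

import java.util.Collection;
import java.util.Collections;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;

// Hypothetical helper: wildcard expansion on MapperService, exact lookup on DocumentFieldMappers.
final class FieldLookupSketch {
    static Collection<String> expand(MapperService mapperService, String pattern) {
        return Regex.isSimpleMatchPattern(pattern)
                ? mapperService.simpleMatchToFullName(pattern) // wildcard expansion
                : Collections.singletonList(pattern);          // exact name, no expansion needed
    }

    static FieldMapper exact(DocumentMapper docMapper, String field) {
        return docMapper.mappers().getMapper(field);           // O(1) map lookup, may return null
    }
}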
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index f23a8d0ce96aa..977b930c41e5b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -147,11 +147,6 @@ public T storeTermVectorPayloads(boolean termVectorPayloads) { return builder; } - public T tokenized(boolean tokenized) { - this.fieldType.setTokenized(tokenized); - return builder; - } - public T boost(float boost) { this.fieldType.setBoost(boost); return builder; @@ -376,9 +371,8 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; boolean defaultIndexed = defaultFieldType.indexOptions() != IndexOptions.NONE; - if (includeDefaults || indexed != defaultIndexed || - fieldType().tokenized() != defaultFieldType.tokenized()) { - builder.field("index", indexTokenizeOption(indexed, fieldType().tokenized())); + if (includeDefaults || indexed != defaultIndexed) { + builder.field("index", indexed); } if (includeDefaults || fieldType().stored() != defaultFieldType.stored()) { builder.field("store", fieldType().stored()); @@ -474,11 +468,6 @@ public static String termVectorOptionsToString(FieldType fieldType) { } } - /* Only protected so that string can override it */ - protected Object indexTokenizeOption(boolean indexed, boolean tokenized) { - return indexed; - } - protected abstract String contentType(); public static class MultiFields { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 71450e69948fb..24f27aa9ea7f3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -165,7 +165,7 @@ public void checkCompatibility(MappedFieldType other, List conflicts) { boolean indexed = indexOptions() != IndexOptions.NONE; boolean mergeWithIndexed = other.indexOptions() != IndexOptions.NONE; // TODO: should be validating if index options go "up" (but "down" is ok) - if (indexed != mergeWithIndexed || tokenized() != other.tokenized()) { + if (indexed != mergeWithIndexed) { conflicts.add("mapper [" + name() + "] has different [index] values"); } if (stored() != other.stored()) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index a06288b67e3bd..8988238d9277e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -721,7 +721,7 @@ public MappedFieldType fullName(String fullName) { * Returns all the fields that match the given pattern. If the pattern is prefixed with a type * then the fields will be returned with a type prefix. 
*/ - public Collection<String> simpleMatchToIndexNames(String pattern) { + public Collection<String> simpleMatchToFullName(String pattern) { if (Regex.isSimpleMatchPattern(pattern) == false) { // no wildcards return Collections.singletonList(pattern); diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index 6bb69c0cab990..598a6f38a2ef8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -198,7 +198,7 @@ public void setIsFilter(boolean isFilter) { * type then the fields will be returned with a type prefix. */ public Collection<String> simpleMatchToIndexNames(String pattern) { - return mapperService.simpleMatchToIndexNames(pattern); + return mapperService.simpleMatchToFullName(pattern); } public MappedFieldType fieldMapper(String name) { diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java index c3a695beff083..b3751afbc9c5d 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryParserHelper.java @@ -91,7 +91,7 @@ public static Map<String, Float> parseFieldsAndWeights(List<String> fields) { public static FieldMapper getFieldMapper(MapperService mapperService, String field) { DocumentMapper mapper = mapperService.documentMapper(); if (mapper != null) { - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(field); + FieldMapper fieldMapper = mapper.mappers().getMapper(field); if (fieldMapper != null) { return fieldMapper; } diff --git a/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java b/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java index 0c071f4b2d422..7cffc8c1ac911 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/SequenceNumbers.java @@ -37,7 +37,7 @@ public class SequenceNumbers { */ public static final long UNASSIGNED_SEQ_NO = -2L; /** - * Represents no operations have been performed on the shard. + * Represents no operations have been performed on the shard. Initial value of a sequence number.
*/ public static final long NO_OPS_PERFORMED = -1L; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index e90b98902b148..e8a3a14d4614e 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -407,7 +407,7 @@ public void updateShardState(final ShardRouting newRouting, assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { - replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); } changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); @@ -481,8 +481,7 @@ public void updateShardState(final ShardRouting newRouting, */ engine.rollTranslogGeneration(); engine.fillSeqNoGaps(newPrimaryTerm); - replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), - getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.updateLocalCheckpoint(currentRouting.allocationId().getId(), getLocalCheckpoint()); primaryReplicaSyncer.accept(this, new ActionListener() { @Override public void onResponse(ResyncTask resyncTask) { @@ -508,7 +507,7 @@ public void onFailure(Exception e) { } }, e -> failShard("exception during primary term transition", e)); - replicationTracker.activatePrimaryMode(getEngine().getLocalCheckpointTracker().getCheckpoint()); + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); primaryTerm = newPrimaryTerm; } } @@ -875,7 +874,7 @@ public CommitStats commitStats() { @Nullable public SeqNoStats seqNoStats() { Engine engine = getEngineOrNull(); - return engine == null ? null : engine.getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint()); + return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } public IndexingStats indexingStats(String... 
types) { @@ -994,7 +993,7 @@ public Engine.CommitId flush(FlushRequest request) { public void trimTranslog() { verifyNotClosed(); final Engine engine = getEngine(); - engine.trimTranslog(); + engine.trimUnreferencedTranslogFiles(); } /** @@ -1196,6 +1195,10 @@ public void prepareForIndexRecovery() { assert currentEngineReference.get() == null; } + public void trimOperationOfPreviousPrimaryTerms(long aboveSeqNo) { + getEngine().trimOperationsFromTranslog(primaryTerm, aboveSeqNo); + } + public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine.Operation.Origin origin) throws IOException { final Engine.Result result; switch (operation.opType()) { @@ -1721,7 +1724,7 @@ public void updateGlobalCheckpointForShard(final String allocationId, final long * @throws InterruptedException if the thread was interrupted while blocking on the condition */ public void waitForOpsToComplete(final long seqNo) throws InterruptedException { - getEngine().getLocalCheckpointTracker().waitForOpsToComplete(seqNo); + getEngine().waitForOpsToComplete(seqNo); } /** @@ -1754,7 +1757,7 @@ public void markAllocationIdAsInSync(final String allocationId, final long local * @return the local checkpoint */ public long getLocalCheckpoint() { - return getEngine().getLocalCheckpointTracker().getCheckpoint(); + return getEngine().getLocalCheckpoint(); } /** @@ -1795,7 +1798,7 @@ public void maybeSyncGlobalCheckpoint(final String reason) { return; } // only sync if there are not operations in flight - final SeqNoStats stats = getEngine().getLocalCheckpointTracker().getStats(replicationTracker.getGlobalCheckpoint()); + final SeqNoStats stats = getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); if (stats.getMaxSeqNo() == stats.getGlobalCheckpoint()) { final ObjectLongMap globalCheckpoints = getInSyncGlobalCheckpoints(); final String allocationId = routingEntry().allocationId().getId(); @@ -1832,7 +1835,7 @@ public ReplicationGroup getReplicationGroup() { */ public void updateGlobalCheckpointOnReplica(final long globalCheckpoint, final String reason) { verifyReplicationTarget(); - final long localCheckpoint = getEngine().getLocalCheckpointTracker().getCheckpoint(); + final long localCheckpoint = getLocalCheckpoint(); if (globalCheckpoint > localCheckpoint) { /* * This can happen during recovery when the shard has started its engine but recovery is not finalized and is receiving global @@ -1861,8 +1864,7 @@ public void activateWithPrimaryContext(final ReplicationTracker.PrimaryContext p verifyPrimary(); assert shardRouting.isRelocationTarget() : "only relocation target can update allocation IDs from primary context: " + shardRouting; assert primaryContext.getCheckpointStates().containsKey(routingEntry().allocationId().getId()) && - getEngine().getLocalCheckpointTracker().getCheckpoint() == - primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); + getLocalCheckpoint() == primaryContext.getCheckpointStates().get(routingEntry().allocationId().getId()).getLocalCheckpoint(); synchronized (mutex) { replicationTracker.activateWithPrimaryContext(primaryContext); // make changes to primaryMode flag only under mutex } @@ -2248,7 +2250,7 @@ public void acquireReplicaOperationPermit(final long operationPrimaryTerm, final operationPrimaryTerm, getLocalCheckpoint(), localCheckpoint); - getEngine().getLocalCheckpointTracker().resetCheckpoint(localCheckpoint); + getEngine().resetLocalCheckpoint(localCheckpoint); getEngine().rollTranslogGeneration(); }); 
globalCheckpointUpdated = true; diff --git a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java index d7105c0c14d38..09391c9bc9643 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/shard/LocalShardSnapshot.java @@ -62,7 +62,7 @@ Index getIndex() { } long maxSeqNo() { - return shard.getEngine().getLocalCheckpointTracker().getMaxSeqNo(); + return shard.getEngine().getSeqNoStats(-1).getMaxSeqNo(); } long maxUnsafeAutoIdTimestamp() { diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index af8c9bdd0272f..8e05e7bf08efa 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.tasks.Task; @@ -84,6 +85,7 @@ public void resync(final IndexShard indexShard, final ActionListener try { final long startingSeqNo = indexShard.getGlobalCheckpoint() + 1; Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); + final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); resyncListener = new ActionListener() { @Override public void onResponse(final ResyncTask resyncTask) { @@ -135,7 +137,7 @@ public synchronized Translog.Operation next() throws IOException { } }; resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot, - startingSeqNo, resyncListener); + startingSeqNo, maxSeqNo, resyncListener); } catch (Exception e) { if (resyncListener != null) { resyncListener.onFailure(e); @@ -146,7 +148,7 @@ public synchronized Translog.Operation next() throws IOException { } private void resync(final ShardId shardId, final String primaryAllocationId, final long primaryTerm, final Translog.Snapshot snapshot, - long startingSeqNo, ActionListener listener) { + long startingSeqNo, long maxSeqNo, ActionListener listener) { ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId); ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-) ActionListener wrappedListener = new ActionListener() { @@ -166,7 +168,7 @@ public void onFailure(Exception e) { }; try { new SnapshotSender(logger, syncAction, resyncTask, shardId, primaryAllocationId, primaryTerm, snapshot, chunkSize.bytesAsInt(), - startingSeqNo, wrappedListener).run(); + startingSeqNo, maxSeqNo, wrappedListener).run(); } catch (Exception e) { wrappedListener.onFailure(e); } @@ -186,14 +188,16 @@ static class SnapshotSender extends AbstractRunnable implements ActionListener listener; + private final AtomicBoolean firstMessage = new AtomicBoolean(true); private final AtomicInteger totalSentOps = new AtomicInteger(); private final AtomicInteger totalSkippedOps = new AtomicInteger(); private AtomicBoolean closed = new AtomicBoolean(); SnapshotSender(Logger logger, SyncAction syncAction, ResyncTask task, ShardId shardId, String 
primaryAllocationId, long primaryTerm, - Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, ActionListener listener) { + Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, ActionListener listener) { this.logger = logger; this.syncAction = syncAction; this.task = task; @@ -203,6 +207,7 @@ static class SnapshotSender extends AbstractRunnable implements ActionListener fieldNames = new HashSet<>(); for (String pattern : request.selectedFields()) { - fieldNames.addAll(indexShard.mapperService().simpleMatchToIndexNames(pattern)); + fieldNames.addAll(indexShard.mapperService().simpleMatchToFullName(pattern)); } request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY)); } diff --git a/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java b/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java index 32aef840b6f37..364203f898cc1 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Checkpoint.java @@ -45,14 +45,29 @@ final class Checkpoint { final long maxSeqNo; final long globalCheckpoint; final long minTranslogGeneration; + final long trimmedAboveSeqNo; private static final int INITIAL_VERSION = 1; // start with 1, just to recognize there was some magic serialization logic before - private static final int CURRENT_VERSION = 2; // introduction of global checkpoints + private static final int VERSION_6_0_0 = 2; // introduction of global checkpoints + private static final int CURRENT_VERSION = 3; // introduction of trimmed above seq# private static final String CHECKPOINT_CODEC = "ckp"; + // size of 6.4.0 checkpoint + + static final int V3_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + + Integer.BYTES // ops + + Long.BYTES // offset + + Long.BYTES // generation + + Long.BYTES // minimum sequence number, introduced in 6.0.0 + + Long.BYTES // maximum sequence number, introduced in 6.0.0 + + Long.BYTES // global checkpoint, introduced in 6.0.0 + + Long.BYTES // minimum translog generation in the translog - introduced in 6.0.0 + + Long.BYTES // maximum reachable (trimmed) sequence number, introduced in 6.4.0 + + CodecUtil.footerLength(); + // size of 6.0.0 checkpoint - static final int FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + static final int V2_FILE_SIZE = CodecUtil.headerLength(CHECKPOINT_CODEC) + Integer.BYTES // ops + Long.BYTES // offset + Long.BYTES // generation @@ -72,16 +87,20 @@ final class Checkpoint { /** * Create a new translog checkpoint. * - * @param offset the current offset in the translog - * @param numOps the current number of operations in the translog - * @param generation the current translog generation - * @param minSeqNo the current minimum sequence number of all operations in the translog - * @param maxSeqNo the current maximum sequence number of all operations in the translog - * @param globalCheckpoint the last-known global checkpoint + * @param offset the current offset in the translog + * @param numOps the current number of operations in the translog + * @param generation the current translog generation + * @param minSeqNo the current minimum sequence number of all operations in the translog + * @param maxSeqNo the current maximum sequence number of all operations in the translog + * @param globalCheckpoint the last-known global checkpoint * @param minTranslogGeneration the minimum generation referenced by the translog at this moment. 
+ * @param trimmedAboveSeqNo all operations with seq# above trimmedAboveSeqNo should be ignored and not read from the + * corresponding translog file. {@link SequenceNumbers#UNASSIGNED_SEQ_NO} is used to disable trimming. */ - Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint, long minTranslogGeneration) { + Checkpoint(long offset, int numOps, long generation, long minSeqNo, long maxSeqNo, long globalCheckpoint, + long minTranslogGeneration, long trimmedAboveSeqNo) { assert minSeqNo <= maxSeqNo : "minSeqNo [" + minSeqNo + "] is higher than maxSeqNo [" + maxSeqNo + "]"; + assert trimmedAboveSeqNo <= maxSeqNo : "trimmedAboveSeqNo [" + trimmedAboveSeqNo + "] is higher than maxSeqNo [" + maxSeqNo + "]"; assert minTranslogGeneration <= generation : "minTranslogGen [" + minTranslogGeneration + "] is higher than generation [" + generation + "]"; this.offset = offset; @@ -91,6 +110,7 @@ final class Checkpoint { this.maxSeqNo = maxSeqNo; this.globalCheckpoint = globalCheckpoint; this.minTranslogGeneration = minTranslogGeneration; + this.trimmedAboveSeqNo = trimmedAboveSeqNo; } private void write(DataOutput out) throws IOException { @@ -101,26 +121,52 @@ private void write(DataOutput out) throws IOException { out.writeLong(maxSeqNo); out.writeLong(globalCheckpoint); out.writeLong(minTranslogGeneration); + out.writeLong(trimmedAboveSeqNo); } static Checkpoint emptyTranslogCheckpoint(final long offset, final long generation, final long globalCheckpoint, long minTranslogGeneration) { final long minSeqNo = SequenceNumbers.NO_OPS_PERFORMED; final long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; - return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); + final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + return new Checkpoint(offset, 0, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo); + } + + static Checkpoint readCheckpointV6_4_0(final DataInput in) throws IOException { + final long offset = in.readLong(); + final int numOps = in.readInt(); + final long generation = in.readLong(); + final long minSeqNo = in.readLong(); + final long maxSeqNo = in.readLong(); + final long globalCheckpoint = in.readLong(); + final long minTranslogGeneration = in.readLong(); + final long trimmedAboveSeqNo = in.readLong(); + return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo); } static Checkpoint readCheckpointV6_0_0(final DataInput in) throws IOException { - return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), in.readLong(), in.readLong(), in.readLong(), in.readLong()); + final long offset = in.readLong(); + final int numOps = in.readInt(); + final long generation = in.readLong(); + final long minSeqNo = in.readLong(); + final long maxSeqNo = in.readLong(); + final long globalCheckpoint = in.readLong(); + final long minTranslogGeneration = in.readLong(); + final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo); } // reads a checksummed checkpoint introduced in ES 5.0.0 static Checkpoint readCheckpointV5_0_0(final DataInput in) throws IOException { + final long offset = in.readLong(); + final int numOps = in.readInt(); + final long generation = in.readLong(); final long minSeqNo = SequenceNumbers.NO_OPS_PERFORMED; final long 
maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; final long globalCheckpoint = SequenceNumbers.UNASSIGNED_SEQ_NO; - final long minTranslogGeneration = -1L; - return new Checkpoint(in.readLong(), in.readInt(), in.readLong(), minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration); + final long minTranslogGeneration = -1; + final long trimmedAboveSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; + return new Checkpoint(offset, numOps, generation, minSeqNo, maxSeqNo, globalCheckpoint, minTranslogGeneration, trimmedAboveSeqNo); } @Override @@ -133,6 +179,7 @@ public String toString() { ", maxSeqNo=" + maxSeqNo + ", globalCheckpoint=" + globalCheckpoint + ", minTranslogGeneration=" + minTranslogGeneration + + ", trimmedAboveSeqNo=" + trimmedAboveSeqNo + '}'; } @@ -145,17 +192,20 @@ public static Checkpoint read(Path path) throws IOException { if (fileVersion == INITIAL_VERSION) { assert indexInput.length() == V1_FILE_SIZE : indexInput.length(); return Checkpoint.readCheckpointV5_0_0(indexInput); + } else if (fileVersion == VERSION_6_0_0) { + assert indexInput.length() == V2_FILE_SIZE : indexInput.length(); + return Checkpoint.readCheckpointV6_0_0(indexInput); } else { assert fileVersion == CURRENT_VERSION : fileVersion; - assert indexInput.length() == FILE_SIZE : indexInput.length(); - return Checkpoint.readCheckpointV6_0_0(indexInput); + assert indexInput.length() == V3_FILE_SIZE : indexInput.length(); + return Checkpoint.readCheckpointV6_4_0(indexInput); } } } } public static void write(ChannelFactory factory, Path checkpointFile, Checkpoint checkpoint, OpenOption... options) throws IOException { - final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(FILE_SIZE) { + final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream(V3_FILE_SIZE) { @Override public synchronized byte[] toByteArray() { // don't clone @@ -164,13 +214,13 @@ public synchronized byte[] toByteArray() { }; final String resourceDesc = "checkpoint(path=\"" + checkpointFile + "\", gen=" + checkpoint + ")"; try (OutputStreamIndexOutput indexOutput = - new OutputStreamIndexOutput(resourceDesc, checkpointFile.toString(), byteOutputStream, FILE_SIZE)) { + new OutputStreamIndexOutput(resourceDesc, checkpointFile.toString(), byteOutputStream, V3_FILE_SIZE)) { CodecUtil.writeHeader(indexOutput, CHECKPOINT_CODEC, CURRENT_VERSION); checkpoint.write(indexOutput); CodecUtil.writeFooter(indexOutput); - assert indexOutput.getFilePointer() == FILE_SIZE : - "get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " + FILE_SIZE; + assert indexOutput.getFilePointer() == V3_FILE_SIZE : + "get you numbers straight; bytes written: " + indexOutput.getFilePointer() + ", buffer size: " + V3_FILE_SIZE; assert indexOutput.getFilePointer() < 512 : "checkpoint files have to be smaller than 512 bytes for atomic writes; size: " + indexOutput.getFilePointer(); @@ -196,7 +246,8 @@ public boolean equals(Object o) { if (generation != that.generation) return false; if (minSeqNo != that.minSeqNo) return false; if (maxSeqNo != that.maxSeqNo) return false; - return globalCheckpoint == that.globalCheckpoint; + if (globalCheckpoint != that.globalCheckpoint) return false; + return trimmedAboveSeqNo == that.trimmedAboveSeqNo; } @Override @@ -207,6 +258,7 @@ public int hashCode() { result = 31 * result + Long.hashCode(minSeqNo); result = 31 * result + Long.hashCode(maxSeqNo); result = 31 * result + Long.hashCode(globalCheckpoint); + result = 31 * result + Long.hashCode(trimmedAboveSeqNo); return result; } 
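For reference, the V2 and V3 checkpoint layouts above differ by exactly one long (trimmedAboveSeqNo). A sketch of the size arithmetic behind V2_FILE_SIZE and V3_FILE_SIZE, using the same Lucene CodecUtil helpers those constants are built from (the helper method itself is illustrative):

import org.apache.lucene.codecs.CodecUtil;

// Illustrative: V2 carries six long-valued fields, V3 adds a seventh (trimmedAboveSeqNo).
final class CheckpointSizeSketch {
    static int fileSize(int longFields) {
        return CodecUtil.headerLength("ckp") // Lucene codec header
                + Integer.BYTES              // numOps
                + longFields * Long.BYTES    // offset, generation, seq-no fields, ...
                + CodecUtil.footerLength();  // Lucene checksum footer
    }
    // fileSize(6) matches the 6.0.0 (V2) size, fileSize(7) the 6.4.0 (V3) size.
}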
diff --git a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java index 1b095beddb4a8..b5a17534903ee 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/MultiSnapshot.java @@ -56,6 +56,15 @@ public int totalOperations() { return totalOperations; } + @Override + public int skippedOperations() { + int skippedOperations = overriddenOperations; + for (TranslogSnapshot translog : translogs) { + skippedOperations += translog.skippedOperations(); + } + return skippedOperations; + } + @Override public int overriddenOperations() { return overriddenOperations; diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index ec4f57e6f4101..32e2024a73262 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -710,6 +710,41 @@ static String getCommitCheckpointFileName(long generation) { return TRANSLOG_FILE_PREFIX + generation + CHECKPOINT_SUFFIX; } + /** + * Trims translog for terms below belowTerm and seq# above aboveSeqNo. + * Effectively it lowers the max visible seq# ({@link Checkpoint#trimmedAboveSeqNo}) so that {@link TranslogSnapshot} skips those operations. + */ + public void trimOperations(long belowTerm, long aboveSeqNo) throws IOException { + assert aboveSeqNo >= SequenceNumbers.NO_OPS_PERFORMED : "aboveSeqNo has to be a valid sequence number"; + + try (ReleasableLock lock = writeLock.acquire()) { + ensureOpen(); + if (current.getPrimaryTerm() < belowTerm) { + throw new IllegalArgumentException("Trimming the translog can only be done for terms lower than the current one. " + + "Trim requested for term [ " + belowTerm + " ] , current is [ " + current.getPrimaryTerm() + " ]"); + } + // we assume that the current translog generation doesn't have trimmable ops. Verify that. + assert current.assertNoSeqAbove(belowTerm, aboveSeqNo); + // update all existing readers (if necessary) since checkpoint and reader are immutable + final List<TranslogReader> newReaders = new ArrayList<>(readers.size()); + try { + for (TranslogReader reader : readers) { + final TranslogReader newReader = + reader.getPrimaryTerm() < belowTerm + ? reader.closeIntoTrimmedReader(aboveSeqNo, getChannelFactory()) + : reader; + newReaders.add(newReader); + } + } catch (IOException e) { + IOUtils.closeWhileHandlingException(newReaders); + close(); + throw e; + } + + this.readers.clear(); + this.readers.addAll(newReaders); + } + } /** * Ensures that the given location has been synced / written to the underlying storage. @@ -859,6 +894,13 @@ public interface Snapshot extends Closeable { */ int totalOperations(); + /** + * The number of operations that have been skipped (overridden or trimmed) in the snapshot so far. + */ + default int skippedOperations() { + return 0; + } + /** * The number of operations that have been overridden (e.g. superseded) in the snapshot so far.
* If two operations have the same sequence number, the operation with a lower term will be overridden by the operation diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java index 29e30bd25dd37..4091fa45762e1 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogReader.java @@ -21,6 +21,8 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.seqno.SequenceNumbers; import java.io.Closeable; import java.io.EOFException; @@ -28,8 +30,11 @@ import java.nio.ByteBuffer; import java.nio.channels.FileChannel; import java.nio.file.Path; +import java.nio.file.StandardOpenOption; import java.util.concurrent.atomic.AtomicBoolean; +import static org.elasticsearch.index.translog.Translog.getCommitCheckpointFileName; /** * an immutable translog file reader */ @@ -70,6 +75,39 @@ public static TranslogReader open( return new TranslogReader(checkpoint, channel, path, header); } + /** + * Closes the current reader and creates a new one with the new checkpoint and the same file channel + */ + TranslogReader closeIntoTrimmedReader(long aboveSeqNo, ChannelFactory channelFactory) throws IOException { + if (closed.compareAndSet(false, true)) { + Closeable toCloseOnFailure = channel; + final TranslogReader newReader; + try { + if (aboveSeqNo < checkpoint.trimmedAboveSeqNo + || aboveSeqNo < checkpoint.maxSeqNo && checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + final Path checkpointFile = path.getParent().resolve(getCommitCheckpointFileName(checkpoint.generation)); + final Checkpoint newCheckpoint = new Checkpoint(checkpoint.offset, checkpoint.numOps, + checkpoint.generation, checkpoint.minSeqNo, checkpoint.maxSeqNo, + checkpoint.globalCheckpoint, checkpoint.minTranslogGeneration, aboveSeqNo); + Checkpoint.write(channelFactory, checkpointFile, newCheckpoint, StandardOpenOption.WRITE); + + IOUtils.fsync(checkpointFile, false); + IOUtils.fsync(checkpointFile.getParent(), true); + + newReader = new TranslogReader(newCheckpoint, channel, path, header); + } else { + newReader = new TranslogReader(checkpoint, channel, path, header); + } + toCloseOnFailure = null; + return newReader; + } finally { + IOUtils.close(toCloseOnFailure); + } + } else { + throw new AlreadyClosedException(toString() + " is already closed"); + } + } + public long sizeInBytes() { return length; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java index a966720353297..8fe92bba0097c 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogSnapshot.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.translog; import org.elasticsearch.common.io.Channels; +import org.elasticsearch.index.seqno.SequenceNumbers; import java.io.EOFException; import java.io.IOException; @@ -32,6 +33,7 @@ final class TranslogSnapshot extends BaseTranslogReader { private final ByteBuffer reusableBuffer; private long position; + private int skippedOperations; private int readOperations; private BufferedChecksumStreamInput reuse; @@ -54,17 +56,24 @@ public int totalOperations() { return totalOperations; } + int
skippedOperations(){ + return skippedOperations; + } + @Override Checkpoint getCheckpoint() { return checkpoint; } public Translog.Operation next() throws IOException { - if (readOperations < totalOperations) { - return readOperation(); - } else { - return null; + while (readOperations < totalOperations) { + final Translog.Operation operation = readOperation(); + if (operation.seqNo() <= checkpoint.trimmedAboveSeqNo || checkpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { + return operation; + } + skippedOperations++; } + return null; } protected Translog.Operation readOperation() throws IOException { diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index cae6578886534..b89b21c52588a 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import org.apache.lucene.store.AlreadyClosedException; -import org.apache.lucene.store.OutputStreamDataOutput; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.Assertions; import org.elasticsearch.common.bytes.BytesArray; @@ -92,6 +91,7 @@ private TranslogWriter( this.minSeqNo = initialCheckpoint.minSeqNo; assert initialCheckpoint.maxSeqNo == SequenceNumbers.NO_OPS_PERFORMED : initialCheckpoint.maxSeqNo; this.maxSeqNo = initialCheckpoint.maxSeqNo; + assert initialCheckpoint.trimmedAboveSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO : initialCheckpoint.trimmedAboveSeqNo; this.globalCheckpointSupplier = globalCheckpointSupplier; this.seenSequenceNumbers = Assertions.ENABLED ? new HashMap<>() : null; } @@ -213,6 +213,25 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc return true; } + synchronized boolean assertNoSeqAbove(long belowTerm, long aboveSeqNo) { + seenSequenceNumbers.entrySet().stream().filter(e -> e.getKey().longValue() > aboveSeqNo) + .forEach(e -> { + final Translog.Operation op; + try { + op = Translog.readOperation(new BufferedChecksumStreamInput(e.getValue().v1().streamInput())); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + long seqNo = op.seqNo(); + long primaryTerm = op.primaryTerm(); + if (primaryTerm < belowTerm) { + throw new AssertionError("current should not have any operations with seq#:primaryTerm [" + + seqNo + ":" + primaryTerm + "] > " + aboveSeqNo + ":" + belowTerm); + } + }); + return true; + } + /** * write all buffered ops to disk and fsync file. 
* @@ -241,7 +260,8 @@ public int totalOperations() { @Override synchronized Checkpoint getCheckpoint() { return new Checkpoint(totalOffset, operationCounter, generation, minSeqNo, maxSeqNo, - globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong()); + globalCheckpointSupplier.getAsLong(), minTranslogGenerationSupplier.getAsLong(), + SequenceNumbers.UNASSIGNED_SEQ_NO); } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 13aaf44c82ed9..6b7860c0cf949 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -29,24 +29,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; -import org.elasticsearch.index.analysis.ArabicAnalyzerProvider; -import org.elasticsearch.index.analysis.ArmenianAnalyzerProvider; -import org.elasticsearch.index.analysis.BasqueAnalyzerProvider; -import org.elasticsearch.index.analysis.BengaliAnalyzerProvider; -import org.elasticsearch.index.analysis.BrazilianAnalyzerProvider; -import org.elasticsearch.index.analysis.BulgarianAnalyzerProvider; -import org.elasticsearch.index.analysis.CatalanAnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.ChineseAnalyzerProvider; -import org.elasticsearch.index.analysis.CjkAnalyzerProvider; -import org.elasticsearch.index.analysis.CzechAnalyzerProvider; -import org.elasticsearch.index.analysis.DanishAnalyzerProvider; -import org.elasticsearch.index.analysis.DutchAnalyzerProvider; -import org.elasticsearch.index.analysis.EnglishAnalyzerProvider; -import org.elasticsearch.index.analysis.FinnishAnalyzerProvider; -import org.elasticsearch.index.analysis.FrenchAnalyzerProvider; -import org.elasticsearch.index.analysis.GalicianAnalyzerProvider; -import org.elasticsearch.index.analysis.GermanAnalyzerProvider; import org.elasticsearch.index.analysis.GreekAnalyzerProvider; import org.elasticsearch.index.analysis.HindiAnalyzerProvider; import org.elasticsearch.index.analysis.HungarianAnalyzerProvider; @@ -68,7 +51,6 @@ import org.elasticsearch.index.analysis.RussianAnalyzerProvider; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; -import org.elasticsearch.index.analysis.SnowballAnalyzerProvider; import org.elasticsearch.index.analysis.SoraniAnalyzerProvider; import org.elasticsearch.index.analysis.SpanishAnalyzerProvider; import org.elasticsearch.index.analysis.StandardAnalyzerProvider; @@ -245,24 +227,6 @@ private NamedRegistry>> setupAnalyzers(List analyzers.register("stop", StopAnalyzerProvider::new); analyzers.register("whitespace", WhitespaceAnalyzerProvider::new); analyzers.register("keyword", KeywordAnalyzerProvider::new); - analyzers.register("snowball", SnowballAnalyzerProvider::new); - analyzers.register("arabic", ArabicAnalyzerProvider::new); - analyzers.register("armenian", ArmenianAnalyzerProvider::new); - analyzers.register("basque", BasqueAnalyzerProvider::new); - analyzers.register("bengali", BengaliAnalyzerProvider::new); - analyzers.register("brazilian", BrazilianAnalyzerProvider::new); - analyzers.register("bulgarian", BulgarianAnalyzerProvider::new); - analyzers.register("catalan", 
CatalanAnalyzerProvider::new); - analyzers.register("chinese", ChineseAnalyzerProvider::new); - analyzers.register("cjk", CjkAnalyzerProvider::new); - analyzers.register("czech", CzechAnalyzerProvider::new); - analyzers.register("danish", DanishAnalyzerProvider::new); - analyzers.register("dutch", DutchAnalyzerProvider::new); - analyzers.register("english", EnglishAnalyzerProvider::new); - analyzers.register("finnish", FinnishAnalyzerProvider::new); - analyzers.register("french", FrenchAnalyzerProvider::new); - analyzers.register("galician", GalicianAnalyzerProvider::new); - analyzers.register("german", GermanAnalyzerProvider::new); analyzers.register("greek", GreekAnalyzerProvider::new); analyzers.register("hindi", HindiAnalyzerProvider::new); analyzers.register("hungarian", HungarianAnalyzerProvider::new); diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 18cc247b84493..0e9aed3c142d9 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -20,37 +20,21 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; -import org.apache.lucene.analysis.ar.ArabicAnalyzer; -import org.apache.lucene.analysis.bg.BulgarianAnalyzer; -import org.apache.lucene.analysis.bn.BengaliAnalyzer; -import org.apache.lucene.analysis.br.BrazilianAnalyzer; -import org.apache.lucene.analysis.ca.CatalanAnalyzer; -import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.analysis.cz.CzechAnalyzer; -import org.apache.lucene.analysis.da.DanishAnalyzer; -import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.el.GreekAnalyzer; -import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.es.SpanishAnalyzer; -import org.apache.lucene.analysis.eu.BasqueAnalyzer; import org.apache.lucene.analysis.fa.PersianAnalyzer; -import org.apache.lucene.analysis.fi.FinnishAnalyzer; -import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.apache.lucene.analysis.ga.IrishAnalyzer; -import org.apache.lucene.analysis.gl.GalicianAnalyzer; import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.apache.lucene.analysis.hu.HungarianAnalyzer; -import org.apache.lucene.analysis.hy.ArmenianAnalyzer; import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.apache.lucene.analysis.it.ItalianAnalyzer; import org.apache.lucene.analysis.lt.LithuanianAnalyzer; import org.apache.lucene.analysis.lv.LatvianAnalyzer; -import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.apache.lucene.analysis.no.NorwegianAnalyzer; import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.apache.lucene.analysis.ro.RomanianAnalyzer; @@ -61,7 +45,6 @@ import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.Version; -import org.elasticsearch.index.analysis.SnowballAnalyzer; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import java.util.Locale; @@ -129,168 +112,6 @@ protected Analyzer create(Version 
version) { } }, - SNOWBALL { - @Override - protected Analyzer create(Version version) { - Analyzer analyzer = new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET); - analyzer.setVersion(version.luceneVersion); - return analyzer; - } - }, - - ARABIC { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ArabicAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ARMENIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ArmenianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BASQUE { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BasqueAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BENGALI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BengaliAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BRAZILIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BrazilianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - BULGARIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new BulgarianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CATALAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CatalanAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CHINESE(CachingStrategy.ONE) { - @Override - protected Analyzer create(Version version) { - Analyzer a = new StandardAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CJK { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CJKAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - CZECH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new CzechAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - DUTCH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new DutchAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - DANISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new DanishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ENGLISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new EnglishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - FINNISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new FinnishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - FRENCH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new FrenchAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - GALICIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GalicianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - GERMAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GermanAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - GREEK { @Override protected Analyzer create(Version version) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 4c543aeeb22d4..72a6fcb6ba329 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ 
b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -615,9 +615,9 @@ protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long require cancellableThreads.executeIO(sendBatch); } - assert expectedTotalOps == snapshot.overriddenOperations() + skippedOps + totalSentOps + assert expectedTotalOps == snapshot.skippedOperations() + skippedOps + totalSentOps : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", - expectedTotalOps, snapshot.overriddenOperations(), skippedOps, totalSentOps); + expectedTotalOps, snapshot.skippedOperations(), skippedOps, totalSentOps); if (requiredOpsTracker.getCheckpoint() < endingSeqNo) { throw new IllegalStateException("translog replay failed to cover required sequence numbers" + diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 16cc6a50e8c7e..4343a1ebca564 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -53,8 +53,7 @@ public void hitExecute(SearchContext context, HitContext hitContext) { for (SearchContextHighlight.Field field : context.highlight().fields()) { Collection<String> fieldNamesToHighlight; if (Regex.isSimpleMatchPattern(field.field())) { - DocumentMapper documentMapper = context.mapperService().documentMapper(hitContext.hit().getType()); - fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field()); + fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field()); } else { fieldNamesToHighlight = Collections.singletonList(field.field()); } diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index d5ad3941a5e8f..914c2b87422db 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -40,7 +40,7 @@ public void testSerialization() throws IOException { final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); - final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, new Translog.Operation[]{index}); + final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); final BytesStreamOutput out = new BytesStreamOutput(); before.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 4af5d5b9a8535..9786c0eaf5290 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; import
org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; @@ -350,7 +351,7 @@ public void testPrimaryReplicaResyncFailed() throws Exception { assertThat(indexResult.getShardInfo().getSuccessful(), equalTo(numberOfReplicas + 1)); } final IndexShard oldPrimaryShard = internalCluster().getInstance(IndicesService.class, oldPrimary).getShardOrNull(shardId); - IndexShardTestCase.getEngine(oldPrimaryShard).getLocalCheckpointTracker().generateSeqNo(); // Make gap in seqno. + EngineTestCase.generateNewSeqNo(IndexShardTestCase.getEngine(oldPrimaryShard)); // Make gap in seqno. long moreDocs = scaledRandomIntBetween(1, 10); for (int i = 0; i < moreDocs; i++) { IndexResponse indexResult = index("test", "doc", Long.toString(numDocs + i)); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index d0ffdbe229dd6..8c4879fd35e82 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -61,14 +61,17 @@ public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { } public void testThatInstancesAreCachedAndReused() { - assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.CURRENT)); - // same lucene version should be cached - assertSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_1), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_2_2)); - - assertNotSame(PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_0), - PreBuiltAnalyzers.ARABIC.getAnalyzer(Version.V_5_0_1)); + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT)); + // same es version should be cached + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1)); + assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_0), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_1)); + + // Same Lucene version should be cached: + assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_1), + PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_2)); } public void testThatAnalyzersAreUsedInMapping() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 4fd47031a96b0..94c6066080a25 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -232,7 +232,7 @@ public void testVersionMapAfterAutoIDDocument() throws IOException { new BytesArray("{}".getBytes(Charset.defaultCharset())), null); operation = randomBoolean() ? 
appendOnlyPrimary(doc, false, 1) - : appendOnlyReplica(doc, false, 1, engine.getLocalCheckpointTracker().generateSeqNo()); + : appendOnlyReplica(doc, false, 1, generateNewSeqNo(engine)); engine.index(operation); assertTrue("safe access should be required", engine.isSafeAccessRequired()); assertEquals(1, engine.getVersionMapSize()); // now we add this to the map @@ -1028,7 +1028,7 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { engine.index(indexForDoc(doc)); boolean inSync = randomBoolean(); if (inSync) { - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); } engine.flush(); @@ -1046,7 +1046,7 @@ public void testCommitAdvancesMinTranslogForRecovery() throws IOException { assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(inSync ? 4L : 1L)); assertThat(engine.getTranslog().getDeletionPolicy().getTranslogGenerationOfLastCommit(), equalTo(4L)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.flush(true, true); assertThat(engine.getTranslog().currentFileGeneration(), equalTo(5L)); assertThat(engine.getTranslog().getDeletionPolicy().getMinTranslogGenerationForRecovery(), equalTo(5L)); @@ -2058,12 +2058,12 @@ public void testSeqNoAndCheckpoints() throws IOException { final Engine.DeleteResult result = initialEngine.delete(delete); if (result.getResultType() == Engine.Result.Type.SUCCESS) { assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1)); indexedIds.remove(id); primarySeqNo++; } else { assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); } } else { // index a document @@ -2076,12 +2076,12 @@ public void testSeqNoAndCheckpoints() throws IOException { final Engine.IndexResult result = initialEngine.index(index); if (result.getResultType() == Engine.Result.Type.SUCCESS) { assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1)); indexedIds.add(id); primarySeqNo++; } else { assertThat(result.getSeqNo(), equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); } } @@ -2090,7 +2090,7 @@ public void testSeqNoAndCheckpoints() throws IOException { replicaLocalCheckpoint = randomIntBetween(Math.toIntExact(replicaLocalCheckpoint), Math.toIntExact(primarySeqNo)); } gcpTracker.updateLocalCheckpoint(primary.allocationId().getId(), - initialEngine.getLocalCheckpointTracker().getCheckpoint()); + initialEngine.getLocalCheckpoint()); gcpTracker.updateLocalCheckpoint(replica.allocationId().getId(), replicaLocalCheckpoint); if (rarely()) { @@ -2103,8 +2103,8 @@ public void testSeqNoAndCheckpoints() throws IOException { logger.info("localcheckpoint {}, global {}", replicaLocalCheckpoint, primarySeqNo); globalCheckpoint = 
gcpTracker.getGlobalCheckpoint(); - assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(primarySeqNo, initialEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(primarySeqNo, initialEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(primarySeqNo, initialEngine.getLocalCheckpoint()); assertThat(globalCheckpoint, equalTo(replicaLocalCheckpoint)); assertThat( @@ -2126,7 +2126,7 @@ public void testSeqNoAndCheckpoints() throws IOException { try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){ recoveringEngine.recoverFromTranslog(); - assertEquals(primarySeqNo, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); + assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertThat( Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), equalTo(primarySeqNo)); @@ -2139,9 +2139,9 @@ public void testSeqNoAndCheckpoints() throws IOException { // that the committed max seq no is equivalent to what the current primary seq no is, as all data // we have assigned sequence numbers to should be in the commit equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(primarySeqNo)); - assertThat(recoveringEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo(primarySeqNo + 1)); + assertThat(recoveringEngine.getLocalCheckpoint(), equalTo(primarySeqNo)); + assertThat(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo)); + assertThat(generateNewSeqNo(recoveringEngine), equalTo(primarySeqNo + 1)); } } @@ -2444,7 +2444,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { try (InternalEngine engine = createEngine(config)) { engine.index(firstIndexRequest); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog()); Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); @@ -2607,7 +2607,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s engine.recoverFromTranslog(); final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc1)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); throwErrorOnCommit.set(true); FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush); assertThat(e.getCause().getMessage(), equalTo("power's out")); @@ -2667,7 +2667,7 @@ private Path[] filterExtraFSFiles(Path[] files) { } public void testTranslogReplay() throws IOException { - final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpointTracker().getCheckpoint(); + final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getLocalCheckpoint(); final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); @@ -3610,7 +3610,7 @@ private ToLongBiFunction<Engine, Engine.Operation> getStallingSeqNoGenerator( final AtomicBoolean stall, final AtomicLong expectedLocalCheckpoint) { return (engine,
operation) -> { - final long seqNo = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqNo = generateNewSeqNo(engine); final CountDownLatch latch = latchReference.get(); if (stall.get()) { try { @@ -3662,8 +3662,8 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro } } - assertThat(initialEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint.get())); - assertThat(initialEngine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo((long) (docs - 1))); + assertThat(initialEngine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint.get())); + assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo((long) (docs - 1))); initialEngine.flush(true, true); latchReference.get().countDown(); @@ -3677,7 +3677,7 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) { recoveringEngine.recoverFromTranslog(); recoveringEngine.fillSeqNoGaps(2); - assertThat(recoveringEngine.getLocalCheckpointTracker().getCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); + assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); } } @@ -3758,7 +3758,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio expectedLocalCheckpoint = numberOfOperations - 1; } - assertThat(engine.getLocalCheckpointTracker().getCheckpoint(), equalTo(expectedLocalCheckpoint)); + assertThat(engine.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); try (Engine.GetResult result = engine.get(new Engine.Get(true, false, "type", "2", uid), searcherFactory)) { assertThat(result.exists(), equalTo(exists)); } @@ -3790,11 +3790,11 @@ protected long doGenerateSeqNoForOperation(Operation operation) { final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get()); final String reason = "filling gaps"; noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason)); - assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 1))); + assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 1))); assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled)); noOpEngine.noOp( new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(), randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason)); - assertThat(noOpEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo((long) (maxSeqNo + 2))); + assertThat(noOpEngine.getLocalCheckpoint(), equalTo((long) (maxSeqNo + 2))); assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1)); // skip to the op that we added to the translog Translog.Operation op; @@ -4009,17 +4009,17 @@ public void markSeqNoAsCompleted(long seqNo) { actualEngine.rollTranslogGeneration(); } } - final long currentLocalCheckpoint = actualEngine.getLocalCheckpointTracker().getCheckpoint(); + final long currentLocalCheckpoint = actualEngine.getLocalCheckpoint(); final long resetLocalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(currentLocalCheckpoint)); - actualEngine.getLocalCheckpointTracker().resetCheckpoint(resetLocalCheckpoint); + actualEngine.resetLocalCheckpoint(resetLocalCheckpoint); completedSeqNos.clear(); actualEngine.restoreLocalCheckpointFromTranslog(); final Set<Long> intersection = new HashSet<>(expectedCompletedSeqNos);
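The hunks above also show the new Engine facade these tests switch to: instead of reaching into LocalCheckpointTracker, they go through Engine.getLocalCheckpoint(), Engine.resetLocalCheckpoint(...) and the static EngineTestCase.generateNewSeqNo(...). A plausible shape for that test helper, sketched under the assumption that it simply funnels seq# generation through the engine (the exact body in EngineTestCase may differ):

    // Hedged sketch of the EngineTestCase helper referenced throughout the diff;
    // it hides the tracker behind the engine so production code can stop exposing it.
    public static long generateNewSeqNo(final Engine engine) {
        assert engine instanceof InternalEngine : "expected an InternalEngine, got: " + engine.getClass();
        return ((InternalEngine) engine).getLocalCheckpointTracker().generateSeqNo();
    }
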
intersection.retainAll(LongStream.range(resetLocalCheckpoint + 1, operations).boxed().collect(Collectors.toSet())); assertThat(completedSeqNos, equalTo(intersection)); - assertThat(actualEngine.getLocalCheckpointTracker().getCheckpoint(), equalTo(currentLocalCheckpoint)); - assertThat(actualEngine.getLocalCheckpointTracker().generateSeqNo(), equalTo((long) operations)); + assertThat(actualEngine.getLocalCheckpoint(), equalTo(currentLocalCheckpoint)); + assertThat(generateNewSeqNo(actualEngine), equalTo((long) operations)); } finally { IOUtils.close(actualEngine); } @@ -4043,7 +4043,7 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { replicaEngine.index(replicaIndexForDoc(doc, 1, indexResult.getSeqNo(), false)); } } - checkpointOnReplica = replicaEngine.getLocalCheckpointTracker().getCheckpoint(); + checkpointOnReplica = replicaEngine.getLocalCheckpoint(); } finally { IOUtils.close(replicaEngine); } @@ -4053,16 +4053,16 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); Engine recoveringEngine = null; try { - assertEquals(docs - 1, engine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(docs - 1, engine.getLocalCheckpointTracker().getCheckpoint()); - assertEquals(maxSeqIDOnReplica, replicaEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(docs - 1, engine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(docs - 1, engine.getLocalCheckpoint()); + assertEquals(maxSeqIDOnReplica, replicaEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(checkpointOnReplica, replicaEngine.getLocalCheckpoint()); trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get)); recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); assertEquals(numDocsOnReplica, recoveringEngine.getTranslog().stats().getUncommittedOperations()); recoveringEngine.recoverFromTranslog(); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2)); // now snapshot the tlog and ensure the primary term is updated @@ -4077,10 +4077,10 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { } } - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); if ((flushed = randomBoolean())) { - globalCheckpoint.set(recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); + globalCheckpoint.set(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); recoveringEngine.getTranslog().sync(); recoveringEngine.flush(true, true); } @@ -4097,11 +4097,11 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { assertThat(recoveringEngine.getTranslog().stats().getUncommittedOperations(), equalTo(0)); } recoveringEngine.recoverFromTranslog(); - 
assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals(0, recoveringEngine.fillSeqNoGaps(3)); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getMaxSeqNo()); - assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpointTracker().getCheckpoint()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); + assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); } finally { IOUtils.close(recoveringEngine); } @@ -4284,7 +4284,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s // Advance the global checkpoint during the flush to create a lag between a persisted global checkpoint in the translog // (this value is visible to the deletion policy) and an in memory global checkpoint in the SequenceNumbersService. if (rarely()) { - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpointTracker().getCheckpoint())); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getLocalCheckpoint())); } super.commitIndexWriter(writer, translog, syncId); } @@ -4296,7 +4296,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); engine.index(indexForDoc(testParsedDocument(Integer.toString(docId), null, document, B_1, null))); if (frequently()) { - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint())); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint())); engine.getTranslog().sync(); } if (frequently()) { @@ -4430,11 +4430,11 @@ public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception { engine.flush(false, randomBoolean()); List<IndexCommit> commits = DirectoryReader.listCommits(store.directory()); // Global checkpoint advanced but not enough - all commits are kept. - globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getCheckpoint() - 1)); + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint() - 1)); engine.syncTranslog(); assertThat(DirectoryReader.listCommits(store.directory()), equalTo(commits)); // Global checkpoint advanced enough - only the last commit is kept. - globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpointTracker().getCheckpoint(), Long.MAX_VALUE)); + globalCheckpoint.set(randomLongBetween(engine.getLocalCheckpoint(), Long.MAX_VALUE)); engine.syncTranslog(); assertThat(DirectoryReader.listCommits(store.directory()), contains(commits.get(commits.size() - 1))); } @@ -4458,7 +4458,7 @@ public void testCleanupCommitsWhenReleaseSnapshot() throws Exception { for (int i = 0; i < numSnapshots; i++) { snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit.
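The two cleanup tests above pin down the deletion policy's contract: commits are retained until the global checkpoint covers a younger commit, after which only the newest "safe" commit and anything newer survive. A rough sketch of that selection rule, with illustrative names only (the real logic lives in CombinedDeletionPolicy and may differ in detail):

    // Given commits ordered oldest to newest and each commit's max seq#, pick the
    // newest commit fully covered by the global checkpoint; commits older than it
    // become eligible for deletion, which is what the assertions above exercise.
    static int indexOfSafeCommit(final long[] maxSeqNoPerCommit, final long globalCheckpoint) {
        int safeIndex = 0;
        for (int i = 0; i < maxSeqNoPerCommit.length; i++) {
            if (maxSeqNoPerCommit[i] <= globalCheckpoint) {
                safeIndex = i; // everything in this commit is globally checkpointed
            }
        }
        return safeIndex;
    }
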
} - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.syncTranslog(); final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory()); for (int i = 0; i < numSnapshots - 1; i++) { @@ -4508,13 +4508,13 @@ public void testShouldPeriodicallyFlush() throws Exception { assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(0)); // If the new index commit still points to the same translog generation as the current index commit, // we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes. - engine.getLocalCheckpointTracker().generateSeqNo(); // create a gap here + generateNewSeqNo(engine); // create a gap here for (int id = 0; id < numDocs; id++) { if (randomBoolean()) { translog.rollGeneration(); } final ParsedDocument doc = testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null); - engine.index(replicaIndexForDoc(doc, 2L, engine.getLocalCheckpointTracker().generateSeqNo(), false)); + engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false)); if (engine.shouldPeriodicallyFlush()) { engine.flush(); assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo))); @@ -4535,7 +4535,7 @@ public void testStressShouldPeriodicallyFlush() throws Exception { engine.onSettingsChanged(); final int numOps = scaledRandomIntBetween(100, 10_000); for (int i = 0; i < numOps; i++) { - final long localCheckPoint = engine.getLocalCheckpointTracker().getCheckpoint(); + final long localCheckPoint = engine.getLocalCheckpoint(); final long seqno = randomLongBetween(Math.max(0, localCheckPoint), localCheckPoint + 5); final ParsedDocument doc = testParsedDocument(Long.toString(seqno), null, testDocumentWithTextField(), SOURCE, null); engine.index(replicaIndexForDoc(doc, 1L, seqno, false)); @@ -4622,9 +4622,9 @@ public void testPruneOnlyDeletesAtMostLocalCheckpoint() throws Exception { } final long deleteBatch = between(10, 20); final long gapSeqNo = randomLongBetween( - engine.getLocalCheckpointTracker().getMaxSeqNo() + 1, engine.getLocalCheckpointTracker().getMaxSeqNo() + deleteBatch); + engine.getSeqNoStats(-1).getMaxSeqNo() + 1, engine.getSeqNoStats(-1).getMaxSeqNo() + deleteBatch); for (int i = 0; i < deleteBatch; i++) { - final long seqno = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqno = generateNewSeqNo(engine); if (seqno != gapSeqNo) { if (randomBoolean()) { clock.incrementAndGet(); @@ -4671,7 +4671,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { for (int i = 0; i < numDocs; i++) { ParsedDocument doc = testParsedDocument("append-only" + i, null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { - engine.index(appendOnlyReplica(doc, randomBoolean(), 1, engine.getLocalCheckpointTracker().generateSeqNo())); + engine.index(appendOnlyReplica(doc, randomBoolean(), 1, generateNewSeqNo(engine))); } else { engine.index(appendOnlyPrimary(doc, randomBoolean(), randomNonNegativeLong())); } @@ -4688,7 +4688,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { for (int i = 0; i < numOps; i++) { ParsedDocument parsedDocument = testParsedDocument(Integer.toString(i), null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { // On replica - update max_seqno for non-append-only operations - final long seqno = engine.getLocalCheckpointTracker().generateSeqNo(); + final long seqno =
generateNewSeqNo(engine); final Engine.Index doc = replicaIndexForDoc(parsedDocument, 1, seqno, randomBoolean()); if (randomBoolean()) { engine.index(doc); @@ -4707,7 +4707,7 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { } appendOnlyIndexer.join(120_000); assertThat(engine.getMaxSeqNoOfNonAppendOnlyOperations(), equalTo(maxSeqNoOfNonAppendOnly)); - globalCheckpoint.set(engine.getLocalCheckpointTracker().getCheckpoint()); + globalCheckpoint.set(engine.getLocalCheckpoint()); engine.syncTranslog(); engine.flush(); } @@ -4719,15 +4719,14 @@ public void testTrackMaxSeqNoOfNonAppendOnlyOperations() throws Exception { public void testSkipOptimizeForExposedAppendOnlyOperations() throws Exception { long lookupTimes = 0L; - final LocalCheckpointTracker localCheckpointTracker = engine.getLocalCheckpointTracker(); final int initDocs = between(0, 10); for (int i = 0; i < initDocs; i++) { index(engine, i); lookupTimes++; } // doc1 is delayed and arrived after a non-append-only op. - final long seqNoAppendOnly1 = localCheckpointTracker.generateSeqNo(); - final long seqnoNormalOp = localCheckpointTracker.generateSeqNo(); + final long seqNoAppendOnly1 = generateNewSeqNo(engine); + final long seqnoNormalOp = generateNewSeqNo(engine); if (randomBoolean()) { engine.index(replicaIndexForDoc( testParsedDocument("d", null, testDocumentWithTextField(), SOURCE, null), 1, seqnoNormalOp, false)); @@ -4746,7 +4745,7 @@ public void testSkipOptimizeForExposedAppendOnlyOperations() throws Exception { // optimize for other append-only 2 (its seqno > max_seqno of non-append-only) - do not look up in version map. engine.index(appendOnlyReplica(testParsedDocument("append-only-2", null, testDocumentWithTextField(), SOURCE, null), - false, randomNonNegativeLong(), localCheckpointTracker.generateSeqNo())); + false, randomNonNegativeLong(), generateNewSeqNo(engine))); assertThat(engine.getNumVersionLookups(), equalTo(lookupTimes)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index e4cd5731daafa..6e9cb6c0b5980 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -59,7 +59,7 @@ public void testDefaultMapping() throws Exception { DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field"); + FieldMapper fieldMapper = mapper.mappers().getMapper("field"); assertThat(fieldMapper, instanceOf(BinaryFieldMapper.class)); assertThat(fieldMapper.fieldType().stored(), equalTo(false)); } @@ -94,7 +94,7 @@ public void testStoredValue() throws IOException { XContentType.JSON)); BytesRef indexedValue = doc.rootDoc().getBinaryValue("field"); assertEquals(new BytesRef(value), indexedValue); - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field"); + FieldMapper fieldMapper = mapper.mappers().getMapper("field"); Object originalValue = fieldMapper.fieldType().valueForDisplay(indexedValue); assertEquals(new BytesArray(value), originalValue); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java index e69d48b7b44b7..0234fcb681d82 100644 --- 
a/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentMapperMergeTests.java @@ -56,11 +56,11 @@ public void test1Merge() throws Exception { DocumentMapper merged = stage1.merge(stage2.mapping()); // stage1 mapping should not have been modified - assertThat(stage1.mappers().smartNameFieldMapper("age"), nullValue()); - assertThat(stage1.mappers().smartNameFieldMapper("obj1.prop1"), nullValue()); + assertThat(stage1.mappers().getMapper("age"), nullValue()); + assertThat(stage1.mappers().getMapper("obj1.prop1"), nullValue()); // but merged should - assertThat(merged.mappers().smartNameFieldMapper("age"), notNullValue()); - assertThat(merged.mappers().smartNameFieldMapper("obj1.prop1"), notNullValue()); + assertThat(merged.mappers().getMapper("age"), notNullValue()); + assertThat(merged.mappers().getMapper("obj1.prop1"), notNullValue()); } public void testMergeObjectDynamic() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 6c83f31f93fe6..c50320900923c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -69,25 +69,25 @@ public void testDoubleIndexingSameDoc() throws Exception { IndexReader reader = DirectoryReader.open(writer); IndexSearcher searcher = new IndexSearcher(reader); - TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", context), 10); + TopDocs topDocs = searcher.search(mapper.mappers().getMapper("field1").fieldType().termQuery("value1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field2").fieldType().termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field3").fieldType().termQuery("1.1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field4").fieldType().termQuery("2010-01-01", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("1", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("2", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); - topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", context), 10); + topDocs = searcher.search(mapper.mappers().getMapper("field5").fieldType().termQuery("3", context), 10); assertThat(topDocs.totalHits, equalTo(2L)); writer.close(); reader.close(); diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java index d1ca3d3b4461b..e26ea4ff3769f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java @@ -629,10 +629,10 @@ public void testNumericDetectionEnabled() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get(); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper.fieldType().typeName(), equalTo("long")); - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + mapper = defaultMapper.mappers().getMapper("s_double"); assertThat(mapper.fieldType().typeName(), equalTo("float")); } @@ -656,10 +656,10 @@ public void testNumericDetectionDefault() throws Exception { .setSource(doc.dynamicMappingsUpdate().toString(), XContentType.JSON).get()); defaultMapper = index.mapperService().documentMapper("type"); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("s_long"); + FieldMapper mapper = defaultMapper.mappers().getMapper("s_long"); assertThat(mapper, instanceOf(TextFieldMapper.class)); - mapper = defaultMapper.mappers().smartNameFieldMapper("s_double"); + mapper = defaultMapper.mappers().getMapper("s_double"); assertThat(mapper, instanceOf(TextFieldMapper.class)); } @@ -707,9 +707,9 @@ public void testDateDetectionInheritsFormat() throws Exception { defaultMapper = index.mapperService().documentMapper("type"); - DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date1"); - DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date2"); - DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().smartNameFieldMapper("date3"); + DateFieldMapper dateMapper1 = (DateFieldMapper) defaultMapper.mappers().getMapper("date1"); + DateFieldMapper dateMapper2 = (DateFieldMapper) defaultMapper.mappers().getMapper("date2"); + DateFieldMapper dateMapper3 = (DateFieldMapper) defaultMapper.mappers().getMapper("date3"); // inherited from dynamic date format assertEquals("yyyy-MM-dd", dateMapper1.fieldType().dateTimeFormatter().format()); // inherited from dynamic date format since the mapping in the template did not specify a format diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 64927103e6d1d..d8e8c8e0e3da5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -56,11 +56,11 @@ public void testMatchTypeOnly() throws Exception { docMapper = index.mapperService().documentMapper("person"); DocumentFieldMappers mappers = docMapper.mappers(); - assertThat(mappers.smartNameFieldMapper("s"), Matchers.notNullValue()); - assertEquals(IndexOptions.NONE, mappers.smartNameFieldMapper("s").fieldType().indexOptions()); + assertThat(mappers.getMapper("s"), Matchers.notNullValue()); + assertEquals(IndexOptions.NONE, mappers.getMapper("s").fieldType().indexOptions()); - assertThat(mappers.smartNameFieldMapper("l"), Matchers.notNullValue()); - assertNotSame(IndexOptions.NONE, 
mappers.smartNameFieldMapper("l").fieldType().indexOptions()); + assertThat(mappers.getMapper("l"), Matchers.notNullValue()); + assertNotSame(IndexOptions.NONE, mappers.getMapper("l").fieldType().indexOptions()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java index 945407fc39492..4ae03138764d7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldTypeTests.java @@ -63,7 +63,7 @@ public void testTermQuery() { MapperService mapperService = mock(MapperService.class); when(mapperService.fullName("_field_names")).thenReturn(fieldNamesFieldType); when(mapperService.fullName("field_name")).thenReturn(fieldType); - when(mapperService.simpleMatchToIndexNames("field_name")).thenReturn(Collections.singletonList("field_name")); + when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singletonList("field_name")); QueryShardContext queryShardContext = new QueryShardContext(0, indexSettings, null, null, mapperService, null, null, null, null, null, null, () -> 0L, null); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java index 0d5b4ca154a5a..45bb8090206bf 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MultiFieldTests.java @@ -122,7 +122,7 @@ public void testBuildThenParse() throws Exception { DocumentMapper builderDocMapper = new DocumentMapper.Builder(new RootObjectMapper.Builder("person").add( new TextFieldMapper.Builder("name").store(true) - .addMultiField(new TextFieldMapper.Builder("indexed").index(true).tokenized(true)) + .addMultiField(new TextFieldMapper.Builder("indexed").index(true)) .addMultiField(new TextFieldMapper.Builder("not_indexed").index(false).store(true)) ), indexService.mapperService()).build(indexService.mapperService()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index ed8274fad05da..e3dc8ff0b78fc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.index.query.MatchPhraseQueryBuilder; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -87,6 +86,9 @@ public void setup() { .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto")) .put("index.analysis.analyzer.synonym.tokenizer", "standard") .put("index.analysis.analyzer.synonym.filter", "mySynonyms") + // Stop filter remains in server as it is part of lucene-core + .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard") + .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop") .build(); indexService = createIndex("test", settings); parser = indexService.mapperService().documentMapperParser(); @@ -621,7 +623,7 @@ public void testIndexPrefixIndexTypes() 
throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "offsets") .endObject().endObject().endObject().endObject()); @@ -637,7 +639,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "freqs") .endObject().endObject().endObject().endObject()); @@ -654,7 +656,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("index_options", "positions") .endObject().endObject().endObject().endObject()); @@ -675,7 +677,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("term_vector", "with_positions_offsets") .endObject().endObject().endObject().endObject()); @@ -696,7 +698,7 @@ public void testIndexPrefixIndexTypes() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .field("term_vector", "with_positions") .endObject().endObject().endObject().endObject()); @@ -725,7 +727,7 @@ public void testFastPhraseMapping() throws IOException { .startObject("properties") .startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "my_stop_analyzer") .field("index_phrases", true) .endObject() .startObject("synfield") @@ -742,20 +744,20 @@ public void testFastPhraseMapping() throws IOException { queryShardContext.getMapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); Query q = new MatchPhraseQueryBuilder("field", "two words").toQuery(queryShardContext); - assertThat(q, is(new PhraseQuery("field._index_phrase", "two word"))); + assertThat(q, is(new PhraseQuery("field._index_phrase", "two words"))); Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext); - assertThat(q2, is(new PhraseQuery("field._index_phrase", "three word", "word here"))); + assertThat(q2, is(new PhraseQuery("field._index_phrase", "three words", "words here"))); Query q3 = new MatchPhraseQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext); - assertThat(q3, is(new PhraseQuery(1, "field", "two", "word"))); + assertThat(q3, is(new PhraseQuery(1, "field", "two", "words"))); Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext); assertThat(q4, is(new TermQuery(new Term("field", 
"singleton")))); Query q5 = new MatchPhraseQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); assertThat(q5, - is(new PhraseQuery.Builder().add(new Term("field", "sparkl")).add(new Term("field", "stopword"), 2).build())); + is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build())); Query q6 = new MatchPhraseQueryBuilder("synfield", "motor car").toQuery(queryShardContext); assertThat(q6, is(new MultiPhraseQuery.Builder() @@ -778,7 +780,7 @@ public void testFastPhraseMapping() throws IOException { CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); ts.reset(); assertTrue(ts.incrementToken()); - assertEquals("some english", termAtt.toString()); + assertEquals("Some English", termAtt.toString()); } { @@ -821,7 +823,7 @@ public void testIndexPrefixMapping() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 10) @@ -855,7 +857,7 @@ public void testIndexPrefixMapping() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes").endObject() .endObject().endObject() .endObject().endObject()); @@ -880,7 +882,7 @@ public void testIndexPrefixMapping() throws IOException { String illegalMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 10) @@ -903,7 +905,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 11) .field("max_chars", 10) @@ -920,7 +922,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 0) .field("max_chars", 10) @@ -937,7 +939,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 1) .field("max_chars", 25) @@ -954,7 +956,7 @@ public void testIndexPrefixMapping() throws IOException { String badConfigMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field") .field("type", "text") - .field("analyzer", "english") + .field("analyzer", "standard") .field("index_prefixes", (String) null) 
.endObject().endObject() .endObject().endObject()); diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index aafc66b3985e1..1c8bc48fb7ef3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -1300,7 +1300,7 @@ public void testWithStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new QueryStringQueryBuilder("the quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), Occur.SHOULD) @@ -1313,7 +1313,7 @@ public void testWithPrefixStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new QueryStringQueryBuilder("the* quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), Occur.SHOULD) diff --git a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java index 7ff2b7ec12285..0cd5e7fe330eb 100644 --- a/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java @@ -629,7 +629,7 @@ public void testWithStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new SimpleQueryStringBuilder("the quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new TermQuery(new Term(STRING_FIELD_NAME, "quick")), BooleanClause.Occur.SHOULD) @@ -642,7 +642,7 @@ public void testWithPrefixStopWords() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); Query query = new SimpleQueryStringBuilder("the* quick fox") .field(STRING_FIELD_NAME) - .analyzer("english") + .analyzer("stop") .toQuery(createShardContext()); BooleanQuery expected = new BooleanQuery.Builder() .add(new PrefixQuery(new Term(STRING_FIELD_NAME, "the")), BooleanClause.Occur.SHOULD) diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 26eb1287f6a6c..1a70de53f5f17 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -344,9 +344,10 @@ public void testSeqNoCollision() throws Exception { op1 = snapshot.next(); assertThat(op1, notNullValue()); assertThat(snapshot.next(), nullValue()); - assertThat(snapshot.overriddenOperations(), equalTo(0)); + assertThat(snapshot.skippedOperations(), equalTo(0)); } - // Make sure that replica2 receives translog ops (eg. op2) from replica1 and overwrites its stale operation (op1). 
+ // Make sure that replica2 receives translog ops (eg. op2) from replica1 + // and does not overwrite its stale operation (op1) as it is trimmed. logger.info("--> Promote replica1 as the primary"); shards.promoteReplicaToPrimary(replica1).get(); // wait until resync completed. shards.index(new IndexRequest(index.getName(), "type", "d2").source("{}", XContentType.JSON)); @@ -357,7 +358,8 @@ public void testSeqNoCollision() throws Exception { assertThat(op2.seqNo(), equalTo(op1.seqNo())); assertThat(op2.primaryTerm(), greaterThan(op1.primaryTerm())); assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations)); - assertThat(snapshot.overriddenOperations(), equalTo(1)); + assertThat(snapshot.overriddenOperations(), equalTo(0)); + assertThat(snapshot.skippedOperations(), equalTo(1)); } // Make sure that peer-recovery transfers all but non-overridden operations. @@ -370,7 +372,7 @@ public void testSeqNoCollision() throws Exception { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); assertThat(snapshot.next(), equalTo(op2)); assertThat("Remaining of snapshot should contain init operations", snapshot, containsOperationsInAnyOrder(initOperations)); - assertThat("Peer-recovery should not send overridden operations", snapshot.overriddenOperations(), equalTo(0)); + assertThat("Peer-recovery should not send overridden operations", snapshot.skippedOperations(), equalTo(0)); } // TODO: We should assert the content of shards in the ReplicationGroup. // Without rollback replicas(current implementation), we don't have the same content across shards: diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 21be1da3845b6..ee97ba14fe09e 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -53,8 +53,12 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Collections; import java.util.EnumSet; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; @@ -65,6 +69,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; @@ -353,10 +358,19 @@ public void testReplicaRollbackStaleDocumentsInPeerRecovery() throws Exception { @TestLogging("org.elasticsearch.index.shard:TRACE,org.elasticsearch.action.resync:TRACE") public void testResyncAfterPrimaryPromotion() throws Exception { - // TODO: check translog trimming functionality once it's implemented - try (ReplicationGroup shards = createGroup(2)) { + // TODO: check translog trimming functionality once rollback is implemented in Lucene (ES trimming is done) + Map mappings = + Collections.singletonMap("type", "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"); + try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(2, mappings))) { shards.startAll(); - int initialDocs = 
shards.indexDocs(randomInt(10)); + int initialDocs = randomInt(10); + + for (int i = 0; i < initialDocs; i++) { + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "initial_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); + shards.index(indexRequest); + } + boolean syncedGlobalCheckPoint = randomBoolean(); if (syncedGlobalCheckPoint) { shards.syncGlobalCheckpoint(); @@ -364,16 +378,30 @@ public void testResyncAfterPrimaryPromotion() throws Exception { final IndexShard oldPrimary = shards.getPrimary(); final IndexShard newPrimary = shards.getReplicas().get(0); + final IndexShard justReplica = shards.getReplicas().get(1); // simulate docs that were inflight when primary failed - final int extraDocs = randomIntBetween(0, 5); + final int extraDocs = randomInt(5); logger.info("--> indexing {} extra docs", extraDocs); for (int i = 0; i < extraDocs; i++) { - final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_" + i) - .source("{}", XContentType.JSON); + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_doc_" + i) + .source("{ \"f\": \"normal\"}", XContentType.JSON); final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); indexOnReplica(bulkShardRequest, shards, newPrimary); } + + final int extraDocsToBeTrimmed = randomIntBetween(0, 10); + logger.info("--> indexing {} extra docs to be trimmed", extraDocsToBeTrimmed); + for (int i = 0; i < extraDocsToBeTrimmed; i++) { + final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "extra_trimmed_" + i) + .source("{ \"f\": \"trimmed\"}", XContentType.JSON); + final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); + // replicate to a replica other than newPrimary - that replica's translog is the one to be trimmed + indexOnReplica(bulkShardRequest, shards, justReplica); + } + + logger.info("--> seqNo primary {} replica {}", oldPrimary.seqNoStats(), newPrimary.seqNoStats()); + logger.info("--> resyncing replicas"); PrimaryReplicaSyncer.ResyncTask task = shards.promoteReplicaToPrimary(newPrimary).get(); if (syncedGlobalCheckPoint) { @@ -381,7 +409,36 @@ public void testResyncAfterPrimaryPromotion() throws Exception { } else { assertThat(task.getResyncedOperations(), greaterThanOrEqualTo(extraDocs)); } - shards.assertAllEqual(initialDocs + extraDocs); + List<IndexShard> replicas = shards.getReplicas(); + + // check that all docs on the primary are available on each replica + Set<String> primaryIds = getShardDocUIDs(newPrimary); + assertThat(primaryIds.size(), equalTo(initialDocs + extraDocs)); + for (IndexShard replica : replicas) { + Set<String> replicaIds = getShardDocUIDs(replica); + Set<String> temp = new HashSet<>(primaryIds); + temp.removeAll(replicaIds); + assertThat(replica.routingEntry() + " is missing docs", temp, empty()); + temp = new HashSet<>(replicaIds); + temp.removeAll(primaryIds); + // the replica may hold extra docs because there is no Lucene rollback on it + assertThat(replica.routingEntry() + " should have extra docs", temp, + extraDocsToBeTrimmed > 0 ?
not(empty()) : empty()); + } + + // check translog on replica is trimmed + int translogOperations = 0; + try(Translog.Snapshot snapshot = getTranslog(justReplica).newSnapshot()) { + Translog.Operation next; + while ((next = snapshot.next()) != null) { + translogOperations++; + assertThat("unexpected op: " + next, (int)next.seqNo(), lessThan(initialDocs + extraDocs)); + assertThat("unexpected primaryTerm: " + next.primaryTerm(), next.primaryTerm(), is(oldPrimary.getPrimaryTerm())); + final Translog.Source source = next.getSource(); + assertThat(source.source.utf8ToString(), is("{ \"f\": \"normal\"}")); + } + } + assertThat(translogOperations, is(initialDocs + extraDocs)); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 66ca366196569..f4401da720438 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -78,6 +78,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.Segment; @@ -857,7 +858,7 @@ public void testGlobalCheckpointSync() throws IOException { recoverReplica(replicaShard, primaryShard); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { - primaryShard.getEngine().getLocalCheckpointTracker().generateSeqNo(); + EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); } final long checkpoint = rarely() ? 
maxSeqNo - scaledRandomIntBetween(0, maxSeqNo) : maxSeqNo; diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 1257aea3d14fa..b290f4d45597b 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.resync.ResyncReplicationRequest; import org.elasticsearch.action.resync.ResyncReplicationResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -36,15 +37,20 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.tasks.TaskManager; import java.io.IOException; import java.nio.ByteBuffer; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; public class PrimaryReplicaSyncerTests extends IndexShardTestCase { @@ -53,15 +59,17 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { IndexShard shard = newStartedShard(true); TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet()); AtomicBoolean syncActionCalled = new AtomicBoolean(); + List resyncRequests = new ArrayList<>(); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); + resyncRequests.add(request); assertThat(parentTask, instanceOf(PrimaryReplicaSyncer.ResyncTask.class)); listener.onResponse(new ResyncReplicationResponse()); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, taskManager, syncAction); - syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 100))); + syncer.setChunkSize(new ByteSizeValue(randomIntBetween(1, 10))); int numDocs = randomInt(10); for (int i = 0; i < numDocs; i++) { @@ -72,7 +80,7 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { } long globalCheckPoint = numDocs > 0 ? 
randomIntBetween(0, numDocs - 1) : 0; - boolean syncNeeded = numDocs > 0 && globalCheckPoint < numDocs - 1; + boolean syncNeeded = numDocs > 0; String allocationId = shard.routingEntry().allocationId().getId(); shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), @@ -84,19 +92,29 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { PlainActionFuture fut = new PlainActionFuture<>(); syncer.resync(shard, fut); - fut.get(); + PrimaryReplicaSyncer.ResyncTask resyncTask = fut.get(); if (syncNeeded) { assertTrue("Sync action was not called", syncActionCalled.get()); + ResyncReplicationRequest resyncRequest = resyncRequests.remove(0); + assertThat(resyncRequest.getTrimAboveSeqNo(), equalTo(numDocs - 1L)); + + assertThat("trimAboveSeqNo has to be specified in request #0 only", resyncRequests.stream() + .mapToLong(ResyncReplicationRequest::getTrimAboveSeqNo) + .filter(seqNo -> seqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) + .findFirst() + .isPresent(), + is(false)); } - assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, fut.get().getTotalOperations()); - if (syncNeeded) { + + assertEquals(globalCheckPoint == numDocs - 1 ? 0 : numDocs, resyncTask.getTotalOperations()); + if (syncNeeded && globalCheckPoint < numDocs - 1) { long skippedOps = globalCheckPoint + 1; // everything up to global checkpoint included - assertEquals(skippedOps, fut.get().getSkippedOperations()); - assertEquals(numDocs - skippedOps, fut.get().getResyncedOperations()); + assertEquals(skippedOps, resyncTask.getSkippedOperations()); + assertEquals(numDocs - skippedOps, resyncTask.getResyncedOperations()); } else { - assertEquals(0, fut.get().getSkippedOperations()); - assertEquals(0, fut.get().getResyncedOperations()); + assertEquals(0, resyncTask.getSkippedOperations()); + assertEquals(0, resyncTask.getResyncedOperations()); } closeShards(shard); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java index 0dc404767de3c..99e21d4760463 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogHeaderTests.java @@ -118,7 +118,8 @@ private void checkFailsToOpen(String file, Class expect assertThat("test file [" + translogFile + "] should exist", Files.exists(translogFile), equalTo(true)); final E error = expectThrows(expectedErrorType, () -> { final Checkpoint checkpoint = new Checkpoint(Files.size(translogFile), 1, 1, - SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, 1); + SequenceNumbers.NO_OPS_PERFORMED, SequenceNumbers.NO_OPS_PERFORMED, + SequenceNumbers.NO_OPS_PERFORMED, 1, SequenceNumbers.NO_OPS_PERFORMED); try (FileChannel channel = FileChannel.open(translogFile, StandardOpenOption.READ)) { TranslogReader.open(channel, translogFile, checkpoint, null); } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 64bb3f5101a8f..c92370fec04ec 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -107,6 +107,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.LongStream; @@ -121,8 +122,11 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.stub; @@ -1475,8 +1479,8 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { fail("corrupted"); } catch (IllegalStateException ex) { assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " + - "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1} but got: Checkpoint{offset=0, numOps=0, " + - "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0}", ex.getMessage()); + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " + + "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage()); } Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); try (Translog translog = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) { @@ -1508,6 +1512,191 @@ public void testSnapshotFromStreamInput() throws IOException { assertEquals(ops, readOperations); } + public void testSnapshotCurrentHasUnexpectedOperationsForTrimmedOperations() throws Exception { + int extraDocs = randomIntBetween(10, 15); + + // increment primaryTerm to avoid potential negative numbers + primaryTerm.addAndGet(extraDocs); + translog.rollGeneration(); + + for (int op = 0; op < extraDocs; op++) { + String ascii = randomAlphaOfLengthBetween(1, 50); + Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get() - op, + ascii.getBytes("UTF-8")); + translog.add(operation); + } + + AssertionError error = expectThrows(AssertionError.class, () -> translog.trimOperations(primaryTerm.get(), 0)); + assertThat(error.getMessage(), is("current should not have any operations with seq#:primaryTerm " + + "[1:" + (primaryTerm.get() - 1) + "] > 0:" + primaryTerm.get())); + + primaryTerm.incrementAndGet(); + translog.rollGeneration(); + + // add a single operation to current with seq# > trimmed seq# but higher primary term + Translog.Index operation = new Translog.Index("test", "" + 1, 1L, primaryTerm.get(), + randomAlphaOfLengthBetween(1, 50).getBytes("UTF-8")); + translog.add(operation); + + // it is possible to trim after generation rollover + translog.trimOperations(primaryTerm.get(), 0); + } + + public void testSnapshotTrimmedOperations() throws Exception { + final InMemoryTranslog inMemoryTranslog = new InMemoryTranslog(); + final List allOperations = new ArrayList<>(); + + for(int attempt = 0, maxAttempts = randomIntBetween(3, 10); attempt < maxAttempts; attempt++) { + List ops = LongStream.range(0, allOperations.size() + 
randomIntBetween(10, 15)) + .boxed().collect(Collectors.toList()); + Randomness.shuffle(ops); + + AtomicReference<String> source = new AtomicReference<>(); + for (final long op : ops) { + source.set(randomAlphaOfLengthBetween(1, 50)); + + // have to use exactly the same source for the same seq# if the primaryTerm has not changed + if (primaryTerm.get() == translog.getCurrent().getPrimaryTerm()) { + // use the latest source of the op with the same seq# - therefore no break + allOperations + .stream() + .filter(allOp -> allOp instanceof Translog.Index && allOp.seqNo() == op) + .map(allOp -> ((Translog.Index) allOp).source().utf8ToString()) + .reduce((a, b) -> b) + .ifPresent(source::set); + } + + // use the ongoing primaryTerm - or the same one as before + Translog.Index operation = new Translog.Index("test", "" + op, op, primaryTerm.get(), + source.get().getBytes("UTF-8")); + translog.add(operation); + inMemoryTranslog.add(operation); + allOperations.add(operation); + } + + if (randomBoolean()) { + primaryTerm.incrementAndGet(); + translog.rollGeneration(); + } + + long maxTrimmedSeqNo = randomInt(allOperations.size()); + + translog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + inMemoryTranslog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + translog.sync(); + + Collection<Translog.Operation> effectiveOperations = inMemoryTranslog.operations(); + + try (Translog.Snapshot snapshot = translog.newSnapshot()) { + assertThat(snapshot, containsOperationsInAnyOrder(effectiveOperations)); + assertThat(snapshot.totalOperations(), is(allOperations.size())); + assertThat(snapshot.skippedOperations(), is(allOperations.size() - effectiveOperations.size())); + } + } + } + + /** + * this class mimics the behaviour of the original {@link Translog} + */ + static class InMemoryTranslog { + private final Map<Long, Translog.Operation> operations = new HashMap<>(); + + void add(Translog.Operation operation) { + final Translog.Operation old = operations.put(operation.seqNo(), operation); + assert old == null || old.primaryTerm() <= operation.primaryTerm(); + } + + void trimOperations(long belowTerm, long aboveSeqNo) { + for (final Iterator<Map.Entry<Long, Translog.Operation>> it = operations.entrySet().iterator(); it.hasNext(); ) { + final Map.Entry<Long, Translog.Operation> next = it.next(); + Translog.Operation op = next.getValue(); + boolean drop = op.primaryTerm() < belowTerm && op.seqNo() > aboveSeqNo; + if (drop) { + it.remove(); + } + } + } + + Collection<Translog.Operation> operations() { + return operations.values(); + } + } + + public void testRandomExceptionsOnTrimOperations() throws Exception { + Path tempDir = createTempDir(); + final FailSwitch fail = new FailSwitch(); + fail.failNever(); + TranslogConfig config = getTranslogConfig(tempDir); + List<FileChannel> fileChannels = new ArrayList<>(); + final Translog failableTLog = + getFailableTranslog(fail, config, randomBoolean(), false, null, createTranslogDeletionPolicy(), fileChannels); + + IOException expectedException = null; + int translogOperations = 0; + final int maxAttempts = 10; + for (int attempt = 0; attempt < maxAttempts; attempt++) { + int maxTrimmedSeqNo; + fail.failNever(); + int extraTranslogOperations = randomIntBetween(10, 100); + + List<Integer> ops = IntStream.range(translogOperations, translogOperations + extraTranslogOperations) + .boxed().collect(Collectors.toList()); + Randomness.shuffle(ops); + for (int op : ops) { + String ascii = randomAlphaOfLengthBetween(1, 50); + Translog.Index operation = new Translog.Index("test", "" + op, op, + primaryTerm.get(), ascii.getBytes("UTF-8")); + + failableTLog.add(operation); + } + + translogOperations += extraTranslogOperations; + + // at least one generation roll + one primary-term increment has to happen - otherwise no trimming would take place at all
+ // the last attempt has to roll over as well - otherwise trimming could be skipped since everything was already trimmed + boolean rollover = attempt == 0 || attempt == maxAttempts - 1 || randomBoolean(); + if (rollover) { + primaryTerm.incrementAndGet(); + failableTLog.rollGeneration(); + } + + maxTrimmedSeqNo = rollover ? translogOperations - randomIntBetween(4, 8) : translogOperations + 1; + + // once we reach the last attempt - always fail + fail.failRate(attempt < maxAttempts - 1 ? 25 : 100); + try { + failableTLog.trimOperations(primaryTerm.get(), maxTrimmedSeqNo); + } catch (IOException e) { + expectedException = e; + break; + } + } + + assertThat(expectedException, is(not(nullValue()))); + + assertThat(fileChannels, is(not(empty()))); + assertThat("all file channels have to be closed", + fileChannels.stream().filter(f -> f.isOpen()).findFirst().isPresent(), is(false)); + + assertThat(failableTLog.isOpen(), is(false)); + final AlreadyClosedException alreadyClosedException = expectThrows(AlreadyClosedException.class, () -> failableTLog.newSnapshot()); + assertThat(alreadyClosedException.getMessage(), + is("translog is already closed")); + + fail.failNever(); + + // check that despite the IO exception the translog is not corrupted + try (Translog reopenedTranslog = openTranslog(config, failableTLog.getTranslogUUID())) { + try (Translog.Snapshot snapshot = reopenedTranslog.newSnapshot()) { + assertThat(snapshot.totalOperations(), greaterThan(0)); + Translog.Operation operation; + for (int i = 0; (operation = snapshot.next()) != null; i++) { + assertNotNull("operation " + i + " must be non-null", operation); + } + } + } + } + public void testLocationHashCodeEquals() throws IOException { List<Translog.Location> locations = new ArrayList<>(); List<Translog.Location> locations2 = new ArrayList<>(); @@ -2008,7 +2197,8 @@ private static class FailSwitch { private volatile boolean onceFailedFailAlways = false; public boolean fail() { - boolean fail = randomIntBetween(1, 100) <= failRate; + final int rnd = randomIntBetween(1, 100); + boolean fail = rnd <= failRate; if (fail && onceFailedFailAlways) { failAlways(); } @@ -2027,17 +2217,30 @@ public void failRandomly() { failRate = randomIntBetween(1, 100); } + public void failRate(int rate) { + failRate = rate; + } + public void onceFailedFailAlways() { onceFailedFailAlways = true; } } - private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, final boolean throwUnknownException, String translogUUID, final TranslogDeletionPolicy deletionPolicy) throws IOException { + private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, + final boolean throwUnknownException, String translogUUID, + final TranslogDeletionPolicy deletionPolicy) throws IOException { + return getFailableTranslog(fail, config, partialWrites, throwUnknownException, translogUUID, deletionPolicy, null); + } + + private Translog getFailableTranslog(final FailSwitch fail, final TranslogConfig config, final boolean partialWrites, + final boolean throwUnknownException, String translogUUID, + final TranslogDeletionPolicy deletionPolicy, + final List<FileChannel> fileChannels) throws IOException { final ChannelFactory channelFactory = (file, openOption) -> { FileChannel channel = FileChannel.open(file, openOption); + if (fileChannels != null) { + fileChannels.add(channel); + } boolean success = false; try { final boolean isCkpFile = file.getFileName().toString().endsWith(".ckp"); // don't do partial writes for checkpoints we rely on the fact that the bytes are written as an atomic operation @@ -2394,7 +2597,7 @@ private Checkpoint randomCheckpoint() { } final long generation = 
randomNonNegativeLong(); return new Checkpoint(randomLong(), randomInt(), generation, minSeqNo, maxSeqNo, randomNonNegativeLong(), - randomLongBetween(1, generation)); + randomLongBetween(1, generation), maxSeqNo); } public void testCheckpointOnDiskFull() throws IOException { @@ -2618,7 +2821,7 @@ public void testMinSeqNoBasedAPI() throws IOException { assertThat(Tuple.tuple(op.seqNo(), op.primaryTerm()), isIn(seenSeqNos)); readFromSnapshot++; } - readFromSnapshot += snapshot.overriddenOperations(); + readFromSnapshot += snapshot.skippedOperations(); } assertThat(readFromSnapshot, equalTo(expectedSnapshotOps)); final long seqNoLowerBound = seqNo; diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 717bab12ea5cb..35c5a19cc2e8c 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -22,6 +22,9 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -36,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.AnalyzerProvider; +import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; @@ -66,9 +70,11 @@ import java.util.Arrays; import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; +import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -113,7 +119,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockWhitespacePlugin.class); + return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockAnalysisPlugin.class); } public void testHighlightingWithStoredKeyword() throws IOException { @@ -765,14 +771,19 @@ public void testMatchedFieldsFvhNoRequireFieldMatch() throws Exception { } private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception { + Settings.Builder settings = Settings.builder(); + settings.put(indexSettings()); + settings.put("index.analysis.analyzer.mock_english.tokenizer", "standard"); + settings.put("index.analysis.analyzer.mock_english.filter", "mock_snowball"); assertAcked(prepareCreate("test") + .setSettings(settings) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("foo") .field("type", "text") .field("term_vector", "with_positions_offsets") 
.field("store", true) - .field("analyzer", "english") + .field("analyzer", "mock_english") .startObject("fields") .startObject("plain") .field("type", "text") @@ -785,7 +796,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .field("type", "text") .field("term_vector", "with_positions_offsets") .field("store", true) - .field("analyzer", "english") + .field("analyzer", "mock_english") .startObject("fields") .startObject("plain") .field("type", "text") @@ -2819,7 +2830,7 @@ public void testSynonyms() throws IOException { assertAcked(prepareCreate("test").setSettings(builder.build()) .addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," + - "analyzer=english,index_options=offsets")); + "analyzer=standard,index_options=offsets")); ensureGreen(); client().prepareIndex("test", "type1", "0").setSource( @@ -2983,7 +2994,39 @@ public void testWithNormalizer() throws Exception { } } - public static class MockWhitespacePlugin extends Plugin implements AnalysisPlugin { + public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin { + + public final class MockSnowBall extends TokenFilter { + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + + /** Sole constructor. */ + MockSnowBall(TokenStream in) { + super(in); + } + + @Override + public boolean incrementToken() throws IOException { + if (input.incrementToken()) { + final char[] buffer = termAtt.buffer(); + final int length = termAtt.length(); + if (buffer[length - 1] == 's') { + termAtt.setLength(length - 1); + } + if (length > 3) { + if (buffer[length - 1] == 'g' && buffer[length - 2] == 'n' && buffer[length - 3] == 'i') { + termAtt.setLength(length- 3); + } + } + return true; + } else + return false; + } + } + + @Override + public List getPreConfiguredTokenFilters() { + return singletonList(PreConfiguredTokenFilter.singleton("mock_snowball", false, MockSnowBall::new)); + } @Override public Map>> getAnalyzers() { diff --git a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 7aef2d208ecc5..147caa4c1c131 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -19,6 +19,12 @@ package org.elasticsearch.search.query; +import org.apache.lucene.analysis.CharacterUtils; +import org.apache.lucene.analysis.MockLowerCaseFilter; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -28,12 +34,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.MultiTermAwareComponent; +import org.elasticsearch.index.analysis.PreConfiguredCharFilter; +import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; +import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SimpleQueryStringBuilder; import org.elasticsearch.index.query.SimpleQueryStringFlag; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -42,14 +55,19 @@ import org.elasticsearch.test.InternalSettingsPlugin; import java.io.IOException; +import java.io.Reader; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; +import java.util.function.Function; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery; @@ -72,11 +90,15 @@ public class SimpleQueryStringIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created + return Arrays.asList(MockAnalysisPlugin.class, InternalSettingsPlugin.class); // uses index.version.created } public void testSimpleQueryString() throws ExecutionException, InterruptedException { - createIndex("test"); + Settings.Builder settings = Settings.builder(); + settings.put(indexSettings()); + settings.put("index.analysis.analyzer.mock_snowball.tokenizer", "standard"); + settings.put("index.analysis.analyzer.mock_snowball.filter", "mock_snowball"); + createIndex("test", settings.build()); indexRandom(true, false, client().prepareIndex("test", "type1", "1").setSource("body", "foo"), client().prepareIndex("test", "type1", "2").setSource("body", "bar"), @@ -108,7 +130,7 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept assertSearchHits(searchResponse, "4", "5"); searchResponse = client().prepareSearch().setQuery( - simpleQueryStringQuery("eggplants").analyzer("snowball")).get(); + simpleQueryStringQuery("eggplants").analyzer("mock_snowball")).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("4")); @@ -312,7 +334,7 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In .startObject("properties") .startObject("location") .field("type", "text") - .field("analyzer", "german") + .field("analyzer", "standard") .endObject() .endObject() .endObject() @@ -583,4 +605,33 @@ private void assertHits(SearchHits hits, String... ids) { } assertThat(hitIds, containsInAnyOrder(ids)); } + + public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin { + + public final class MockSnowBall extends TokenFilter { + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + + /** Sole constructor. 
*/ + MockSnowBall(TokenStream in) { + super(in); + } + + @Override + public boolean incrementToken() throws IOException { + if (input.incrementToken()) { + char[] buffer = termAtt.buffer(); + if (buffer[termAtt.length() - 1] == 's') { + termAtt.setLength(termAtt.length() - 1); + } + return true; + } else + return false; + } + } + + @Override + public List getPreConfiguredTokenFilters() { + return singletonList(PreConfiguredTokenFilter.singleton("mock_snowball", false, MockSnowBall::new)); + } + } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 1f3bdf2e88ac9..7db5e084ed370 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -78,6 +78,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; @@ -3345,7 +3346,7 @@ public void testSnapshottingWithMissingSequenceNumbers() { final Index index = resolveIndex(indexName); final IndexShard primary = internalCluster().getInstance(IndicesService.class, dataNode).getShardOrNull(new ShardId(index, 0)); // create a gap in the sequence numbers - getEngineFromShard(primary).getLocalCheckpointTracker().generateSeqNo(); + EngineTestCase.generateNewSeqNo(getEngineFromShard(primary)); for (int i = 5; i < 10; i++) { index(indexName, "_doc", Integer.toString(i), "foo", "bar" + i); diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 34501ba8a1b02..8b3aff90e8d4c 100644 --- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -87,7 +87,7 @@ public void testExplainValidateQueryTwoNodes() throws IOException { .setSource(XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("foo").field("type", "text").endObject() .startObject("bar").field("type", "integer").endObject() - .startObject("baz").field("type", "text").field("analyzer", "snowball").endObject() + .startObject("baz").field("type", "text").field("analyzer", "standard").endObject() .startObject("pin").startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject().endObject() .endObject().endObject().endObject()) .execute().actionGet(); diff --git a/test/fixtures/example-fixture/build.gradle b/test/fixtures/example-fixture/build.gradle index 225a2cf9deba6..ce562e89abb7f 100644 --- a/test/fixtures/example-fixture/build.gradle +++ b/test/fixtures/example-fixture/build.gradle @@ -22,3 +22,5 @@ test.enabled = false // Not published so no need to assemble tasks.remove(assemble) build.dependsOn.remove('assemble') + +dependenciesInfo.enabled = false diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index fe7a3ec3ac1de..74200149f18cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ 
b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -481,6 +481,15 @@ public interface IndexWriterFactory { IndexWriter createWriter(Directory directory, IndexWriterConfig iwc) throws IOException; } + /** + * Generate a new sequence number and return it. Only works on InternalEngines + */ + public static long generateNewSeqNo(final Engine engine) { + assert engine instanceof InternalEngine : "expected InternalEngine, got: " + engine.getClass(); + InternalEngine internalEngine = (InternalEngine) engine; + return internalEngine.getLocalCheckpointTracker().generateSeqNo(); + } + public static InternalEngine createInternalEngine( @Nullable final IndexWriterFactory indexWriterFactory, @Nullable final BiFunction localCheckpointTrackerSupplier, diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 19d0ecd6003ac..3fcf06c6545bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -591,7 +591,7 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false); } shard.updateLocalCheckpointForShard(shard.routingEntry().allocationId().getId(), - shard.getEngine().getLocalCheckpointTracker().getCheckpoint()); + shard.getLocalCheckpoint()); } else { result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 8b58cea4d0a54..979bfccdb64b8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -126,7 +126,8 @@ import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MockFieldFilterPlugin; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -823,7 +824,7 @@ public void waitNoPendingTasksOnAll() throws Exception { } /** - * Waits till a (pattern) field name mappings concretely exists on all nodes. Note, this waits for the current + * Waits until mappings for the provided fields exist on all nodes. Note, this waits for the current * started shards and checks for concrete mappings. */ public void assertConcreteMappingsOnAll(final String index, final String type, final String... 
fieldNames) throws Exception { @@ -833,11 +834,10 @@ public void assertConcreteMappingsOnAll(final String index, final String type, f IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); IndexService indexService = indicesService.indexService(resolveIndex(index)); assertThat("index service doesn't exists on " + node, indexService, notNullValue()); - DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); - assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue()); + MapperService mapperService = indexService.mapperService(); for (String fieldName : fieldNames) { - Collection<String> matches = documentMapper.mappers().simpleMatchToFullName(fieldName); - assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable())); + MappedFieldType fieldType = mapperService.fullName(fieldName); + assertNotNull("field " + fieldName + " doesn't exist on " + node, fieldType); } } assertMappingOnMaster(index, type, fieldNames); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java index 08c1d010d99dd..9abaaf0ecf0e2 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperIntegrationTests.java @@ -59,7 +59,7 @@ public void testDLS() throws Exception { MapperService mapperService = mock(MapperService.class); ScriptService scriptService = mock(ScriptService.class); when(mapperService.documentMapper()).thenReturn(null); - when(mapperService.simpleMatchToIndexNames(anyString())) + when(mapperService.simpleMatchToFullName(anyString())) .then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0])); ThreadContext threadContext = new ThreadContext(Settings.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java index f8826bebcac71..61e451150cd08 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticator.java @@ -159,8 +159,10 @@ private String getSessionIndex(Assertion assertion) { private void checkResponseDestination(Response response) { final String asc = getSpConfiguration().getAscUrl(); if (asc.equals(response.getDestination()) == false) { - throw samlException("SAML response " + response.getID() + " is for destination " + response.getDestination() + if (response.isSigned() || Strings.hasText(response.getDestination())) { + throw samlException("SAML response " + response.getID() + " is for destination " + response.getDestination() + " but this realm uses " + asc); + } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java index 8bb9890151ff0..913258cf45c5d 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java @@ -523,13 +523,59 @@ public void testIncorrectDestinationIsRejected() throws Exception { "" + "" + ""; - SamlToken token = token(signDoc(xml)); + SamlToken token = randomBoolean() ? token(signDoc(xml)) : token(signAssertions(xml, idpSigningCertificatePair)); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(token)); assertThat(exception.getMessage(), containsString("destination")); assertThat(exception.getCause(), nullValue()); assertThat(SamlUtils.isSamlException(exception), is(true)); } + public void testMissingDestinationIsNotRejectedForNotSignedResponse() throws Exception { + Instant now = clock.instant(); + Instant validUntil = now.plusSeconds(30); + String sessionindex = randomId(); + final String xml = "\n" + + "" + + "" + IDP_ENTITY_ID + "" + + "" + + "" + + "" + IDP_ENTITY_ID + "" + + "" + + "randomopaquestring" + + "" + + "" + + "" + + "" + + "" + + "" + + "" + PASSWORD_AUTHN_CTX + "" + + "" + + "" + + "" + + "daredevil" + + "" + + "" + + ""; + SamlToken token = token(signAssertions(xml, idpSigningCertificatePair)); + final SamlAttributes attributes = authenticator.authenticate(token); + assertThat(attributes, notNullValue()); + assertThat(attributes.attributes(), iterableWithSize(1)); + final List uid = attributes.getAttributeValues("urn:oid:0.9.2342.19200300.100.1.1"); + assertThat(uid, contains("daredevil")); + assertThat(uid, iterableWithSize(1)); + assertThat(attributes.name(), notNullValue()); + assertThat(attributes.name().format, equalTo(TRANSIENT)); + } + public void testIncorrectRequestIdIsRejected() throws Exception { Instant now = clock.instant(); Instant validUntil = now.plusSeconds(30); diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 26cf913aa2790..e383e71cd4c76 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,74 +1,40 @@ + +buildscript { + repositories { + maven { + url 'https://plugins.gradle.org/m2/' + } + } + dependencies { + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.2' + } +} + apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' +apply plugin: 'com.github.johnrengelman.shadow' description = 'JDBC driver for Elasticsearch' +archivesBaseName = "x-pack-sql-jdbc" forbiddenApisMain { // does not depend on core, so only jdk and http signatures should be checked signaturesURLs = [this.class.getResource('/forbidden/jdk-signatures.txt')] } -/* - * Bundle as many of our dependencies as we can get away with into the jar. - * We can't currently bundle *all* dependencies into the jar, but we'd like - * to avoid publishing the sql shared libraries if possible. This allows that. - * - * It is possible to use configure this bundling in a bunch of different ways - * but this particular way generates a pom that doesn't declare the bundled - * dependencies as dependencies. Which is a good thing because we don't publish - * them and we don't want consumers to get two copies of them. - * - * We'd *like* to shade these dependencies, at least ones like jackson which we - * know that we can't remove entirely. But for now something like this is - * simpler. 
- */ -configurations { - bundled -} -sourceSets { - main { - compileClasspath += configurations.bundled - } - test { - compileClasspath += configurations.bundled - } -} -javadoc { - classpath += configurations.bundled -} -jar { - from({configurations.bundled.collect { it.isDirectory() ? it : zipTree(it) }}) { - // We don't need the META-INF from the things we bundle. For now. - exclude 'META-INF/*' - } -} - dependencies { - - // Eclipse doesn't know how to deal with these bundled deependencies so make them compile - // dependencies if we are running in Eclipse - if (isEclipse) { - compile (xpackProject('plugin:sql:sql-shared-client')) { - transitive = false - } - compile (xpackProject('plugin:sql:sql-shared-proto')) { - transitive = false - } - } else { - bundled (xpackProject('plugin:sql:sql-shared-client')) { - transitive = false - } - bundled (xpackProject('plugin:sql:sql-shared-proto')) { - transitive = false - } + compile (xpackProject('plugin:sql:sql-shared-client')) { + transitive = false + } + compile (xpackProject('plugin:sql:sql-shared-proto')) { + transitive = false } compile (project(':libs:x-content')) { transitive = false } compile project(':libs:core') runtime "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" - testCompile "org.elasticsearch.test:framework:${version}" } @@ -82,23 +48,48 @@ dependencyLicenses { ignoreSha 'elasticsearch' } -/* - * Temporary zip file to make the jdbc driver more usable during the 6.3 - * release. We'd like to remove this in future releases when the jdbc driver - * bundles or shades all of its dependencies. But for now this should help - * non-maven jdbc users, specifically those folks using BI tools. - */ -task zipWithDependencies(type: Zip) { - from configurations.runtime - from configurations.runtime.artifacts.files - baseName 'elasticsearch-jdbc-with-dependencies' - into "elasticsearch-jdbc-with-dependencies-$version" +shadowJar { + classifier = null + relocate 'com.fasterxml', 'org.elasticsearch.fasterxml' +} + +// We don't need the normal jar, we use the shadow jar instead +jar.enabled = false + +// We need a no-dependencies jar though for qa testing so it doesn't conflict with the cli +configurations { + nodeps +} + +task nodepsJar(type: Jar) { + appendix 'nodeps' + from sourceSets.main.output } -assemble.dependsOn zipWithDependencies + +artifacts { + nodeps nodepsJar + archives shadowJar +} + +publishing { + publications { + nebula(MavenPublication) { + artifact shadowJar + pom.withXml { + // Nebula is mistakenly including all dependencies that are already shadowed into the shadow jar + asNode().remove(asNode().dependencies) + } + } + } +} + +assemble.dependsOn shadowJar // Use the jar for testing so the tests are more "real" test { classpath -= compileJava.outputs.files - classpath += jar.outputs.files - dependsOn jar + classpath -= configurations.compile + classpath -= configurations.runtime + classpath += shadowJar.outputs.files + dependsOn shadowJar } diff --git a/x-pack/qa/build.gradle b/x-pack/qa/build.gradle index 1570b218592fe..24b6618b7d8f6 100644 --- a/x-pack/qa/build.gradle +++ b/x-pack/qa/build.gradle @@ -28,5 +28,9 @@ gradle.projectsEvaluated { project.tasks.remove(assemble) project.build.dependsOn.remove('assemble') } + Task dependenciesInfo = project.tasks.findByName('dependenciesInfo') + if (dependenciesInfo) { + project.precommit.dependsOn.remove('dependenciesInfo') + } } } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java 
b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java index 18b1071280307..2f3ea6c83a536 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java @@ -239,6 +239,9 @@ public void testOverflowToDisk() throws Exception { throw e; } + // flushing the job forces an index refresh, see https://github.com/elastic/elasticsearch/issues/31173 + flushJob(job.getId(), false); + List<ForecastRequestStats> forecastStats = getForecastStats(); assertThat(forecastStats.size(), equalTo(1)); ForecastRequestStats forecastRequestStats = forecastStats.get(0); @@ -261,6 +264,16 @@ public void testOverflowToDisk() throws Exception { } closeJob(job.getId()); + + forecastStats = getForecastStats(); + assertThat(forecastStats.size(), equalTo(2)); + for (ForecastRequestStats stats : forecastStats) { + forecasts = getForecasts(job.getId(), stats); + + assertThat(stats.getRecordCount(), equalTo(8000L)); + assertThat(forecasts.size(), equalTo(8000)); + } + } private void createDataWithLotsOfClientIps(TimeValue bucketSpan, Job.Builder job) throws IOException { diff --git a/x-pack/qa/sql/build.gradle b/x-pack/qa/sql/build.gradle index 8f77e1608d6d0..0bea3a9364b71 100644 --- a/x-pack/qa/sql/build.gradle +++ b/x-pack/qa/sql/build.gradle @@ -9,7 +9,7 @@ dependencies { compile "org.elasticsearch.test:framework:${version}" // JDBC testing dependencies - compile xpackProject('plugin:sql:jdbc') + compile project(path: xpackModule('sql:jdbc'), configuration: 'nodeps') compile "net.sourceforge.csvjdbc:csvjdbc:1.0.34" // CLI testing dependencies @@ -22,6 +22,7 @@ dependencies { test.enabled = false dependencyLicenses.enabled = false +dependenciesInfo.enabled = false // the main files are actually test files, so use the appropriate forbidden api sigs forbiddenApisMain { @@ -87,7 +88,9 @@ subprojects { // JDBC testing dependencies testRuntime "net.sourceforge.csvjdbc:csvjdbc:1.0.34" testRuntime "com.h2database:h2:1.4.197" - testRuntime xpackProject('plugin:sql:jdbc') + testRuntime project(path: xpackModule('sql:jdbc'), configuration: 'nodeps') + testRuntime xpackProject('plugin:sql:sql-shared-client') + // TODO check if needed testRuntime("org.antlr:antlr4-runtime:4.5.3") { diff --git a/x-pack/test/feature-aware/build.gradle b/x-pack/test/feature-aware/build.gradle index 217ed25a2d4b1..11b0e67183c8f 100644 --- a/x-pack/test/feature-aware/build.gradle +++ b/x-pack/test/feature-aware/build.gradle @@ -10,6 +10,7 @@ dependencies { forbiddenApisMain.enabled = true dependencyLicenses.enabled = false +dependenciesInfo.enabled = false jarHell.enabled = false
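The translog changes in this patch (skippedOperations, the trimmedAboveSeqNo checkpoint field, trimOperations, and the resync trimAboveSeqNo) all revolve around one rule, mirrored by the InMemoryTranslog helper in TranslogTests: an operation is dropped only when it carries a primary term below the trim term AND a sequence number above the trim point. Below is a minimal standalone sketch of that rule for readers of this patch; the class and method names are illustrative only, not the Elasticsearch API.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch of the trim rule exercised by the tests above (hypothetical names, not ES code).
final class TranslogTrimSketch {
    static final class Op {
        final long seqNo;
        final long primaryTerm;
        Op(long seqNo, long primaryTerm) {
            this.seqNo = seqNo;
            this.primaryTerm = primaryTerm;
        }
    }

    private final Map<Long, Op> ops = new HashMap<>();
    private int skipped = 0;

    // An op with the same seqNo but a newer primary term replaces the stale copy.
    void add(Op op) {
        ops.put(op.seqNo, op);
    }

    // Drop an op only if it is BOTH above the trim point AND from an older
    // primary term; ops written under the current term are always kept.
    void trimOperations(long belowTerm, long aboveSeqNo) {
        ops.values().removeIf(op -> {
            final boolean drop = op.primaryTerm < belowTerm && op.seqNo > aboveSeqNo;
            if (drop) {
                skipped++; // the tests observe this via Translog.Snapshot#skippedOperations()
            }
            return drop;
        });
    }

    int skippedOperations() {
        return skipped;
    }
}

For example, with belowTerm = 2 and aboveSeqNo = 5, an op (seqNo=7, term=1) is dropped while (seqNo=7, term=2) and (seqNo=3, term=1) both survive. That asymmetry is exactly what testSnapshotTrimmedOperations randomizes over, and it is why testResyncAfterPrimaryPromotion only trims the stale extra docs on the lagging replica.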