Merge branch 'master' into ccr
* master:
  Move default location of dependencies report (#31228)
  Remove dependencies report task dependencies (#31227)
  Add recognition of MPL 2.0 (#31226)
  Fix unknown licenses (#31223)
  Remove version from license file name for GCS SDK (#31221)
  Fully encapsulate LocalCheckpointTracker inside of the engine (#31213)
  [DOCS] Added 'fail_on_unsupported_field' param to MLT. Closes #28008 (#31160)
  Add licenses for transport-nio (#31218)
  Remove DocumentFieldMappers#simpleMatchToFullName. (#31041)
  Allow to trim all ops above a certain seq# with a term lower than X, post backport fix (#31211)
  Compliant SAML Response destination check (#31175)
  Remove DocumentFieldMappers#smartNameFieldMapper, as it is no longer needed. (#31018)
  Remove extraneous references to 'tokenized' in the mapper code. (#31010)
  Allow to trim all ops above a certain seq# with a term lower than X (#30176)
  SQL: Make a single JDBC driver jar (#31012)
  Enhance license detection for various licenses (#31198)
  [DOCS] Add note about long-lived idle connections (#30990)
  Move number of language analyzers to analysis-common module (#31143)
  Default max concurrent search req. numNodes * 5 (#31171)
  flush job to ensure all results have been written (#31187)
dnhatn committed Jun 9, 2018
2 parents 64b4cde + aa8aa0d commit afe02a3
Showing 116 changed files with 2,355 additions and 694 deletions.
7 changes: 6 additions & 1 deletion build.gradle
@@ -543,7 +543,7 @@ subprojects { project ->
}
}

/* Remove assemble on all qa projects because we don't need to publish
/* Remove assemble/dependenciesInfo on all qa projects because we don't need to publish
* artifacts for them. */
gradle.projectsEvaluated {
subprojects {
@@ -553,6 +553,11 @@ gradle.projectsEvaluated {
project.tasks.remove(assemble)
project.build.dependsOn.remove('assemble')
}
Task dependenciesInfo = project.tasks.findByName('dependenciesInfo')
if (dependenciesInfo) {
project.tasks.remove(dependenciesInfo)
project.precommit.dependsOn.remove('dependenciesInfo')
}
}
}
}
@@ -762,6 +762,10 @@ class BuildPlugin implements Plugin<Project> {

private static configureDependenciesInfo(Project project) {
Task deps = project.tasks.create("dependenciesInfo", DependenciesInfoTask.class)
deps.dependencies = project.configurations.compile.allDependencies
deps.runtimeConfiguration = project.configurations.runtime
deps.compileOnlyConfiguration = project.configurations.compileOnly
project.afterEvaluate {
deps.mappings = project.dependencyLicenses.mappings
}
}
}
@@ -19,14 +19,19 @@

package org.elasticsearch.gradle

import org.elasticsearch.gradle.precommit.DependencyLicensesTask
import org.gradle.api.DefaultTask
import org.gradle.api.artifacts.Configuration
import org.gradle.api.artifacts.Dependency
import org.gradle.api.artifacts.DependencyResolutionListener
import org.gradle.api.artifacts.DependencySet
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.InputDirectory
import org.gradle.api.tasks.OutputFile
import org.gradle.api.tasks.TaskAction

import java.util.regex.Matcher
import java.util.regex.Pattern

/**
* A task to gather information about the dependencies and export them into a csv file.
@@ -44,7 +49,14 @@ public class DependenciesInfoTask extends DefaultTask {

/** Dependencies to gather information from. */
@Input
public DependencySet dependencies
public Configuration runtimeConfiguration

/** We subtract compile-only dependencies. */
@Input
public Configuration compileOnlyConfiguration

@Input
public LinkedHashMap<String, String> mappings

/** Directory to read license files */
@InputDirectory
@@ -59,15 +71,34 @@ public class DependenciesInfoTask extends DefaultTask {

@TaskAction
public void generateDependenciesInfo() {

final DependencySet runtimeDependencies = runtimeConfiguration.getAllDependencies()
// we have to resolve the transitive dependencies and create a group:artifactId:version map
final Set<String> compileOnlyArtifacts =
compileOnlyConfiguration
.getResolvedConfiguration()
.resolvedArtifacts
.collect { it -> "${it.moduleVersion.id.group}:${it.moduleVersion.id.name}:${it.moduleVersion.id.version}" }

final StringBuilder output = new StringBuilder()

for (Dependency dependency : dependencies) {
// Only external dependencies are checked
if (dependency.group != null && dependency.group.contains("elasticsearch") == false) {
final String url = createURL(dependency.group, dependency.name, dependency.version)
final String licenseType = getLicenseType(dependency.group, dependency.name)
output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n")
for (final Dependency dependency : runtimeDependencies) {
// we do not need compile-only dependencies here
if (compileOnlyArtifacts.contains("${dependency.group}:${dependency.name}:${dependency.version}")) {
continue
}
// only external dependencies are checked
if (dependency.group != null && dependency.group.contains("org.elasticsearch")) {
continue
}

final String url = createURL(dependency.group, dependency.name, dependency.version)
final String dependencyName = DependencyLicensesTask.getDependencyName(mappings, dependency.name)
logger.info("mapped dependency ${dependency.group}:${dependency.name} to ${dependencyName} for license info")

final String licenseType = getLicenseType(dependency.group, dependencyName)
output.append("${dependency.group}:${dependency.name},${dependency.version},${url},${licenseType}\n")

}
outputFile.setText(output.toString(), 'UTF-8')
}
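
The loop above is easier to follow in isolation. Below is a standalone Groovy sketch of the same filtering idea, using made-up coordinates rather than Gradle's real Configuration and Dependency objects: compile-only artifacts are resolved to "group:name:version" keys, and any runtime dependency that is compile-only or internal to Elasticsearch is skipped before a report line is written.

// Illustrative sketch only: simplified data, no Gradle API.
Set<String> compileOnlyArtifacts = ['org.example:annotations:1.0'] as Set

List<Map> runtimeDependencies = [
        [group: 'org.example',       name: 'annotations', version: '1.0'],
        [group: 'com.acme',          name: 'runtime-lib', version: '2.3'],
        [group: 'org.elasticsearch', name: 'server',      version: '7.0.0']
]

List<String> reported = []
for (Map dependency : runtimeDependencies) {
    if (compileOnlyArtifacts.contains("${dependency.group}:${dependency.name}:${dependency.version}".toString())) {
        continue // compile-only dependencies are not shipped, so they are not reported
    }
    if (dependency.group != null && dependency.group.contains('org.elasticsearch')) {
        continue // only external dependencies are checked
    }
    reported << "${dependency.group}:${dependency.name},${dependency.version}".toString()
}
assert reported == ['com.acme:runtime-lib,2.3']
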
@@ -109,7 +140,8 @@ }
}

if (license) {
final String content = license.readLines("UTF-8").toString()
// replace * because they are sometimes used at the beginning of lines, as if the license were a multi-line comment
final String content = new String(license.readBytes(), "UTF-8").replaceAll("\\s+", " ").replaceAll("\\*", " ")
final String spdx = checkSPDXLicense(content)
if (spdx == null) {
// License has not been identified as SPDX.
@@ -133,15 +165,88 @@ public class DependenciesInfoTask extends DefaultTask {
private String checkSPDXLicense(final String licenseText) {
String spdx = null

final String APACHE_2_0 = "Apache.*License.*(v|V)ersion 2.0"
final String BSD_2 = "BSD 2-clause.*License"
final String APACHE_2_0 = "Apache.*License.*(v|V)ersion.*2\\.0"

final String BSD_2 = """
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1\\. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer\\.
2\\. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution\\.
THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.
""".replaceAll("\\s+", "\\\\s*")

final String BSD_3 = """
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
(1\\.)? Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer\\.
(2\\.)? Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution\\.
((3\\.)? The name of .+ may not be used to endorse or promote products
derived from this software without specific prior written permission\\.|
(3\\.)? Neither the name of .+ nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission\\.)
THIS SOFTWARE IS PROVIDED BY .+ (``|''|")AS IS(''|") AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED\\.
IN NO EVENT SHALL .+ BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES \\(INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION\\) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
\\(INCLUDING NEGLIGENCE OR OTHERWISE\\) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE\\.
""".replaceAll("\\s+", "\\\\s*")

final String CDDL_1_0 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.0"
final String CDDL_1_1 = "COMMON DEVELOPMENT AND DISTRIBUTION LICENSE.*Version 1.1"
final String ICU = "ICU License - ICU 1.8.1 and later"
final String LGPL_3 = "GNU LESSER GENERAL PUBLIC LICENSE.*Version 3"
final String MIT = "MIT License"

final String MIT = """
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files \\(the "Software"\\), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software\\.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\\. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE\\.
""".replaceAll("\\s+", "\\\\s*")

final String MOZILLA_1_1 = "Mozilla Public License.*Version 1.1"

final String MOZILLA_2_0 = "Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0"

switch (licenseText) {
case ~/.*${APACHE_2_0}.*/:
spdx = 'Apache-2.0'
@@ -152,6 +257,9 @@
case ~/.*${BSD_2}.*/:
spdx = 'BSD-2-Clause'
break
case ~/.*${BSD_3}.*/:
spdx = 'BSD-3-Clause'
break
case ~/.*${LGPL_3}.*/:
spdx = 'LGPL-3.0'
break
@@ -167,6 +275,9 @@
case ~/.*${MOZILLA_1_1}.*/:
spdx = 'MPL-1.1'
break
case ~/.*${MOZILLA_2_0}.*/:
spdx = 'MPL-2.0'
break
default:
break
}
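
The license-detection changes above (#31198, #31223, #31226) all rely on the same trick: flatten the license file's whitespace (and blank out '*' characters used as comment-style margins), then match whitespace-tolerant patterns. The standalone Groovy sketch below shows the idea with one abbreviated pattern and a stand-in license text; the real task uses the much longer texts defined above.

// Sketch of the normalize-then-match approach used by checkSPDXLicense.
String licenseFileText = '''
 * Mozilla Public License
 * Version 2.0
'''

// collapse whitespace and strip comment-style '*' margins, as the task does
String content = licenseFileText.replaceAll("\\s+", " ").replaceAll("\\*", " ")

final String MOZILLA_2_0 = "Mozilla\\s*Public\\s*License\\s*Version\\s*2\\.0"

String spdx = null
switch (content) {
    case ~/.*${MOZILLA_2_0}.*/:
        spdx = 'MPL-2.0'
        break
    default:
        break
}
assert spdx == 'MPL-2.0'
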
@@ -109,6 +109,10 @@ public class DependencyLicensesTask extends DefaultTask {
mappings.put(from, to)
}

public LinkedHashMap<String, String> getMappings() {
return new LinkedHashMap<>(mappings)
}

/**
* Add a rule which will skip SHA checking for the given dependency name. This should be used for
* locally built dependencies, which cause the sha to change constantly.
@@ -129,10 +133,6 @@ public class DependencyLicensesTask extends DefaultTask {
throw new GradleException("Licences dir ${licensesDir} does not exist, but there are dependencies")
}


// order is the same for keys and values iteration since we use a linked hashmap
List<String> mapped = new ArrayList<>(mappings.values())
Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
Map<String, Integer> licenses = new HashMap<>()
Map<String, Integer> notices = new HashMap<>()
Set<File> shaFiles = new HashSet<File>()
@@ -151,7 +151,7 @@

for (File dependency : dependencies) {
String jarName = dependency.getName()
String depName = jarName - ~/\-\d+.*/
String depName = jarName - ~/\-v?\d+.*/
if (ignoreShas.contains(depName)) {
// local deps should not have sha files!
if (getShaFile(jarName).exists()) {
@@ -162,16 +162,10 @@
checkSha(dependency, jarName, shaFiles)
}

logger.info("Checking license/notice for " + depName)
Matcher match = mappingsPattern.matcher(depName)
if (match.matches()) {
int i = 0
while (i < match.groupCount() && match.group(i + 1) == null) ++i;
logger.info("Mapped dependency name ${depName} to ${mapped.get(i)} for license check")
depName = mapped.get(i)
}
checkFile(depName, jarName, licenses, 'LICENSE')
checkFile(depName, jarName, notices, 'NOTICE')
final String dependencyName = getDependencyName(mappings, depName)
logger.info("mapped dependency name ${depName} to ${dependencyName} for license/notice check")
checkFile(dependencyName, jarName, licenses, 'LICENSE')
checkFile(dependencyName, jarName, notices, 'NOTICE')
}

licenses.each { license, count ->
@@ -189,6 +183,19 @@
}
}

public static String getDependencyName(final LinkedHashMap<String, String> mappings, final String dependencyName) {
// order is the same for keys and values iteration since we use a linked hashmap
List<String> mapped = new ArrayList<>(mappings.values())
Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
Matcher match = mappingsPattern.matcher(dependencyName)
if (match.matches()) {
int i = 0
while (i < match.groupCount() && match.group(i + 1) == null) ++i;
return mapped.get(i)
}
return dependencyName
}

private File getShaFile(String jarName) {
return new File(licensesDir, jarName + SHA_EXTENSION)
}
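
The extracted getDependencyName helper above is the piece that lets DependenciesInfoTask and DependencyLicensesTask share one mapping table. A self-contained approximation with a couple of invented mappings shows how the LinkedHashMap key order lines up with the regex capture groups:

import java.util.regex.Matcher
import java.util.regex.Pattern

// Approximation of DependencyLicensesTask.getDependencyName; mappings and names are illustrative.
String mapDependencyName(LinkedHashMap<String, String> mappings, String dependencyName) {
    List<String> mapped = new ArrayList<>(mappings.values())
    Pattern mappingsPattern = Pattern.compile('(' + mappings.keySet().join(')|(') + ')')
    Matcher match = mappingsPattern.matcher(dependencyName)
    if (match.matches()) {
        int i = 0
        while (i < match.groupCount() && match.group(i + 1) == null) ++i
        return mapped.get(i)
    }
    return dependencyName
}

// e.g. every lucene-* artifact shares one 'lucene' license/notice pair
LinkedHashMap<String, String> mappings = ['lucene-.*': 'lucene', 'jackson-.*': 'jackson']
assert mapDependencyName(mappings, 'lucene-core') == 'lucene'
assert mapDependencyName(mappings, 'jackson-databind') == 'jackson'
assert mapDependencyName(mappings, 'joda-time') == 'joda-time' // unmapped names pass through unchanged
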
3 changes: 3 additions & 0 deletions client/client-benchmark-noop-api-plugin/build.gradle
@@ -31,6 +31,9 @@ esplugin {
tasks.remove(assemble)
build.dependsOn.remove('assemble')

dependencyLicenses.enabled = false
dependenciesInfo.enabled = false

compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked"

// no unit tests
2 changes: 1 addition & 1 deletion distribution/build.gradle
@@ -34,7 +34,7 @@ Collection distributions = project('archives').subprojects + project('packages')
task generateDependenciesReport(type: ConcatFilesTask) {
files = fileTree(dir: project.rootDir, include: '**/dependencies.csv' )
headerLine = "name,version,url,license"
target = new File(System.getProperty('csv')?: "${project.buildDir}/dependencies/es-dependencies.csv")
target = new File(System.getProperty('csv')?: "${project.buildDir}/reports/dependencies/es-dependencies.csv")
}
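
For reference, the relocation in #31228 only changes the default: the report still respects a -Dcsv=... override. A plain-Groovy sketch of the resolution, with illustrative paths:

// Illustrative only; mirrors the target selection above.
File resolveReportTarget(File buildDir) {
    String override = System.getProperty('csv')
    return new File(override ?: "${buildDir}/reports/dependencies/es-dependencies.csv")
}

println resolveReportTarget(new File('build'))
// prints build/reports/dependencies/es-dependencies.csv unless -Dcsv=... was supplied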

/*****************************************************************************
21 changes: 20 additions & 1 deletion docs/reference/modules/transport.asciidoc
@@ -44,7 +44,12 @@ time setting format). Defaults to `30s`.
|`transport.tcp.compress` |Set to `true` to enable compression (`DEFLATE`)
between all nodes. Defaults to `false`.

|`transport.ping_schedule` | Schedule a regular ping message to ensure that connections are kept alive. Defaults to `5s` in the transport client and `-1` (disabled) elsewhere.
|`transport.ping_schedule` | Schedule a regular application-level ping message
to ensure that transport connections between nodes are kept alive. Defaults to
`5s` in the transport client and `-1` (disabled) elsewhere. It is preferable to
correctly configure TCP keep-alives instead of using this feature, because TCP
keep-alives apply to all kinds of long-lived connection and not just to
transport connections.

|=======================================================================

@@ -80,6 +85,20 @@ The following parameters can be configured like that
* `tcp_send_buffer_size`: Configures the send buffer size of the socket
* `tcp_receive_buffer_size`: Configures the receive buffer size of the socket

[float]
==== Long-lived idle connections

Elasticsearch opens a number of long-lived TCP connections between each pair of
nodes in the cluster, and some of these connections may be idle for an extended
period of time. Nonetheless, Elasticsearch requires these connections to remain
open, and it can disrupt the operation of the cluster if any inter-node
connections are closed by an external influence such as a firewall. It is
important to configure your network to preserve long-lived idle connections
between Elasticsearch nodes, for instance by leaving `tcp_keep_alive` enabled
and ensuring that the keepalive interval is shorter than any timeout that might
cause idle connections to be closed, or by setting `transport.ping_schedule` if
keepalives cannot be configured.
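
As a hedged illustration of the fallback described above, the application-level ping can be enabled in `elasticsearch.yml`; the interval shown is simply the transport-client default quoted earlier, and OS-level TCP keep-alives remain the preferred option:

# fallback when TCP keep-alives cannot be configured on the hosts or network
transport.ping_schedule: 5s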

[float]
=== Transport Tracer

7 changes: 7 additions & 0 deletions docs/reference/query-dsl/mlt-query.asciidoc
@@ -241,6 +241,13 @@ number of terms that must match.
The syntax is the same as the <<query-dsl-minimum-should-match,minimum should match>>.
(Defaults to `"30%"`).

`fail_on_unsupported_field`::
Controls whether the query should fail (throw an exception) if any of the
specified fields are not of the supported types (`text` or `keyword`). Set this
to `false` to ignore the field and continue processing. Defaults to `true`.

`boost_terms`::
Each term in the formed query could be further boosted by their tf-idf score.
This sets the boost factor to use when using this feature. Defaults to
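
A minimal request sketch for the `fail_on_unsupported_field` parameter documented above; the endpoint, field names, and query text are illustrative only:

GET /_search
{
  "query": {
    "more_like_this": {
      "fields": ["title", "description"],
      "like": "Once upon a time",
      "fail_on_unsupported_field": false
    }
  }
}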
@@ -17,19 +17,21 @@
* under the License.
*/

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider<ArabicAnalyzer> {

private final ArabicAnalyzer arabicAnalyzer;

public ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
ArabicAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
arabicAnalyzer = new ArabicAnalyzer(
Analysis.parseStopWords(env, settings, ArabicAnalyzer.getDefaultStopSet()),