Merge branch 'upstream/master' into reuse-date-stream-timestamp
* upstream/master:
  Validate tsdb's routing_path (elastic#79384)
  Adjust the BWC version for the return200ForClusterHealthTimeout field (elastic#79436)
  API for adding and removing indices from a data stream (elastic#79279)
  Exposing the ability to log deprecated settings at non-critical level (elastic#79107)
  Convert operator privilege license object to LicensedFeature (elastic#79407)
  Mute SnapshotBasedIndexRecoveryIT testSeqNoBasedRecoveryIsUsedAfterPrimaryFailOver (elastic#79456)
  Create cache files with CREATE_NEW & SPARSE options (elastic#79371)
  Revert "[ML] Use a new annotations index for future annotations (elastic#79151)"
  [ML] Use a new annotations index for future annotations (elastic#79151)
  [ML] Removing legacy code from ML/transform auditor (elastic#79434)
  Fix rate agg with custom `_doc_count` (elastic#79346)
  Optimize SLM Policy Queries (elastic#79341)
  Fix execution of exists query within nested queries on field with doc_values disabled (elastic#78841)
  Stricter UpdateSettingsRequest parsing on the REST layer (elastic#79227)
  Do not release snapshot file download permit during recovery retries (elastic#79409)
  Preserve request headers in a mixed version cluster (elastic#79412)
  Adjust versions after elastic#79044 backport to 7.x (elastic#79424)
  Mute BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests (elastic#79429)
  Fail on SSPL licensed x-pack sources (elastic#79348)

# Conflicts:
#	server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java
weizijun committed Oct 19, 2021
2 parents 93beacb + 7a32158 commit db25634
Showing 103 changed files with 3,038 additions and 696 deletions.
@@ -36,7 +36,7 @@
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

import org.gradle.api.model.ObjectFactory;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import java.io.BufferedWriter;
@@ -51,6 +51,8 @@
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import javax.inject.Inject;
import java.io.Serializable;

/**
* Checks files for license headers.
@@ -95,17 +97,18 @@ public List<String> getExcludes() {
return excludes;
}

public Map<String, String> getAdditionalLicenses() {
return additionalLicenses;
}

public void setExcludes(List<String> excludes) {
this.excludes = excludes;
}

@OutputFile
private File reportFile = new File(getProject().getBuildDir(), "reports/licenseHeaders/rat.xml");

private static List<License> conventionalLicenses = Arrays.asList(
// Dual SSPLv1 and Elastic
new License("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server")
);

/**
* Allowed license families for this project.
*/
@@ -118,13 +121,17 @@ public void setExcludes(List<String> excludes) {
*/
@Input
private List<String> excludes = new ArrayList<String>();

private ListProperty<License> additionalLicenses;

/**
* Additional license families that may be found. The key is the license category name (5 characters)
* followed by the family name, and the value is the pattern to search for.
*/
@Input
protected Map<String, String> additionalLicenses = new HashMap<String, String>();

public ListProperty<License> getAdditionalLicenses() {
return additionalLicenses;
}
/**
* Add a new license type.
* <p>
@@ -139,7 +146,12 @@ public void additionalLicense(final String categoryName, String familyName, String pattern) {
throw new IllegalArgumentException("License category name must be exactly 5 characters, got " + categoryName);
}

additionalLicenses.put(categoryName + familyName, pattern);
additionalLicenses.add(new License(categoryName, familyName, pattern));
}

@Inject
public LicenseHeadersTask(ObjectFactory objectFactory) {
additionalLicenses = objectFactory.listProperty(License.class).convention(conventionalLicenses);
}

@TaskAction
@@ -160,14 +172,10 @@ public void runRat() {
matchers.add(subStringMatcher("GEN ", "Generated", "ANTLR GENERATED CODE"));
// Vendored Code
matchers.add(subStringMatcher("VEN ", "Vendored", "@notice"));
// Dual SSPLv1 and Elastic
matchers.add(subStringMatcher("DUAL", "SSPL+Elastic License", "the Elastic License 2.0 or the Server"));

for (Map.Entry<String, String> additional : additionalLicenses.entrySet()) {
String category = additional.getKey().substring(0, 5);
String family = additional.getKey().substring(5);
matchers.add(subStringMatcher(category, family, additional.getValue()));
}
additionalLicenses.get().forEach(l ->
matchers.add(subStringMatcher(l.licenseFamilyCategory, l.licenseFamilyName, l.substringPattern))
);

reportConfiguration.setHeaderMatcher(new HeaderMatcherMultiplexer(matchers.toArray(IHeaderMatcher[]::new)));
reportConfiguration.setApprovedLicenseNames(approvedLicenses.stream().map(license -> {
@@ -190,7 +198,6 @@ private IHeaderMatcher subStringMatcher(String licenseFamilyCategory, String licenseFamilyName, String substringPattern) {
SubstringLicenseMatcher substringLicenseMatcher = new SubstringLicenseMatcher();
substringLicenseMatcher.setLicenseFamilyCategory(licenseFamilyCategory);
substringLicenseMatcher.setLicenseFamilyName(licenseFamilyName);

SubstringLicenseMatcher.Pattern pattern = new SubstringLicenseMatcher.Pattern();
pattern.setSubstring(substringPattern);
substringLicenseMatcher.addConfiguredPattern(pattern);
@@ -249,4 +256,16 @@ private static List<Element> elementList(NodeList resourcesNodes) {
}
return nodeList;
}

static class License implements Serializable {
private String licenseFamilyCategory;
private String licenseFamilyName;
private String substringPattern;

public License(String licenseFamilyCategory, String licenseFamilyName, String substringPattern) {
this.licenseFamilyCategory = licenseFamilyCategory;
this.licenseFamilyName = licenseFamilyName;
this.substringPattern = substringPattern;
}
}
}
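The diff above moves the task from a Map<String, String> of additional licenses to a ListProperty<License> whose convention pre-registers the dual SSPL+Elastic header, while keeping the additionalLicense(categoryName, familyName, pattern) entry point. A rough sketch of how custom build logic might register one more license family through that method; the plugin class, the GPL3 values, and the package in the import are illustrative assumptions, not part of this commit. The functional test below exercises the same API from a Gradle build script.

import org.elasticsearch.gradle.internal.precommit.LicenseHeadersTask; // package assumed; adjust to the build-tools layout
import org.gradle.api.Plugin;
import org.gradle.api.Project;

public class ExtraLicensePlugin implements Plugin<Project> {
    @Override
    public void apply(Project project) {
        // Register one more allowed header family on every LicenseHeadersTask in the project.
        // additionalLicense(...) enforces that the category name is exactly 5 characters.
        project.getTasks().withType(LicenseHeadersTask.class).configureEach(task ->
            task.additionalLicense("GPL3 ", "GNU General Public License v3", "GNU GENERAL PUBLIC LICENSE")
        );
    }
}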
@@ -59,6 +59,45 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest {
result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS
}

def "supports sspl by convention"() {
given:
buildFile << """
plugins {
id 'java'
id 'elasticsearch.internal-licenseheaders'
}
"""
dualLicensedFile()

when:
def result = gradleRunner("licenseHeaders").build()

then:
result.task(":licenseHeaders").outcome == TaskOutcome.SUCCESS
}

def "sspl default additional license can be overridden"() {
given:
buildFile << """
plugins {
id 'java'
id 'elasticsearch.internal-licenseheaders'
}
tasks.named("licenseHeaders").configure {
additionalLicense 'ELAST', 'Elastic License 2.0', '2.0; you may not use this file except in compliance with the Elastic License'
}
"""
elasticLicensed()
dualLicensedFile()

when:
def result = gradleRunner("licenseHeaders").buildAndFail()

then:
result.task(":licenseHeaders").outcome == TaskOutcome.FAILED
}

private File unapprovedSourceFile(String filePath = "src/main/java/org/acme/UnapprovedLicensed.java") {
File sourceFile = file(filePath);
sourceFile << """
@@ -115,6 +154,21 @@ class LicenseHeadersPrecommitPluginFuncTest extends AbstractGradleFuncTest {
"""
}

private File elasticLicensed() {
file("src/main/java/org/acme/ElasticLicensed.java") << """
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.acme;
public class ElasticLicensed {
}
"""
}

private String packageString(File sourceFile) {
String normalizedPath = normalized(sourceFile.getPath())
(normalizedPath.substring(normalizedPath.indexOf("src/main/java")) - "src/main/java/" - ("/" + sourceFile.getName())).replaceAll("/", ".")
@@ -136,14 +136,14 @@ public HttpEntity getEntity() {

/**
* Tests if a string matches the RFC 7234 specification for warning headers.
* This assumes that the warn code is always 299 and the warn agent is always
* Elasticsearch.
* This assumes that the warn code is always 299 or 300 and the warn agent is
* always Elasticsearch.
*
* @param s the value of a warning header formatted according to RFC 7234
* @return {@code true} if the input string matches the specification
*/
private static boolean matchWarningHeaderPatternByPrefix(final String s) {
return s.startsWith("299 Elasticsearch-");
return s.startsWith("299 Elasticsearch-") || s.startsWith("300 Elasticsearch-");
}

/**
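The change above widens the accepted prefix from warn code 299 only to 299 or 300. A minimal sketch of the resulting matching behaviour, with invented header strings:

public class WarningHeaderPrefixDemo {
    // Same prefix check as the updated matchWarningHeaderPatternByPrefix above.
    private static boolean matchWarningHeaderPatternByPrefix(final String s) {
        return s.startsWith("299 Elasticsearch-") || s.startsWith("300 Elasticsearch-");
    }

    public static void main(String[] args) {
        System.out.println(matchWarningHeaderPatternByPrefix("299 Elasticsearch-7.16.0 \"some warning\""));    // true
        System.out.println(matchWarningHeaderPatternByPrefix("300 Elasticsearch-7.16.0 \"another warning\"")); // true, newly accepted
        System.out.println(matchWarningHeaderPatternByPrefix("299 nginx \"unrelated warning\""));              // false
    }
}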
8 changes: 4 additions & 4 deletions docs/reference/aggregations/metrics/rate-aggregation.asciidoc
@@ -7,7 +7,7 @@
++++

A `rate` metrics aggregation can be used only inside a `date_histogram` or `composite` aggregation. It calculates a rate of documents
or a field in each bucket. The field values can be generated extracted from specific numeric or
or a field in each bucket. The field values can be extracted from specific numeric or
<<histogram,histogram fields>> in the documents.

NOTE: For `composite` aggregations, there must be exactly one `date_histogram` source for the `rate` aggregation to be supported.
@@ -27,7 +27,7 @@ A `rate` aggregation looks like this in isolation:
--------------------------------------------------
// NOTCONSOLE

The following request will group all sales records into monthly bucket and than convert the number of sales transaction in each bucket
The following request will group all sales records into monthly buckets and then convert the number of sales transactions in each bucket
into per annual sales rate.

[source,console]
@@ -56,8 +56,8 @@ GET sales/_search
<1> Histogram is grouped by month.
<2> But the rate is converted into annual rate.

The response will return the annual rate of transaction in each bucket. Since there are 12 months per year, the annual rate will
be automatically calculated by multiplying monthly rate by 12.
The response will return the annual rate of transactions in each bucket. Since there are 12 months per year, the annual rate will
be automatically calculated by multiplying the monthly rate by 12.

[source,console-result]
--------------------------------------------------
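The request and response bodies of this example are collapsed in this view. Purely as an illustration of the pattern the text describes, a monthly `date_histogram` carrying an annualized `rate` sub-aggregation could be sent with the low-level Java REST client roughly as follows; the index, field, and aggregation names are assumptions, not the collapsed snippet itself.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RateAggregationSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request search = new Request("GET", "/sales/_search");
            // Monthly buckets, each reporting an annualized transaction rate (monthly count * 12).
            search.setJsonEntity(
                "{\"size\":0,\"aggs\":{\"by_date\":{" +
                "\"date_histogram\":{\"field\":\"date\",\"calendar_interval\":\"month\"}," +
                "\"aggs\":{\"my_rate\":{\"rate\":{\"unit\":\"year\"}}}}}}");
            Response response = client.performRequest(search);
            System.out.println(response.getStatusLine());
        }
    }
}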
@@ -626,8 +626,8 @@ public void testCompatibleFieldDeclarations() throws IOException {
RestApiVersion.minimumSupported());
StructWithCompatibleFields o = StructWithCompatibleFields.PARSER.parse(parser, null);
assertEquals(1, o.intField);
assertWarnings(false, "[struct_with_compatible_fields][1:14] " +
"Deprecated field [old_name] used, expected [new_name] instead");
assertWarnings(false, new DeprecationWarning(DeprecationLogger.CRITICAL, "[struct_with_compatible_fields][1:14] " +
"Deprecated field [old_name] used, expected [new_name] instead"));
}
}

@@ -7,6 +7,7 @@
*/
package org.elasticsearch.xcontent;

import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.core.CheckedFunction;
import org.elasticsearch.core.RestApiVersion;
@@ -211,7 +212,8 @@ class TestStruct {
objectParser.declareField((i, v, c) -> v.test = i.text(), new ParseField("test", "old_test"), ObjectParser.ValueType.STRING);
objectParser.parse(parser, s, null);
assertEquals("foo", s.test);
assertWarnings(false, "[foo][1:15] Deprecated field [old_test] used, expected [test] instead");
assertWarnings(false, new DeprecationWarning(DeprecationLogger.CRITICAL, "[foo][1:15] Deprecated field [old_test] used, " +
"expected [test] instead"));
}

public void testFailOnValueType() throws IOException {
@@ -1072,8 +1074,8 @@ public void testCompatibleFieldDeclarations() throws IOException {
RestApiVersion.minimumSupported());
StructWithCompatibleFields o = StructWithCompatibleFields.PARSER.parse(parser, null);
assertEquals(1, o.intField);
assertWarnings(false, "[struct_with_compatible_fields][1:14] " +
"Deprecated field [old_name] used, expected [new_name] instead");
assertWarnings(false, new DeprecationWarning(DeprecationLogger.CRITICAL, "[struct_with_compatible_fields][1:14] " +
"Deprecated field [old_name] used, expected [new_name] instead"));

}
}
@@ -470,8 +470,8 @@ public Processor create(

boolean valid = metadata.isValid(currentState.metadata().settings());
if (valid && metadata.isCloseToExpiration()) {
HeaderWarning.addWarning("database [{}] was not updated for over 25 days, geoip processor will stop working if there " +
"is no update for 30 days", databaseFile);
HeaderWarning.addWarning(DeprecationLogger.CRITICAL, "database [{}] was not updated for over 25 days, geoip processor" +
" will stop working if there is no update for 30 days", databaseFile);
}

return valid;
@@ -98,6 +98,7 @@ public void setUpCluster() {
internalCluster().startDataOnlyNode(Settings.builder().put("thread_pool.write.size", 1).build());
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/79300")
public void testUpdateByQuery() throws Exception {
final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final boolean scriptEnabled = randomBoolean();
@@ -111,7 +112,7 @@ public void testUpdateByQuery() throws Exception {
});
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/79342")
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/79300")
public void testReindex() throws Exception {
final String sourceIndex = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
final String targetIndex = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
@@ -134,6 +135,7 @@ public void testReindex() throws Exception {
});
}

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/79300")
public void testDeleteByQuery() throws Exception {
final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);
executeConcurrentUpdatesOnSubsetOfDocs(indexName,
@@ -14,7 +14,10 @@
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicSessionCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;

import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.test.ESTestCase;
@@ -59,8 +62,9 @@ public void testDeprecationOfLoneAccessKey() {
Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials();
assertThat(credentials.getAWSAccessKeyId(), is("aws_key"));
assertThat(credentials.getAWSSecretKey(), is(""));
assertSettingDeprecationsAndWarnings(new String[]{},
"Setting [discovery.ec2.access_key] is set but [discovery.ec2.secret_key] is not, which will be unsupported in future");
assertSettingDeprecationsAndWarnings(new Setting<?>[]{},
new DeprecationWarning(DeprecationLogger.CRITICAL, "Setting [discovery.ec2.access_key] is set but " +
"[discovery.ec2.secret_key] is not, which will be unsupported in future"));
}

public void testDeprecationOfLoneSecretKey() {
@@ -70,8 +74,9 @@ public void testDeprecationOfLoneSecretKey() {
Ec2ClientSettings.getClientSettings(Settings.builder().setSecureSettings(secureSettings).build())).getCredentials();
assertThat(credentials.getAWSAccessKeyId(), is(""));
assertThat(credentials.getAWSSecretKey(), is("aws_secret"));
assertSettingDeprecationsAndWarnings(new String[]{},
"Setting [discovery.ec2.secret_key] is set but [discovery.ec2.access_key] is not, which will be unsupported in future");
assertSettingDeprecationsAndWarnings(new Setting<?>[]{},
new DeprecationWarning(DeprecationLogger.CRITICAL, "Setting [discovery.ec2.secret_key] is set but " +
"[discovery.ec2.access_key] is not, which will be unsupported in future"));
}

public void testRejectionOfLoneSessionToken() {
@@ -156,7 +156,8 @@ public void testEnforcedCooldownPeriod() throws IOException {
SnapshotState.SUCCESS,
SnapshotsService.SHARD_GEN_IN_REPO_DATA_VERSION.minimumCompatibilityVersion(),
0L, // -1 would refresh RepositoryData and find the real version
0L // -1 would refresh RepositoryData and find the real version
0L, // -1 would refresh RepositoryData and find the real version,
"" // null would refresh RepositoryData and find the real version
)));
final BytesReference serialized = BytesReference.bytes(modifiedRepositoryData.snapshotsToXContent(XContentFactory.jsonBuilder(),
SnapshotsService.OLD_SNAPSHOT_FORMAT));
@@ -100,7 +100,7 @@ public void testDeprecationWarnMessage() throws IOException {
);
}

assertWarnings("deprecated warn message1");
assertWarnings(true, new DeprecationWarning(Level.WARN, "deprecated warn message1")) ;
}

public void testDeprecatedMessageWithoutXOpaqueId() throws IOException {
@@ -0,0 +1,28 @@
{
"indices.modify_data_stream":{
"documentation":{
"url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html",
"description":"Modifies a data stream"
},
"stability":"stable",
"visibility":"public",
"headers":{
"accept": [ "application/json"],
"content_type": ["application/json"]
},
"url":{
"paths":[
{
"path":"/_data_stream/_modify",
"methods":["POST"]
}
]
},
"params":{
},
"body":{
"description":"The data stream modifications",
"required":true
}
}
}
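The spec above only declares the new endpoint; the body is described as "The data stream modifications" without a schema. As a hedged sketch of how a client might call it, assuming an actions list with add and remove entries; the body shape and field names are assumptions, not taken from this commit.

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ModifyDataStreamSketch {
    // Swaps one backing index of a data stream for another in a single request.
    static Response swapBackingIndex(RestClient client, String dataStream,
                                     String indexToAdd, String indexToRemove) throws java.io.IOException {
        Request request = new Request("POST", "/_data_stream/_modify");
        // Assumed body shape: a list of add/remove actions naming the data stream and a backing index.
        request.setJsonEntity(
            "{\"actions\":[" +
            "{\"add_backing_index\":{\"data_stream\":\"" + dataStream + "\",\"index\":\"" + indexToAdd + "\"}}," +
            "{\"remove_backing_index\":{\"data_stream\":\"" + dataStream + "\",\"index\":\"" + indexToRemove + "\"}}]}");
        return client.performRequest(request);
    }
}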