From ef1066d7f876d63869cfbd3ed73c3c383f8aa69c Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 5 Sep 2018 22:04:53 +0200
Subject: [PATCH 01/91] INGEST: Allow Repeated Invocation of Pipeline (#33419)

* Allows repeated, non-recursive invocation of the same pipeline

---
 .../ingest/common/PipelineProcessorTests.java     | 16 ++++++++++++++++
 .../org/elasticsearch/ingest/IngestDocument.java  | 10 +++++++---
 2 files changed, 23 insertions(+), 3 deletions(-)

diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java
index 3103fb0392e96..6e18bac40d4aa 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/PipelineProcessorTests.java
@@ -113,4 +113,20 @@ public void testThrowsOnRecursivePipelineInvocations() throws Exception {
             "Recursive invocation of pipeline [inner] detected.", e.getRootCause().getMessage()
         );
     }
+
+    public void testAllowsRepeatedPipelineInvocations() throws Exception {
+        String innerPipelineId = "inner";
+        IngestService ingestService = mock(IngestService.class);
+        IngestDocument testIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+        Map<String, Object> outerConfig = new HashMap<>();
+        outerConfig.put("pipeline", innerPipelineId);
+        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
+        Pipeline inner = new Pipeline(
+            innerPipelineId, null, null, new CompoundProcessor()
+        );
+        when(ingestService.getPipeline(innerPipelineId)).thenReturn(inner);
+        Processor outerProc = factory.create(Collections.emptyMap(), null, outerConfig);
+        outerProc.execute(testIngestDocument);
+        outerProc.execute(testIngestDocument);
+    }
 }

diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
index 5f122358d0c43..10cb2fd17fec6 100644
--- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
+++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
@@ -645,10 +645,14 @@ private static Object deepCopy(Object value) {
      * @throws Exception On exception in pipeline execution
      */
     public IngestDocument executePipeline(Pipeline pipeline) throws Exception {
-        if (this.executedPipelines.add(pipeline) == false) {
-            throw new IllegalStateException("Recursive invocation of pipeline [" + pipeline.getId() + "] detected.");
+        try {
+            if (this.executedPipelines.add(pipeline) == false) {
+                throw new IllegalStateException("Recursive invocation of pipeline [" + pipeline.getId() + "] detected.");
+            }
+            return pipeline.execute(this);
+        } finally {
+            executedPipelines.remove(pipeline);
         }
-        return pipeline.execute(this);
     }
 
     @Override

From 6f9c9ab5e1199f6f0823cb2cec6a1ef6dff3f225 Mon Sep 17 00:00:00 2001
From: Jake Landis
Date: Wed, 5 Sep 2018 14:04:25 -0700
Subject: [PATCH 02/91] [ingest] geo-ip performance improvements (#33029)

Re-implement the cache to avoid Jackson JSON de-serialization for every
IP lookup. The built-in MaxMind cache caches JsonNode objects, which
requires de-serialization for every lookup, even if the object is found
in the cache. Profiling shows that this is very expensive (CPU). The
cache will now consist of the fully de-serialized objects. Profiling
shows that the new footprint for the City DB is ~6KB per cache entry,
which may result in a ~6MB increase with the default 1000-entry cache.
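To illustrate the idea, here is a rough, hypothetical sketch (class and
method names are illustrative and eviction is omitted; the actual
implementation in this patch is the GeoIpCache class further below):

    import java.net.InetAddress;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.function.Function;

    // Sketch: cache fully de-serialized responses keyed by (ip, responseType)
    // so that a cache hit skips JSON de-serialization entirely.
    final class DeserializedGeoCache {
        private final Map<String, Object> cache = new ConcurrentHashMap<>();

        <T> T putIfAbsent(InetAddress ip, Class<T> responseType, Function<InetAddress, T> load) {
            // one entry per (ip, responseType) pair -- the 1:1 relationship
            // between an IP lookup and a cached value described above
            String key = ip.getHostAddress() + "|" + responseType.getName();
            Object value = cache.get(key);
            if (value == null) {
                value = load.apply(ip);  // de-serialize once on a miss
                cache.put(key, value);   // later hits reuse the object as-is
            }
            return responseType.cast(value);
        }
    }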
The performance has been measured as up to 40% faster on a modern
4 core/8 thread CPU for an ingest (minimal indexing) workflow. Further,
since the prior implementation cached the JsonNode objects, and there is
not a 1:1 relationship between an IP lookup and a JsonNode object, the
default cache size was most likely too small to be very effective. While
this change does not alter the default cache size of 1000, the cache will
now be more effective since there is a 1:1 relationship between an IP
lookup and a value in the cache.
---
 .../ingest/geoip/GeoIpCache.java              | 46 ---------
 .../ingest/geoip/GeoIpProcessor.java          | 73 ++++++++------
 .../ingest/geoip/IngestGeoIpPlugin.java       | 95 ++++++++++++++++---
 .../ingest/geoip/GeoIpCacheTests.java         | 51 ----------
 .../geoip/GeoIpProcessorFactoryTests.java     | 32 +++----
 .../ingest/geoip/GeoIpProcessorTests.java     | 39 +++++---
 .../ingest/geoip/IngestGeoIpPluginTests.java  | 64 +++++++++++++
 7 files changed, 230 insertions(+), 170 deletions(-)
 delete mode 100644 plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
 delete mode 100644 plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java
 create mode 100644 plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpPluginTests.java

diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
deleted file mode 100644
index 83a3374b504dd..0000000000000
--- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpCache.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.ingest.geoip;
-
-import com.fasterxml.jackson.databind.JsonNode;
-import com.maxmind.db.NodeCache;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.common.cache.Cache;
-import org.elasticsearch.common.cache.CacheBuilder;
-
-import java.io.IOException;
-import java.util.concurrent.ExecutionException;
-
-final class GeoIpCache implements NodeCache {
-    private final Cache<Integer, JsonNode> cache;
-
-    GeoIpCache(long maxSize) {
-        this.cache = CacheBuilder.<Integer, JsonNode>builder().setMaximumWeight(maxSize).build();
-    }
-
-    @Override
-    public JsonNode get(int key, Loader loader) throws IOException {
-        try {
-            return cache.computeIfAbsent(key, loader::load);
-        } catch (ExecutionException e) {
-            Throwable cause = e.getCause() != null ?
e.getCause() : e; - throw new ElasticsearchException(cause); - } - } -} diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index b5dbf5a7f34de..a0be7557a5a8a 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -36,6 +36,7 @@ import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache; import java.net.InetAddress; import java.security.AccessController; @@ -66,14 +67,18 @@ public final class GeoIpProcessor extends AbstractProcessor { private final DatabaseReader dbReader; private final Set properties; private final boolean ignoreMissing; + private final GeoIpCache cache; - GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties, boolean ignoreMissing) { + + GeoIpProcessor(String tag, String field, DatabaseReader dbReader, String targetField, Set properties, boolean ignoreMissing, + GeoIpCache cache) { super(tag); this.field = field; this.targetField = targetField; this.dbReader = dbReader; this.properties = properties; this.ignoreMissing = ignoreMissing; + this.cache = cache; } boolean isIgnoreMissing() { @@ -146,15 +151,16 @@ Set getProperties() { private Map retrieveCityGeoData(InetAddress ipAddress) { SpecialPermission.check(); - CityResponse response = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return dbReader.city(ipAddress); - } catch (AddressNotFoundException e) { - throw new AddressNotFoundRuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + CityResponse response = AccessController.doPrivileged((PrivilegedAction) () -> + cache.putIfAbsent(ipAddress, CityResponse.class, ip -> { + try { + return dbReader.city(ip); + } catch (AddressNotFoundException e) { + throw new AddressNotFoundRuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); + } + })); Country country = response.getCountry(); City city = response.getCity(); @@ -231,15 +237,16 @@ private Map retrieveCityGeoData(InetAddress ipAddress) { private Map retrieveCountryGeoData(InetAddress ipAddress) { SpecialPermission.check(); - CountryResponse response = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return dbReader.country(ipAddress); - } catch (AddressNotFoundException e) { - throw new AddressNotFoundRuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + CountryResponse response = AccessController.doPrivileged((PrivilegedAction) () -> + cache.putIfAbsent(ipAddress, CountryResponse.class, ip -> { + try { + return dbReader.country(ip); + } catch (AddressNotFoundException e) { + throw new AddressNotFoundRuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); + } + })); Country country = response.getCountry(); Continent continent = response.getContinent(); @@ -275,15 +282,16 @@ private Map retrieveCountryGeoData(InetAddress ipAddress) { private Map retrieveAsnGeoData(InetAddress ipAddress) { SpecialPermission.check(); - AsnResponse response = AccessController.doPrivileged((PrivilegedAction) () -> { - try { - return dbReader.asn(ipAddress); - } catch (AddressNotFoundException e) { - throw new 
AddressNotFoundRuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + AsnResponse response = AccessController.doPrivileged((PrivilegedAction) () -> + cache.putIfAbsent(ipAddress, AsnResponse.class, ip -> { + try { + return dbReader.asn(ip); + } catch (AddressNotFoundException e) { + throw new AddressNotFoundRuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); + } + })); Integer asn = response.getAutonomousSystemNumber(); String organization_name = response.getAutonomousSystemOrganization(); @@ -322,9 +330,11 @@ public static final class Factory implements Processor.Factory { ); private final Map databaseReaders; + private final GeoIpCache cache; - public Factory(Map databaseReaders) { + public Factory(Map databaseReaders, GeoIpCache cache) { this.databaseReaders = databaseReaders; + this.cache = cache; } @Override @@ -368,14 +378,15 @@ public GeoIpProcessor create(Map registry, String pro } } - return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties, ignoreMissing); + return new GeoIpProcessor(processorTag, ipField, databaseReader, targetField, properties, ignoreMissing, cache); } } // Geoip2's AddressNotFoundException is checked and due to the fact that we need run their code // inside a PrivilegedAction code block, we are forced to catch any checked exception and rethrow // it with an unchecked exception. - private static final class AddressNotFoundRuntimeException extends RuntimeException { + //package private for testing + static final class AddressNotFoundRuntimeException extends RuntimeException { AddressNotFoundRuntimeException(Throwable cause) { super(cause); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index c9c742d178980..95e20f340b5ae 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -23,16 +23,20 @@ import com.maxmind.db.NodeCache; import com.maxmind.db.Reader; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.core.internal.io.IOUtils; +import com.maxmind.geoip2.model.AbstractResponse; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; import java.io.Closeable; import java.io.IOException; +import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.PathMatcher; @@ -42,6 +46,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.function.Function; import java.util.stream.Stream; public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable { @@ -61,24 +67,18 @@ public Map getProcessors(Processor.Parameters paramet throw new IllegalStateException("getProcessors called twice for geoip plugin!!"); } Path geoIpConfigDirectory = parameters.env.configFile().resolve("ingest-geoip"); - NodeCache cache; long cacheSize = CACHE_SIZE.get(parameters.env.settings()); - if (cacheSize > 0) { - 
cache = new GeoIpCache(cacheSize);
-        } else {
-            cache = NoCache.getInstance();
-        }
         try {
-            databaseReaders = loadDatabaseReaders(geoIpConfigDirectory, cache);
+            databaseReaders = loadDatabaseReaders(geoIpConfigDirectory);
         } catch (IOException e) {
             throw new RuntimeException(e);
         }
-        return Collections.singletonMap(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory(databaseReaders));
+        return Collections.singletonMap(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(cacheSize)));
     }
 
-    static Map<String, DatabaseReaderLazyLoader> loadDatabaseReaders(Path geoIpConfigDirectory, NodeCache cache) throws IOException {
+    static Map<String, DatabaseReaderLazyLoader> loadDatabaseReaders(Path geoIpConfigDirectory) throws IOException {
         if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) {
-            throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory  + "] containing databases doesn't exist");
+            throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist");
         }
         boolean loadDatabaseOnHeap = Booleans.parseBoolean(System.getProperty("es.geoip.load_db_on_heap", "false"));
         Map<String, DatabaseReaderLazyLoader> databaseReaders = new HashMap<>();
@@ -92,7 +92,7 @@ static Map<String, DatabaseReaderLazyLoader> loadDatabaseReaders(Path geoIpConfi
                 String databaseFileName = databasePath.getFileName().toString();
                 DatabaseReaderLazyLoader holder = new DatabaseReaderLazyLoader(databaseFileName,
                     () -> {
-                        DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(cache);
+                        DatabaseReader.Builder builder = createDatabaseBuilder(databasePath).withCache(NoCache.getInstance());
                         if (loadDatabaseOnHeap) {
                             builder.fileMode(Reader.FileMode.MEMORY);
                         } else {
@@ -119,4 +119,75 @@ public void close() throws IOException {
         }
     }
 
+    /**
+     * The in-memory cache for the geoip data. There should only be one instance of this class.
+     * This cache differs from MaxMind's {@link NodeCache} in that it stores the fully deserialized objects to avoid the
+     * cost of deserialization for each lookup (cached or not). This comes at the slight expense of higher memory usage,
+     * but a significant reduction of CPU usage.
+     */
+    static class GeoIpCache {
+        private final Cache<CacheKey, AbstractResponse> cache;
+
+        //package private for testing
+        GeoIpCache(long maxSize) {
+            if (maxSize < 0) {
+                throw new IllegalArgumentException("geoip max cache size must be 0 or greater");
+            }
+            this.cache = CacheBuilder.<CacheKey, AbstractResponse>builder().setMaximumWeight(maxSize).build();
+        }
+
+        <T extends AbstractResponse> T putIfAbsent(InetAddress ip, Class<T> responseType,
+                                                   Function<InetAddress, AbstractResponse> retrieveFunction) {
+
+            //can't use cache.computeIfAbsent due to the elevated permissions for jackson (run via the cache loader)
+            CacheKey<T> cacheKey = new CacheKey<>(ip, responseType);
+            //intentionally non-locking for simplicity...it's OK if we re-put the same key/value in the cache during a race condition.
+            AbstractResponse response = cache.get(cacheKey);
+            if (response == null) {
+                response = retrieveFunction.apply(ip);
+                cache.put(cacheKey, response);
+            }
+            return responseType.cast(response);
+        }
+
+        //only useful for testing
+        <T extends AbstractResponse> T get(InetAddress ip, Class<T> responseType) {
+            CacheKey<T> cacheKey = new CacheKey<>(ip, responseType);
+            return responseType.cast(cache.get(cacheKey));
+        }
+
+        /**
+         * The key to use for the cache. Since this cache can span multiple geoip processors that all use different databases, the
+         * response type needs to be included in the cache key. For example, if we only used the IP address as the key, the City and
+         * ASN databases can both contain entries for the same IP with different values, and we need to cache both. The response type
+         * scopes the IP to the correct database and provides a means to safely cast the returned objects.
+         * @param <T> The AbstractResponse type used to scope the key and cast the result.
+         */
+        private static class CacheKey<T extends AbstractResponse> {
+
+            private final InetAddress ip;
+            private final Class<T> responseType;
+
+            private CacheKey(InetAddress ip, Class<T> responseType) {
+                this.ip = ip;
+                this.responseType = responseType;
+            }
+
+            //generated
+            @Override
+            public boolean equals(Object o) {
+                if (this == o) return true;
+                if (o == null || getClass() != o.getClass()) return false;
+                CacheKey cacheKey = (CacheKey) o;
+                return Objects.equals(ip, cacheKey.ip) &&
+                    Objects.equals(responseType, cacheKey.responseType);
+            }
+
+            //generated
+            @Override
+            public int hashCode() {
+                return Objects.hash(ip, responseType);
+            }
+        }
+    }
 }

diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java
deleted file mode 100644
index 71cab99115fc7..0000000000000
--- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpCacheTests.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */ -package org.elasticsearch.ingest.geoip; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.IntNode; -import com.maxmind.db.NodeCache; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.test.ESTestCase; - -public class GeoIpCacheTests extends ESTestCase { - public void testCachesAndEvictsResults() throws Exception { - GeoIpCache cache = new GeoIpCache(1); - final NodeCache.Loader loader = key -> new IntNode(key); - - JsonNode jsonNode1 = cache.get(1, loader); - assertSame(jsonNode1, cache.get(1, loader)); - - // evict old key by adding another value - cache.get(2, loader); - - assertNotSame(jsonNode1, cache.get(1, loader)); - } - - public void testThrowsElasticsearchException() throws Exception { - GeoIpCache cache = new GeoIpCache(1); - NodeCache.Loader loader = (int key) -> { - throw new IllegalArgumentException("Illegal key"); - }; - ElasticsearchException ex = expectThrows(ElasticsearchException.class, () -> cache.get(1, loader)); - assertTrue("Expected cause to be of type IllegalArgumentException but was [" + ex.getCause().getClass() + "]", - ex.getCause() instanceof IllegalArgumentException); - assertEquals("Illegal key", ex.getCause().getMessage()); - } -} diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 7a5d6f5808f76..316cfbc152c57 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -20,11 +20,10 @@ package org.elasticsearch.ingest.geoip; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import com.maxmind.db.NoCache; -import com.maxmind.db.NodeCache; import org.apache.lucene.util.Constants; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Randomness; +import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.AfterClass; @@ -69,8 +68,7 @@ public static void loadDatabaseReaders() throws IOException { Files.copy(new ByteArrayInputStream(StreamsUtils.copyToBytesFromClasspath("/GeoLite2-ASN.mmdb")), geoIpConfigDir.resolve("GeoLite2-ASN.mmdb")); - NodeCache cache = randomFrom(NoCache.getInstance(), new GeoIpCache(randomNonNegativeLong())); - databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir, cache); + databaseReaders = IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir); } @AfterClass @@ -92,7 +90,7 @@ public void testBuildDefaults() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); @@ -111,7 +109,7 @@ public void testSetIgnoreMissing() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. 
// As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); @@ -131,7 +129,7 @@ public void testCountryBuildDefaults() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); @@ -152,7 +150,7 @@ public void testAsnBuildDefaults() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); @@ -173,7 +171,7 @@ public void testBuildTargetField() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); config.put("target_field", "_field"); @@ -187,7 +185,7 @@ public void testBuildDbFile() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); @@ -203,7 +201,7 @@ public void testBuildWithCountryDbAndAsnFields() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. 
assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb"); @@ -220,7 +218,7 @@ public void testBuildWithAsnDbAndCityFields() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-ASN.mmdb"); @@ -237,7 +235,7 @@ public void testBuildNonExistingDbFile() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config = new HashMap<>(); config.put("field", "_field"); @@ -250,7 +248,7 @@ public void testBuildFields() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Set properties = EnumSet.noneOf(GeoIpProcessor.Property.class); List fieldNames = new ArrayList<>(); @@ -277,7 +275,7 @@ public void testBuildIllegalFieldOption() throws Exception { // This test uses a MappedByteBuffer which will keep the file mappings active until it is garbage-collected. // As a consequence, the corresponding file appears to be still in use and Windows cannot delete it. assumeFalse("windows deletion behavior is asinine", Constants.WINDOWS); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); Map config1 = new HashMap<>(); config1.put("field", "_field"); @@ -311,8 +309,8 @@ public void testLazyLoading() throws Exception { // database readers used at class level are reused between tests. 
(we want to keep that otherwise running this // test will take roughly 4 times more time) Map databaseReaders = - IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir, NoCache.getInstance()); - GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders); + IngestGeoIpPlugin.loadDatabaseReaders(geoIpConfigDir); + GeoIpProcessor.Factory factory = new GeoIpProcessor.Factory(databaseReaders, new GeoIpCache(1000)); for (DatabaseReaderLazyLoader lazyLoader : databaseReaders.values()) { assertNull(lazyLoader.databaseReader.get()); } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java index 4c04d4e340a71..4da680f186e5b 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorTests.java @@ -20,8 +20,9 @@ package org.elasticsearch.ingest.geoip; import com.maxmind.geoip2.DatabaseReader; -import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache; import org.elasticsearch.test.ESTestCase; import java.io.InputStream; @@ -40,7 +41,8 @@ public class GeoIpProcessorTests extends ESTestCase { public void testCity() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "8.8.8.8"); @@ -64,7 +66,8 @@ public void testCity() throws Exception { public void testNullValueWithIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, + new GeoIpCache(1000)); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); @@ -75,7 +78,8 @@ public void testNullValueWithIgnoreMissing() throws Exception { public void testNonExistentWithIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), true, + new GeoIpCache(1000)); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); processor.execute(ingestDocument); @@ -85,7 +89,8 
@@ public void testNonExistentWithIgnoreMissing() throws Exception { public void testNullWithoutIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.singletonMap("source_field", null)); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); @@ -96,7 +101,8 @@ public void testNullWithoutIgnoreMissing() throws Exception { public void testNonExistentWithoutIgnoreMissing() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), Collections.emptyMap()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); Exception exception = expectThrows(Exception.class, () -> processor.execute(ingestDocument)); @@ -106,7 +112,8 @@ public void testNonExistentWithoutIgnoreMissing() throws Exception { public void testCity_withIpV6() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); String address = "2602:306:33d3:8000::3257:9652"; Map document = new HashMap<>(); @@ -135,7 +142,8 @@ public void testCity_withIpV6() throws Exception { public void testCityWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "80.231.5.0"); @@ -152,7 +160,8 @@ public void testCityWithMissingLocation() throws Exception { public void testCountry() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "82.170.213.79"); @@ -172,7 +181,8 @@ public 
void testCountry() throws Exception { public void testCountryWithMissingLocation() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-Country.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "80.231.5.0"); @@ -190,7 +200,8 @@ public void testAsn() throws Exception { String ip = "82.170.213.79"; InputStream database = getDatabaseFileInputStream("/GeoLite2-ASN.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", ip); @@ -209,7 +220,8 @@ public void testAsn() throws Exception { public void testAddressIsNotInTheDatabase() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "127.0.0.1"); @@ -222,7 +234,8 @@ public void testAddressIsNotInTheDatabase() throws Exception { public void testInvalid() throws Exception { InputStream database = getDatabaseFileInputStream("/GeoLite2-City.mmdb"); GeoIpProcessor processor = new GeoIpProcessor(randomAlphaOfLength(10), "source_field", - new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false); + new DatabaseReader.Builder(database).build(), "target_field", EnumSet.allOf(GeoIpProcessor.Property.class), false, + new GeoIpCache(1000)); Map document = new HashMap<>(); document.put("source_field", "www.google.com"); diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpPluginTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpPluginTests.java new file mode 100644 index 0000000000000..884056bb0be8b --- /dev/null +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpPluginTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest.geoip;
+
+import com.maxmind.geoip2.model.AbstractResponse;
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.ingest.geoip.IngestGeoIpPlugin.GeoIpCache;
+import org.elasticsearch.test.ESTestCase;
+
+import static org.mockito.Mockito.mock;
+
+public class IngestGeoIpPluginTests extends ESTestCase {
+
+    public void testCachesAndEvictsResults() {
+        GeoIpCache cache = new GeoIpCache(1);
+        AbstractResponse response1 = mock(AbstractResponse.class);
+        AbstractResponse response2 = mock(AbstractResponse.class);
+
+        //add a key
+        AbstractResponse cachedResponse = cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class, ip -> response1);
+        assertSame(cachedResponse, response1);
+        assertSame(cachedResponse, cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class, ip -> response1));
+        assertSame(cachedResponse, cache.get(InetAddresses.forString("127.0.0.1"), AbstractResponse.class));
+
+        // evict old key by adding another value
+        cachedResponse = cache.putIfAbsent(InetAddresses.forString("127.0.0.2"), AbstractResponse.class, ip -> response2);
+        assertSame(cachedResponse, response2);
+        assertSame(cachedResponse, cache.putIfAbsent(InetAddresses.forString("127.0.0.2"), AbstractResponse.class, ip -> response2));
+        assertSame(cachedResponse, cache.get(InetAddresses.forString("127.0.0.2"), AbstractResponse.class));
+
+        assertNotSame(response1, cache.get(InetAddresses.forString("127.0.0.1"), AbstractResponse.class));
+    }
+
+    public void testThrowsFunctionsException() {
+        GeoIpCache cache = new GeoIpCache(1);
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
+            () -> cache.putIfAbsent(InetAddresses.forString("127.0.0.1"), AbstractResponse.class,
+                ip -> { throw new IllegalArgumentException("bad"); }));
+        assertEquals("bad", ex.getMessage());
+    }
+
+    public void testInvalidInit() {
+        IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () -> new GeoIpCache(-1));
+        assertEquals("geoip max cache size must be 0 or greater", ex.getMessage());
+    }
+}

From 88c178dca6587c6f7f0f7b78951284553df0d8b3 Mon Sep 17 00:00:00 2001
From: Tim Brooks
Date: Wed, 5 Sep 2018 16:12:10 -0600
Subject: [PATCH 03/91] Add sni name to SSLEngine in netty transport (#33144)

This commit is related to #32517. It allows a "server_name" attribute
on a DiscoveryNode to be propagated to the server using the TLS SNI
extension. This functionality is only implemented for the netty
security transport.
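As a rough sketch (standard JSSE APIs only; the class and method names
here are illustrative), attaching an SNI name to a client-mode SSLEngine
looks like this, which is essentially what the security transport's
channel initializer does in the diff below:

    import javax.net.ssl.SNIHostName;
    import javax.net.ssl.SSLContext;
    import javax.net.ssl.SSLEngine;
    import javax.net.ssl.SSLParameters;
    import java.util.Collections;

    final class SniSketch {
        // Configure a client-mode SSLEngine to send the given SNI host name
        // during the TLS handshake.
        static SSLEngine clientEngineWithSni(SSLContext context, String serverName) {
            SSLEngine engine = context.createSSLEngine();
            engine.setUseClientMode(true);
            SSLParameters parameters = engine.getSSLParameters();
            // SNIHostName rejects non-LDH names (e.g. names containing '_'),
            // which is why the node attribute is validated at node startup.
            parameters.setServerNames(Collections.singletonList(new SNIHostName(serverName)));
            engine.setSSLParameters(parameters);
            return engine;
        }
    }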
--- .../transport/netty4/Netty4Transport.java | 34 +- .../java/org/elasticsearch/node/Node.java | 8 + .../org/elasticsearch/node/NodeTests.java | 19 + .../AbstractSimpleTransportTestCase.java | 2 +- .../netty4/SecurityNetty4Transport.java | 35 +- .../SimpleSecurityNetty4TransportTests.java | 383 ++++++++++++++++++ .../nio/SimpleSecurityNioTransportTests.java | 9 +- 7 files changed, 466 insertions(+), 24 deletions(-) create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index 0edd12a44e8c1..009a75b3e3301 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -97,12 +97,12 @@ public class Netty4Transport extends TcpTransport { intSetting("transport.netty.boss_count", 1, 1, Property.NodeScope); - protected final RecvByteBufAllocator recvByteBufAllocator; - protected final int workerCount; - protected final ByteSizeValue receivePredictorMin; - protected final ByteSizeValue receivePredictorMax; - protected volatile Bootstrap bootstrap; - protected final Map serverBootstraps = newConcurrentMap(); + private final RecvByteBufAllocator recvByteBufAllocator; + private final int workerCount; + private final ByteSizeValue receivePredictorMin; + private final ByteSizeValue receivePredictorMax; + private volatile Bootstrap clientBootstrap; + private final Map serverBootstraps = newConcurrentMap(); public Netty4Transport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, NamedWriteableRegistry namedWriteableRegistry, CircuitBreakerService circuitBreakerService) { @@ -125,7 +125,7 @@ public Netty4Transport(Settings settings, ThreadPool threadPool, NetworkService protected void doStart() { boolean success = false; try { - bootstrap = createBootstrap(); + clientBootstrap = createClientBootstrap(); if (NetworkService.NETWORK_SERVER.get(settings)) { for (ProfileSettings profileSettings : profileSettings) { createServerBootstrap(profileSettings); @@ -141,13 +141,11 @@ protected void doStart() { } } - private Bootstrap createBootstrap() { + private Bootstrap createClientBootstrap() { final Bootstrap bootstrap = new Bootstrap(); bootstrap.group(new NioEventLoopGroup(workerCount, daemonThreadFactory(settings, TRANSPORT_CLIENT_BOSS_THREAD_NAME_PREFIX))); bootstrap.channel(NioSocketChannel.class); - bootstrap.handler(getClientChannelInitializer()); - bootstrap.option(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); bootstrap.option(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); @@ -166,8 +164,6 @@ private Bootstrap createBootstrap() { final boolean reuseAddress = TCP_REUSE_ADDRESS.get(settings); bootstrap.option(ChannelOption.SO_REUSEADDR, reuseAddress); - bootstrap.validate(); - return bootstrap; } @@ -216,7 +212,7 @@ protected ChannelHandler getServerChannelInitializer(String name) { return new ServerChannelInitializer(name); } - protected ChannelHandler getClientChannelInitializer() { + protected ChannelHandler getClientChannelInitializer(DiscoveryNode node) { return new ClientChannelInitializer(); } @@ -226,7 +222,11 @@ protected ChannelHandler getClientChannelInitializer() { @Override protected 
Netty4TcpChannel initiateChannel(DiscoveryNode node, ActionListener listener) throws IOException { InetSocketAddress address = node.getAddress().address(); - ChannelFuture channelFuture = bootstrap.connect(address); + Bootstrap bootstrapWithHandler = clientBootstrap.clone(); + bootstrapWithHandler.handler(getClientChannelInitializer(node)); + bootstrapWithHandler.remoteAddress(address); + ChannelFuture channelFuture = bootstrapWithHandler.connect(); + Channel channel = channelFuture.channel(); if (channel == null) { ExceptionsHelper.maybeDieOnAnotherThread(channelFuture.cause()); @@ -289,9 +289,9 @@ protected void stopInternal() { } serverBootstraps.clear(); - if (bootstrap != null) { - bootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); - bootstrap = null; + if (clientBootstrap != null) { + clientBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS).awaitUninterruptibly(); + clientBootstrap = null; } }); } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index eff1a796e313e..9ead528c974da 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -150,6 +150,7 @@ import org.elasticsearch.usage.UsageService; import org.elasticsearch.watcher.ResourceWatcherService; +import javax.net.ssl.SNIHostName; import java.io.BufferedWriter; import java.io.Closeable; import java.io.IOException; @@ -209,6 +210,13 @@ public class Node implements Closeable { throw new IllegalArgumentException(key + " cannot have leading or trailing whitespace " + "[" + value + "]"); } + if (value.length() > 0 && "node.attr.server_name".equals(key)) { + try { + new SNIHostName(value); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("invalid node.attr.server_name [" + value + "]", e ); + } + } return value; }, Property.NodeScope)); public static final Setting BREAKER_TYPE_KEY = new Setting<>("indices.breaker.type", "hierarchy", (s) -> { diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java b/server/src/test/java/org/elasticsearch/node/NodeTests.java index dfb1de9e5567d..d3e1c9641f968 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -156,6 +156,25 @@ public void testNodeAttributes() throws IOException { } } + public void testServerNameNodeAttribute() throws IOException { + String attr = "valid-hostname"; + Settings.Builder settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "server_name", attr); + int i = 0; + try (Node node = new MockNode(settings.build(), basePlugins())) { + final Settings nodeSettings = randomBoolean() ? 
node.settings() : node.getEnvironment().settings(); + assertEquals(attr, Node.NODE_ATTRIBUTES.getAsMap(nodeSettings).get("server_name")); + } + + // non-LDH hostname not allowed + attr = "invalid_hostname"; + settings = baseSettings().put(Node.NODE_ATTRIBUTES.getKey() + "server_name", attr); + try (Node node = new MockNode(settings.build(), basePlugins())) { + fail("should not allow a server_name attribute with an underscore"); + } catch (IllegalArgumentException e) { + assertEquals("invalid node.attr.server_name [invalid_hostname]", e.getMessage()); + } + } + private static Settings.Builder baseSettings() { final Path tempDir = createTempDir(); return Settings.builder() diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index c485f9d45bda4..989afd04dab99 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2686,7 +2686,7 @@ private void closeConnectionChannel(Transport.Connection connection) { } @SuppressForbidden(reason = "need local ephemeral port") - private InetSocketAddress getLocalEphemeral() throws UnknownHostException { + protected InetSocketAddress getLocalEphemeral() throws UnknownHostException { return new InetSocketAddress(InetAddress.getLocalHost(), 0); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java index f828a82d95f6c..36b480c29c7fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/transport/netty4/SecurityNetty4Transport.java @@ -12,6 +12,7 @@ import io.netty.channel.ChannelPromise; import io.netty.handler.ssl.SslHandler; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; @@ -19,6 +20,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.netty4.Netty4Transport; @@ -27,7 +29,10 @@ import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; +import javax.net.ssl.SNIHostName; +import javax.net.ssl.SNIServerName; import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLParameters; import java.net.InetSocketAddress; import java.net.SocketAddress; import java.util.Collections; @@ -106,8 +111,8 @@ protected ChannelHandler getNoSslChannelInitializer(final String name) { } @Override - protected ChannelHandler getClientChannelInitializer() { - return new SecurityClientChannelInitializer(); + protected ChannelHandler getClientChannelInitializer(DiscoveryNode node) { + return new SecurityClientChannelInitializer(node); } @Override @@ 
-167,16 +172,28 @@ protected ServerChannelInitializer getSslChannelInitializer(final String name, f private class SecurityClientChannelInitializer extends ClientChannelInitializer { private final boolean hostnameVerificationEnabled; + private final SNIHostName serverName; - SecurityClientChannelInitializer() { + SecurityClientChannelInitializer(DiscoveryNode node) { this.hostnameVerificationEnabled = sslEnabled && sslConfiguration.verificationMode().isHostnameVerificationEnabled(); + String configuredServerName = node.getAttributes().get("server_name"); + if (configuredServerName != null) { + try { + serverName = new SNIHostName(configuredServerName); + } catch (IllegalArgumentException e) { + throw new ConnectTransportException(node, "invalid DiscoveryNode server_name [" + configuredServerName + "]", e); + } + } else { + serverName = null; + } } @Override protected void initChannel(Channel ch) throws Exception { super.initChannel(ch); if (sslEnabled) { - ch.pipeline().addFirst(new ClientSslHandlerInitializer(sslConfiguration, sslService, hostnameVerificationEnabled)); + ch.pipeline().addFirst(new ClientSslHandlerInitializer(sslConfiguration, sslService, hostnameVerificationEnabled, + serverName)); } } } @@ -186,11 +203,14 @@ private static class ClientSslHandlerInitializer extends ChannelOutboundHandlerA private final boolean hostnameVerificationEnabled; private final SSLConfiguration sslConfiguration; private final SSLService sslService; + private final SNIServerName serverName; - private ClientSslHandlerInitializer(SSLConfiguration sslConfiguration, SSLService sslService, boolean hostnameVerificationEnabled) { + private ClientSslHandlerInitializer(SSLConfiguration sslConfiguration, SSLService sslService, boolean hostnameVerificationEnabled, + SNIServerName serverName) { this.sslConfiguration = sslConfiguration; this.hostnameVerificationEnabled = hostnameVerificationEnabled; this.sslService = sslService; + this.serverName = serverName; } @Override @@ -207,6 +227,11 @@ public void connect(ChannelHandlerContext ctx, SocketAddress remoteAddress, } sslEngine.setUseClientMode(true); + if (serverName != null) { + SSLParameters sslParameters = sslEngine.getSSLParameters(); + sslParameters.setServerNames(Collections.singletonList(serverName)); + sslEngine.setSSLParameters(sslParameters); + } ctx.pipeline().replace(this, "ssl", new SslHandler(sslEngine)); super.connect(ctx, remoteAddress, localAddress, promise); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java new file mode 100644 index 0000000000000..5181f3a747ead --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4TransportTests.java @@ -0,0 +1,383 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.security.transport.netty4; + +import io.netty.bootstrap.ServerBootstrap; +import io.netty.channel.Channel; +import io.netty.channel.ChannelFuture; +import io.netty.channel.ChannelInitializer; +import io.netty.channel.nio.NioEventLoopGroup; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.handler.ssl.SslHandler; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.AbstractSimpleTransportTestCase; +import org.elasticsearch.transport.BindTransportException; +import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; +import org.elasticsearch.transport.TcpChannel; +import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.common.socket.SocketAccess; +import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; +import org.elasticsearch.xpack.core.ssl.SSLConfiguration; +import org.elasticsearch.xpack.core.ssl.SSLService; + +import javax.net.SocketFactory; +import javax.net.ssl.HandshakeCompletedListener; +import javax.net.ssl.SNIHostName; +import javax.net.ssl.SNIMatcher; +import javax.net.ssl.SNIServerName; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLParameters; +import javax.net.ssl.SSLSocket; +import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.SocketTimeoutException; +import java.net.UnknownHostException; +import java.nio.file.Path; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.elasticsearch.xpack.core.security.SecurityField.setting; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + +public class SimpleSecurityNetty4TransportTests extends AbstractSimpleTransportTestCase { + + private static final ConnectionProfile SINGLE_CHANNEL_PROFILE; + + static { + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.addConnections(1, + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE); + 
SINGLE_CHANNEL_PROFILE = builder.build(); + } + + private SSLService createSSLService() { + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); + Settings settings = Settings.builder() + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); + try { + return new SSLService(settings, TestEnvironment.newEnvironment(settings)); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public MockTransportService nettyFromThreadPool(Settings settings, ThreadPool threadPool, final Version version, + ClusterSettings clusterSettings, boolean doHandshake) { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList()); + NetworkService networkService = new NetworkService(Collections.emptyList()); + Settings settings1 = Settings.builder() + .put(settings) + .put("xpack.security.transport.ssl.enabled", true).build(); + Transport transport = new SecurityNetty4Transport(settings1, threadPool, + networkService, BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, + new NoneCircuitBreakerService(), createSSLService()) { + + @Override + protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, + InterruptedException { + if (doHandshake) { + return super.executeHandshake(node, channel, timeout); + } else { + return version.minimumCompatibilityVersion(); + } + } + + @Override + protected Version getCurrentVersion() { + return version; + } + + }; + MockTransportService mockTransportService = + MockTransportService.createNewService(Settings.EMPTY, transport, version, threadPool, clusterSettings, + Collections.emptySet()); + mockTransportService.start(); + return mockTransportService; + } + + @Override + protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) { + settings = Settings.builder().put(settings) + .put(TcpTransport.PORT.getKey(), "0") + .build(); + MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version, clusterSettings, doHandshake); + transportService.start(); + return transportService; + } + + public void testConnectException() throws UnknownHostException { + try { + serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876), + emptyMap(), emptySet(), Version.CURRENT)); + fail("Expected ConnectTransportException"); + } catch (ConnectTransportException e) { + assertThat(e.getMessage(), containsString("connect_exception")); + assertThat(e.getMessage(), containsString("[127.0.0.1:9876]")); + Throwable cause = e.getCause(); + assertThat(cause, instanceOf(IOException.class)); + } + } + + public void testBindUnavailableAddress() { + // this is on a lower level since it needs access to the TransportService before it's started + int port = serviceA.boundAddress().publishAddress().getPort(); + Settings settings = Settings.builder() + .put(Node.NODE_NAME_SETTING.getKey(), "foobar") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + 
.put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .put("transport.tcp.port", port) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> { + MockTransportService transportService = nettyFromThreadPool(settings, threadPool, Version.CURRENT, clusterSettings, true); + try { + transportService.start(); + } finally { + transportService.stop(); + transportService.close(); + } + }); + assertEquals("Failed to bind to [" + port + "]", bindTransportException.getMessage()); + } + + @SuppressForbidden(reason = "Need to open socket connection") + public void testRenegotiation() throws Exception { + SSLService sslService = createSSLService(); + final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); + SocketFactory factory = sslService.sslSocketFactory(sslConfiguration); + try (SSLSocket socket = (SSLSocket) factory.createSocket()) { + SocketAccess.doPrivileged(() -> socket.connect(serviceA.boundAddress().publishAddress().address())); + + CountDownLatch handshakeLatch = new CountDownLatch(1); + HandshakeCompletedListener firstListener = event -> handshakeLatch.countDown(); + socket.addHandshakeCompletedListener(firstListener); + socket.startHandshake(); + handshakeLatch.await(); + socket.removeHandshakeCompletedListener(firstListener); + + OutputStreamStreamOutput stream = new OutputStreamStreamOutput(socket.getOutputStream()); + stream.writeByte((byte) 'E'); + stream.writeByte((byte) 'S'); + stream.writeInt(-1); + stream.flush(); + + socket.startHandshake(); + CountDownLatch renegotiationLatch = new CountDownLatch(1); + HandshakeCompletedListener secondListener = event -> renegotiationLatch.countDown(); + socket.addHandshakeCompletedListener(secondListener); + + AtomicReference error = new AtomicReference<>(); + CountDownLatch catchReadErrorsLatch = new CountDownLatch(1); + Thread renegotiationThread = new Thread(() -> { + try { + socket.setSoTimeout(50); + socket.getInputStream().read(); + } catch (SocketTimeoutException e) { + // Ignore. We expect a timeout. 
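+                        // (The 50ms SO_TIMEOUT set above only exists so this blocking read can
+                        // drive the TLS renegotiation; no application data is expected, so timing
+                        // out is the success path, while any other IOException is recorded and
+                        // checked via assertNull(error.get()) once the latches complete.)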
+ } catch (IOException e) { + error.set(e); + } finally { + catchReadErrorsLatch.countDown(); + } + }); + renegotiationThread.start(); + renegotiationLatch.await(); + socket.removeHandshakeCompletedListener(secondListener); + catchReadErrorsLatch.await(); + + assertNull(error.get()); + + stream.writeByte((byte) 'E'); + stream.writeByte((byte) 'S'); + stream.writeInt(-1); + stream.flush(); + } + } + + // TODO: These tests currently rely on plaintext transports + + @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") + public void testTcpHandshake() { + } + + // TODO: These tests as configured do not currently work with the security transport + + @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") + public void testTransportProfilesWithPortAndHost() { + } + + public void testSNIServerNameIsPropagated() throws Exception { + SSLService sslService = createSSLService(); + final ServerBootstrap serverBootstrap = new ServerBootstrap(); + boolean success = false; + try { + serverBootstrap.group(new NioEventLoopGroup(1)); + serverBootstrap.channel(NioServerSocketChannel.class); + + final String sniIp = "sni-hostname"; + final SNIHostName sniHostName = new SNIHostName(sniIp); + final CountDownLatch latch = new CountDownLatch(2); + serverBootstrap.childHandler(new ChannelInitializer() { + + @Override + protected void initChannel(Channel ch) { + SSLEngine serverEngine = sslService.createSSLEngine(sslService.getSSLConfiguration(setting("transport.ssl.")), + null, -1); + serverEngine.setUseClientMode(false); + SSLParameters sslParameters = serverEngine.getSSLParameters(); + sslParameters.setSNIMatchers(Collections.singletonList(new SNIMatcher(0) { + @Override + public boolean matches(SNIServerName sniServerName) { + if (sniHostName.equals(sniServerName)) { + latch.countDown(); + return true; + } else { + return false; + } + } + })); + serverEngine.setSSLParameters(sslParameters); + final SslHandler sslHandler = new SslHandler(serverEngine); + sslHandler.handshakeFuture().addListener(future -> latch.countDown()); + ch.pipeline().addFirst("sslhandler", sslHandler); + } + }); + serverBootstrap.validate(); + ChannelFuture serverFuture = serverBootstrap.bind(getLocalEphemeral()); + serverFuture.await(); + InetSocketAddress serverAddress = (InetSocketAddress) serverFuture.channel().localAddress(); + + try (MockTransportService serviceC = build( + Settings.builder() + .put("name", "TS_TEST") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version0, + null, true)) { + serviceC.acceptIncomingRequests(); + + HashMap attributes = new HashMap<>(); + attributes.put("server_name", sniIp); + DiscoveryNode node = new DiscoveryNode("server_node_id", new TransportAddress(serverAddress), attributes, + EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT); + + new Thread(() -> { + try { + serviceC.connectToNode(node, SINGLE_CHANNEL_PROFILE); + } catch (ConnectTransportException ex) { + // Ignore. The other side is not setup to do the ES handshake. So this will fail. 
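+                    // Although the connect attempt ultimately fails, opening the channel still
+                    // runs the TLS handshake, during which the client initializer propagates the
+                    // node's server_name attribute as SNI, in essence (a sketch of the
+                    // ClientSslHandlerInitializer#connect logic shown earlier in this patch):
+                    //   SSLParameters sslParameters = sslEngine.getSSLParameters();
+                    //   sslParameters.setServerNames(Collections.singletonList(new SNIHostName("sni-hostname")));
+                    //   sslEngine.setSSLParameters(sslParameters);
+                    // The server-side SNIMatcher above counts down the latch when it sees that name.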
+ } + }).start(); + + latch.await(); + serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS); + success = true; + } + } finally { + if (success == false) { + serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS); + } + } + } + + public void testInvalidSNIServerName() throws Exception { + SSLService sslService = createSSLService(); + final ServerBootstrap serverBootstrap = new ServerBootstrap(); + boolean success = false; + try { + serverBootstrap.group(new NioEventLoopGroup(1)); + serverBootstrap.channel(NioServerSocketChannel.class); + + final String sniIp = "invalid_hostname"; + serverBootstrap.childHandler(new ChannelInitializer() { + + @Override + protected void initChannel(Channel ch) { + SSLEngine serverEngine = sslService.createSSLEngine(sslService.getSSLConfiguration(setting("transport.ssl.")), + null, -1); + serverEngine.setUseClientMode(false); + final SslHandler sslHandler = new SslHandler(serverEngine); + ch.pipeline().addFirst("sslhandler", sslHandler); + } + }); + serverBootstrap.validate(); + ChannelFuture serverFuture = serverBootstrap.bind(getLocalEphemeral()); + serverFuture.await(); + InetSocketAddress serverAddress = (InetSocketAddress) serverFuture.channel().localAddress(); + + try (MockTransportService serviceC = build( + Settings.builder() + .put("name", "TS_TEST") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version0, + null, true)) { + serviceC.acceptIncomingRequests(); + + HashMap attributes = new HashMap<>(); + attributes.put("server_name", sniIp); + DiscoveryNode node = new DiscoveryNode("server_node_id", new TransportAddress(serverAddress), attributes, + EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT); + + ConnectTransportException connectException = expectThrows(ConnectTransportException.class, + () -> serviceC.connectToNode(node, SINGLE_CHANNEL_PROFILE)); + + assertThat(connectException.getMessage(), containsString("invalid DiscoveryNode server_name [invalid_hostname]")); + + serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS); + success = true; + } + } finally { + if (success == false) { + serverBootstrap.config().group().shutdownGracefully(0, 5, TimeUnit.SECONDS); + } + } + } +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 7397ebc8c7dc4..1b8e35651b64f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -208,7 +208,14 @@ public void testRenegotiation() throws Exception { // TODO: These tests currently rely on plaintext transports @Override - @AwaitsFix(bugUrl = "") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") public void testTcpHandshake() throws IOException, InterruptedException { } + + // TODO: These tests as configured do not currently work with the security transport + + @Override + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33285") + public void testTransportProfilesWithPortAndHost() { + } } From b697f485bb4815b231f4accb5725fdc237214aef Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Wed, 
5 Sep 2018 16:12:37 -0600 Subject: [PATCH 04/91] Introduce `TransportLogger` for common logging (#32725) Historically we have had an ESLoggingHandler in the netty module that logs low-level connection operations. This class just extends the netty logging handler with some (broken) message deserialization. This commit fixes this message deserialization and moves the class to server. This new logger logs inbound and outbound messages. Eventually, we should move other event logging to this class (connect, close, flush). That way we will have consistent logging regardless of which transport is loaded. Resolves #27306 on master. Older branches will need a different fix. --- .../transport/netty4/ESLoggingHandler.java | 102 +-------------- .../netty4/Netty4MessageChannelHandler.java | 18 +-- .../netty4/Netty4SizeHeaderFrameDecoder.java | 24 ++-- .../transport/netty4/Netty4Utils.java | 1 - .../transport/netty4/ESLoggingHandlerIT.java | 9 +- .../transport/nio/NioTransportLoggingIT.java | 79 ++++++++++ .../elasticsearch/transport/TcpTransport.java | 44 +++++-- .../transport/TransportLogger.java | 122 ++++++++++++++++++ .../transport/TransportLoggerTests.java | 116 +++++++++++++++++ 9 files changed, 376 insertions(+), 139 deletions(-) create mode 100644 plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java create mode 100644 server/src/main/java/org/elasticsearch/transport/TransportLogger.java create mode 100644 server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java index 5c275f63be885..3f4eb0695fac2 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/ESLoggingHandler.java @@ -19,21 +19,9 @@ package org.elasticsearch.transport.netty4; -import io.netty.buffer.ByteBuf; import io.netty.channel.ChannelHandlerContext; import io.netty.handler.logging.LogLevel; import io.netty.handler.logging.LoggingHandler; -import org.elasticsearch.Version; -import org.elasticsearch.common.compress.Compressor; -import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.transport.TcpHeader; -import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.TransportStatus; - -import java.io.IOException; final class ESLoggingHandler extends LoggingHandler { @@ -42,92 +30,8 @@ final class ESLoggingHandler extends LoggingHandler { } @Override - protected String format(final ChannelHandlerContext ctx, final String eventName, final Object arg) { - if (arg instanceof ByteBuf) { - try { - return format(ctx, eventName, (ByteBuf) arg); - } catch (final Exception e) { - // we really do not want to allow a bug in the formatting handling to escape - logger.trace("an exception occurred formatting a trace message", e); - // we are going to let this be formatted via the default formatting - return super.format(ctx, eventName, arg); - } - } else { - return super.format(ctx, eventName, arg); - } - } - - private static final int MESSAGE_LENGTH_OFFSET = TcpHeader.MARKER_BYTES_SIZE; - private static final int REQUEST_ID_OFFSET =
MESSAGE_LENGTH_OFFSET + TcpHeader.MESSAGE_LENGTH_SIZE; - private static final int STATUS_OFFSET = REQUEST_ID_OFFSET + TcpHeader.REQUEST_ID_SIZE; - private static final int VERSION_ID_OFFSET = STATUS_OFFSET + TcpHeader.STATUS_SIZE; - private static final int ACTION_OFFSET = VERSION_ID_OFFSET + TcpHeader.VERSION_ID_SIZE; - - private String format(final ChannelHandlerContext ctx, final String eventName, final ByteBuf arg) throws IOException { - final int readableBytes = arg.readableBytes(); - if (readableBytes == 0) { - return super.format(ctx, eventName, arg); - } else if (readableBytes >= 2) { - final StringBuilder sb = new StringBuilder(); - sb.append(ctx.channel().toString()); - final int offset = arg.readerIndex(); - // this might be an ES message, check the header - if (arg.getByte(offset) == (byte) 'E' && arg.getByte(offset + 1) == (byte) 'S') { - if (readableBytes == TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE) { - final int length = arg.getInt(offset + MESSAGE_LENGTH_OFFSET); - if (length == TcpTransport.PING_DATA_SIZE) { - sb.append(" [ping]").append(' ').append(eventName).append(": ").append(readableBytes).append('B'); - return sb.toString(); - } - } - else if (readableBytes >= TcpHeader.HEADER_SIZE) { - // we are going to try to decode this as an ES message - final int length = arg.getInt(offset + MESSAGE_LENGTH_OFFSET); - final long requestId = arg.getLong(offset + REQUEST_ID_OFFSET); - final byte status = arg.getByte(offset + STATUS_OFFSET); - final boolean isRequest = TransportStatus.isRequest(status); - final String type = isRequest ? "request" : "response"; - final String version = Version.fromId(arg.getInt(offset + VERSION_ID_OFFSET)).toString(); - sb.append(" [length: ").append(length); - sb.append(", request id: ").append(requestId); - sb.append(", type: ").append(type); - sb.append(", version: ").append(version); - if (isRequest) { - // it looks like an ES request, try to decode the action - final int remaining = readableBytes - ACTION_OFFSET; - final ByteBuf slice = arg.slice(offset + ACTION_OFFSET, remaining); - // the stream might be compressed - try (StreamInput in = in(status, slice, remaining)) { - // the first bytes in the message is the context headers - try (ThreadContext context = new ThreadContext(Settings.EMPTY)) { - context.readHeaders(in); - } - // now we decode the features - if (in.getVersion().onOrAfter(Version.V_6_3_0)) { - in.readStringArray(); - } - // now we can decode the action name - sb.append(", action: ").append(in.readString()); - } - } - sb.append(']'); - sb.append(' ').append(eventName).append(": ").append(readableBytes).append('B'); - return sb.toString(); - } - } - } - // we could not decode this as an ES message, use the default formatting - return super.format(ctx, eventName, arg); + public void channelReadComplete(ChannelHandlerContext ctx) throws Exception { + // We do not want to log read complete events because we log inbound messages in the TcpTransport. 
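+        // Forwarding the event directly, rather than delegating to LoggingHandler,
+        // suppresses the superclass's read-complete trace line while still notifying
+        // the remainder of the pipeline.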
+ ctx.fireChannelReadComplete(); } - - private StreamInput in(final Byte status, final ByteBuf slice, final int remaining) throws IOException { - final ByteBufStreamInput in = new ByteBufStreamInput(slice, remaining); - if (TransportStatus.isCompress(status)) { - final Compressor compressor = CompressorFactory.compressor(Netty4Utils.toBytesReference(slice)); - return compressor.streamInput(in); - } else { - return in; - } - } - } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java index 620b5cb13c6bb..29ae47df06f87 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4MessageChannelHandler.java @@ -26,8 +26,6 @@ import io.netty.util.Attribute; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.transport.TcpHeader; import org.elasticsearch.transport.Transports; @@ -46,23 +44,15 @@ final class Netty4MessageChannelHandler extends ChannelDuplexHandler { @Override public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception { Transports.assertTransportThread(); - if (!(msg instanceof ByteBuf)) { - ctx.fireChannelRead(msg); - return; - } + assert msg instanceof ByteBuf : "Expected message type ByteBuf, found: " + msg.getClass(); + final ByteBuf buffer = (ByteBuf) msg; - final int remainingMessageSize = buffer.getInt(buffer.readerIndex() - TcpHeader.MESSAGE_LENGTH_SIZE); - final int expectedReaderIndex = buffer.readerIndex() + remainingMessageSize; try { Channel channel = ctx.channel(); - // netty always copies a buffer, either in NioWorker in its read handler, where it copies to a fresh - // buffer, or in the cumulative buffer, which is cleaned each time so it could be bigger than the actual size - BytesReference reference = Netty4Utils.toBytesReference(buffer, remainingMessageSize); Attribute channelAttribute = channel.attr(Netty4Transport.CHANNEL_KEY); - transport.messageReceived(reference, channelAttribute.get()); + transport.inboundMessage(channelAttribute.get(), Netty4Utils.toBytesReference(buffer)); } finally { - // Set the expected position of the buffer, no matter what happened - buffer.readerIndex(expectedReaderIndex); + buffer.release(); } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java index 40eabfc126357..1951d789b65a8 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4SizeHeaderFrameDecoder.java @@ -23,7 +23,6 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.ByteToMessageDecoder; import io.netty.handler.codec.TooLongFrameException; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.transport.TcpHeader; import org.elasticsearch.transport.TcpTransport; @@ -36,17 +35,20 @@ final class Netty4SizeHeaderFrameDecoder extends ByteToMessageDecoder { @Override protected void decode(ChannelHandlerContext ctx, ByteBuf in, List out) throws 
Exception { try { - BytesReference networkBytes = Netty4Utils.toBytesReference(in); - int messageLength = TcpTransport.readMessageLength(networkBytes); - // If the message length is -1, we have not read a complete header. - if (messageLength != -1) { - int messageLengthWithHeader = messageLength + HEADER_SIZE; - // If the message length is greater than the network bytes available, we have not read a complete frame. - if (messageLengthWithHeader <= networkBytes.length()) { - final ByteBuf message = in.skipBytes(HEADER_SIZE); - // 6 bytes would mean it is a ping. And we should ignore. - if (messageLengthWithHeader != 6) { + boolean continueDecode = true; + while (continueDecode) { + int messageLength = TcpTransport.readMessageLength(Netty4Utils.toBytesReference(in)); + if (messageLength == -1) { + continueDecode = false; + } else { + int messageLengthWithHeader = messageLength + HEADER_SIZE; + // If the message length is greater than the network bytes available, we have not read a complete frame. + if (messageLengthWithHeader > in.readableBytes()) { + continueDecode = false; + } else { + final ByteBuf message = in.retainedSlice(in.readerIndex() + HEADER_SIZE, messageLength); out.add(message); + in.readerIndex(in.readerIndex() + messageLengthWithHeader); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 655dafdd28981..76d7864c71692 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -156,5 +156,4 @@ public static void closeChannels(final Collection channels) throws IOEx throw closingExceptions; } } - } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index acd71749e2333..abe02cdf4c1cd 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -26,9 +26,10 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.TransportLogger; @ESIntegTestCase.ClusterScope(numDataNodes = 2) -@TestLogging(value = "org.elasticsearch.transport.netty4.ESLoggingHandler:trace") +@TestLogging(value = "org.elasticsearch.transport.netty4.ESLoggingHandler:trace,org.elasticsearch.transport.TransportLogger:trace") public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { private MockLogAppender appender; @@ -37,11 +38,13 @@ public void setUp() throws Exception { super.setUp(); appender = new MockLogAppender(); Loggers.addAppender(Loggers.getLogger(ESLoggingHandler.class), appender); + Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender); appender.start(); } public void tearDown() throws Exception { Loggers.removeAppender(Loggers.getLogger(ESLoggingHandler.class), appender); + Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender); appender.stop(); super.tearDown(); } @@ -56,7 +59,7 @@ public void testLoggingHandler() throws IllegalAccessException { " WRITE: \\d+B"; final MockLogAppender.LoggingExpectation writeExpectation 
= new MockLogAppender.PatternSeenEventExcpectation( - "hot threads request", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, writePattern); + "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); final MockLogAppender.LoggingExpectation flushExpectation = new MockLogAppender.SeenEventExpectation("flush", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, "*FLUSH*"); @@ -71,7 +74,7 @@ public void testLoggingHandler() throws IllegalAccessException { final MockLogAppender.LoggingExpectation readExpectation = new MockLogAppender.PatternSeenEventExcpectation( - "hot threads request", ESLoggingHandler.class.getCanonicalName(), Level.TRACE, readPattern); + "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); appender.addExpectation(writeExpectation); appender.addExpectation(flushExpectation); diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java new file mode 100644 index 0000000000000..b29df77cae1bb --- /dev/null +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/transport/nio/NioTransportLoggingIT.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport.nio; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.NioIntegTestCase; +import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; +import org.elasticsearch.transport.TransportLogger; + +@ESIntegTestCase.ClusterScope(numDataNodes = 2) +@TestLogging(value = "org.elasticsearch.transport.TransportLogger:trace") +public class NioTransportLoggingIT extends NioIntegTestCase { + + private MockLogAppender appender; + + public void setUp() throws Exception { + super.setUp(); + appender = new MockLogAppender(); + Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender); + appender.start(); + } + + public void tearDown() throws Exception { + Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender); + appender.stop(); + super.tearDown(); + } + + public void testLoggingHandler() throws IllegalAccessException { + final String writePattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " WRITE: \\d+B"; + final MockLogAppender.LoggingExpectation writeExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); + + final String readPattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/nodes/hot_threads\\[n\\]\\]" + + " READ: \\d+B"; + + final MockLogAppender.LoggingExpectation readExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); + + appender.addExpectation(writeExpectation); + appender.addExpectation(readExpectation); + client().admin().cluster().nodesHotThreads(new NodesHotThreadsRequest()).actionGet(); + appender.assertAllExpectationsMatched(); + } +} diff --git a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java index d71e459fccdf0..2552007463b55 100644 --- a/server/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/server/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -207,6 +207,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent implements private final MeanMetric transmittedBytesMetric = new MeanMetric(); private volatile Map requestHandlers = Collections.emptyMap(); private final ResponseHandlers responseHandlers = new ResponseHandlers(); + private final TransportLogger transportLogger; private final BytesReference pingMessage; public TcpTransport(String transportName, Settings settings, ThreadPool threadPool, BigArrays bigArrays, @@ -221,6 +222,8 @@ public TcpTransport(String transportName, Settings settings, ThreadPool threadPo this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); this.networkService = networkService; this.transportName = transportName; + this.transportLogger = new TransportLogger(settings); + final Settings defaultFeatures = DEFAULT_FEATURES_SETTING.get(settings); if (defaultFeatures == null) { this.features = new String[0]; @@ -788,7 +791,7 @@ public void onException(TcpChannel channel, Exception e) { // in case we 
are able to return data, serialize the exception content and sent it back to the client if (channel.isOpen()) { BytesArray message = new BytesArray(e.getMessage().getBytes(StandardCharsets.UTF_8)); - final SendMetricListener closeChannel = new SendMetricListener(message.length()) { + final SendMetricListener listener = new SendMetricListener(message.length()) { @Override protected void innerInnerOnResponse(Void v) { CloseableChannel.closeChannel(channel); @@ -800,7 +803,14 @@ protected void innerOnFailure(Exception e) { CloseableChannel.closeChannel(channel); } }; - internalSendMessage(channel, message, closeChannel); + // We do not call internalSendMessage because we are not sending a message that is an + // elasticsearch binary message. We are just serializing an exception here, not formatting it + // as an elasticsearch transport message. + try { + channel.sendMessage(message, listener); + } catch (Exception ex) { + listener.onFailure(ex); + } } } else { logger.warn(() -> new ParameterizedMessage("exception caught on transport layer [{}], closing connection", channel), e); @@ -906,6 +916,7 @@ private void sendRequestToChannel(final DiscoveryNode node, final TcpChannel cha * sends a message to the given channel, using the given callbacks. */ private void internalSendMessage(TcpChannel channel, BytesReference message, SendMetricListener listener) { + transportLogger.logOutboundMessage(channel, message); try { channel.sendMessage(message, listener); } catch (Exception ex) { @@ -1050,6 +1061,24 @@ private BytesReference buildMessage(long requestId, byte status, Version nodeVer return new CompositeBytesReference(header, messageBody, zeroCopyBuffer); } + /** + * Handles an inbound message that has been decoded. + * + * @param channel the channel the message is from + * @param message the message + */ + public void inboundMessage(TcpChannel channel, BytesReference message) { + try { + transportLogger.logInboundMessage(channel, message); + // Message length of 0 is a ping + if (message.length() != 0) { + messageReceived(message, channel); + } + } catch (Exception e) { + onException(channel, e); + } + } + /** * Consumes bytes that are available from network reads. This method returns the number of bytes consumed * in this call. @@ -1067,15 +1096,8 @@ public int consumeNetworkReads(TcpChannel channel, BytesReference bytesReference if (message == null) { return 0; - } else if (message.length() == 0) { - // This is a ping and should not be handled. - return BYTES_NEEDED_FOR_MESSAGE_SIZE; } else { - try { - messageReceived(message, channel); - } catch (Exception e) { - onException(channel, e); - } + inboundMessage(channel, message); return message.length() + BYTES_NEEDED_FOR_MESSAGE_SIZE; } } @@ -1091,7 +1113,7 @@ public int consumeNetworkReads(TcpChannel channel, BytesReference bytesReference * @throws IllegalArgumentException if the message length is greater that the maximum allowed frame size. * This is dependent on the available memory.
*/ - public static BytesReference decodeFrame(BytesReference networkBytes) throws IOException { + static BytesReference decodeFrame(BytesReference networkBytes) throws IOException { int messageLength = readMessageLength(networkBytes); if (messageLength == -1) { return null; diff --git a/server/src/main/java/org/elasticsearch/transport/TransportLogger.java b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java new file mode 100644 index 0000000000000..3120620b05322 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/TransportLogger.java @@ -0,0 +1,122 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.compress.Compressor; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.compress.NotCompressedException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.internal.io.IOUtils; + +import java.io.IOException; + +public final class TransportLogger { + + private final Logger logger; + private static final int HEADER_SIZE = TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE; + + TransportLogger(Settings settings) { + logger = Loggers.getLogger(TransportLogger.class, settings); + } + + void logInboundMessage(TcpChannel channel, BytesReference message) { + if (logger.isTraceEnabled()) { + try { + String logMessage = format(channel, message, "READ"); + logger.trace(logMessage); + } catch (IOException e) { + logger.trace("an exception occurred formatting a READ trace message", e); + } + } + } + + void logOutboundMessage(TcpChannel channel, BytesReference message) { + if (logger.isTraceEnabled()) { + try { + BytesReference withoutHeader = message.slice(HEADER_SIZE, message.length() - HEADER_SIZE); + String logMessage = format(channel, withoutHeader, "WRITE"); + logger.trace(logMessage); + } catch (IOException e) { + logger.trace("an exception occurred formatting a WRITE trace message", e); + } + } + } + + private String format(TcpChannel channel, BytesReference message, String event) throws IOException { + final StringBuilder sb = new StringBuilder(); + sb.append(channel); + int messageLengthWithHeader = HEADER_SIZE + message.length(); + // This is a ping + if (message.length() == 0) { + sb.append(" [ping]").append(' ').append(event).append(": ").append(messageLengthWithHeader).append('B'); + } else { + boolean success = false; + StreamInput streamInput = message.streamInput(); + 
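+            // At this point the 'E''S' marker bytes and the four byte length prefix have
+            // already been stripped (see HEADER_SIZE above), so the stream begins with the
+            // eight byte request id, one byte status and four byte version id read next.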
try { + final long requestId = streamInput.readLong(); + final byte status = streamInput.readByte(); + final boolean isRequest = TransportStatus.isRequest(status); + final String type = isRequest ? "request" : "response"; + final String version = Version.fromId(streamInput.readInt()).toString(); + sb.append(" [length: ").append(messageLengthWithHeader); + sb.append(", request id: ").append(requestId); + sb.append(", type: ").append(type); + sb.append(", version: ").append(version); + + if (isRequest) { + if (TransportStatus.isCompress(status)) { + Compressor compressor; + try { + final int bytesConsumed = TcpHeader.REQUEST_ID_SIZE + TcpHeader.STATUS_SIZE + TcpHeader.VERSION_ID_SIZE; + compressor = CompressorFactory.compressor(message.slice(bytesConsumed, message.length() - bytesConsumed)); + } catch (NotCompressedException ex) { + throw new IllegalStateException(ex); + } + streamInput = compressor.streamInput(streamInput); + } + + try (ThreadContext context = new ThreadContext(Settings.EMPTY)) { + context.readHeaders(streamInput); + } + // now we decode the features + if (streamInput.getVersion().onOrAfter(Version.V_6_3_0)) { + streamInput.readStringArray(); + } + sb.append(", action: ").append(streamInput.readString()); + } + sb.append(']'); + sb.append(' ').append(event).append(": ").append(messageLengthWithHeader).append('B'); + success = true; + } finally { + if (success) { + IOUtils.close(streamInput); + } else { + IOUtils.closeWhileHandlingException(streamInput); + } + } + } + return sb.toString(); + } +} diff --git a/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java new file mode 100644 index 0000000000000..42a6100882088 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/TransportLoggerTests.java @@ -0,0 +1,116 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport; + +import org.apache.logging.log4j.Level; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; +import org.elasticsearch.action.admin.cluster.stats.ClusterStatsRequest; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.CompositeBytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.test.junit.annotations.TestLogging; + +import java.io.IOException; + +import static org.mockito.Mockito.mock; + +@TestLogging(value = "org.elasticsearch.transport.TransportLogger:trace") +public class TransportLoggerTests extends ESTestCase { + + private MockLogAppender appender; + + public void setUp() throws Exception { + super.setUp(); + appender = new MockLogAppender(); + Loggers.addAppender(Loggers.getLogger(TransportLogger.class), appender); + appender.start(); + } + + public void tearDown() throws Exception { + Loggers.removeAppender(Loggers.getLogger(TransportLogger.class), appender); + appender.stop(); + super.tearDown(); + } + + public void testLoggingHandler() throws IOException { + TransportLogger transportLogger = new TransportLogger(Settings.EMPTY); + + final String writePattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/stats]" + + " WRITE: \\d+B"; + final MockLogAppender.LoggingExpectation writeExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "hot threads request", TransportLogger.class.getCanonicalName(), Level.TRACE, writePattern); + + final String readPattern = + ".*\\[length: \\d+" + + ", request id: \\d+" + + ", type: request" + + ", version: .*" + + ", action: cluster:monitor/stats]" + + " READ: \\d+B"; + + final MockLogAppender.LoggingExpectation readExpectation = + new MockLogAppender.PatternSeenEventExcpectation( + "cluster monitor request", TransportLogger.class.getCanonicalName(), Level.TRACE, readPattern); + + appender.addExpectation(writeExpectation); + appender.addExpectation(readExpectation); + BytesReference bytesReference = buildRequest(); + transportLogger.logInboundMessage(mock(TcpChannel.class), bytesReference.slice(6, bytesReference.length() - 6)); + transportLogger.logOutboundMessage(mock(TcpChannel.class), bytesReference); + appender.assertAllExpectationsMatched(); + } + + private BytesReference buildRequest() throws IOException { + try (BytesStreamOutput messageOutput = new BytesStreamOutput()) { + messageOutput.setVersion(Version.CURRENT); + try (ThreadContext context = new ThreadContext(Settings.EMPTY)) { + context.writeTo(messageOutput); + } + messageOutput.writeStringArray(new String[0]); + messageOutput.writeString(ClusterStatsAction.NAME); + new ClusterStatsRequest().writeTo(messageOutput); + BytesReference messageBody = messageOutput.bytes(); + final BytesReference header = buildHeader(randomInt(30), messageBody.length()); + return new CompositeBytesReference(header, messageBody); + } + } + + private BytesReference buildHeader(long requestId, int length) throws IOException { + try (BytesStreamOutput headerOutput = new BytesStreamOutput(TcpHeader.HEADER_SIZE)) { + headerOutput.setVersion(Version.CURRENT); + 
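+            // TcpHeader.writeHeader is expected to emit the fixed-size frame header: the
+            // 'E' and 'S' marker bytes, a four byte message length, an eight byte request
+            // id, a one byte status and a four byte version id, which is what the
+            // HEADER_SIZE assertion below relies on.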
TcpHeader.writeHeader(headerOutput, requestId, TransportStatus.setRequest((byte) 0), Version.CURRENT, length); + final BytesReference bytes = headerOutput.bytes(); + assert bytes.length() == TcpHeader.HEADER_SIZE : "header size mismatch expected: " + TcpHeader.HEADER_SIZE + " but was: " + + bytes.length(); + return bytes; + } + } +} From 41839cf9a857cb1eef743f36b51a55d10f7073e1 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 5 Sep 2018 19:03:34 -0400 Subject: [PATCH 05/91] Acquire searcher on closing engine should throw ACE (#33331) Closes #33330 --- .../java/org/elasticsearch/index/engine/InternalEngine.java | 4 +++- .../org/elasticsearch/index/engine/InternalEngineTests.java | 5 +++++ .../elasticsearch/index/shard/PrimaryReplicaSyncerTests.java | 1 - 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 00d1f67f01bd9..f902ce0750245 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2014,7 +2014,9 @@ public Searcher acquireSearcher(String source, SearcherScope scope) { /* Acquire order here is store -> manager since we need * to make sure that the store is not closed before * the searcher is acquired. */ - store.incRef(); + if (store.tryIncRef() == false) { + throw new AlreadyClosedException(shardId + " store is closed", failedEngine.get()); + } Releasable releasable = store::decRef; try { final ReferenceManager referenceManager; diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index d3aead9e44e16..9cc8d859c6ff1 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -5020,6 +5020,11 @@ public void testLastRefreshCheckpoint() throws Exception { assertThat(engine.lastRefreshedCheckpoint(), equalTo(engine.getLocalCheckpoint())); } + public void testAcquireSearcherOnClosingEngine() throws Exception { + engine.close(); + expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test")); + } + private static void trimUnsafeCommits(EngineConfig config) throws IOException { final Store store = config.getStore(); final TranslogConfig translogConfig = config.getTranslogConfig(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index 36d52d4475b1a..29b16ca28f4da 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -125,7 +125,6 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { closeShards(shard); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33330") public void testSyncerOnClosingShard() throws Exception { IndexShard shard = newStartedShard(true); AtomicBoolean syncActionCalled = new AtomicBoolean(); From 39e3bd93c7f5554720ea7a6820d20082c7feeeb0 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 5 Sep 2018 19:05:41 -0400 Subject: [PATCH 06/91] TEST: Create following engines in the main thread (#33391) There are two races in the
testUpdateAndReadChangesConcurrently if the following engines are created in the worker threads. We fixed the translog issue in #33352, but there is still another race with createStore. This commit ensures that we create all engines in the main thread. Relates #33352 Closes #33344 --- .../engine/LuceneChangesSnapshotTests.java | 25 +++++++++---------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java index ba98e8f2d62a1..412b91aaef200 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LuceneChangesSnapshotTests.java @@ -19,14 +19,12 @@ package org.elasticsearch.index.engine; -import java.nio.file.Path; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.SnapshotMatchers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.IndexSettingsModule; @@ -202,7 +200,7 @@ public void testUpdateAndReadChangesConcurrently() throws Exception { CountDownLatch readyLatch = new CountDownLatch(followers.length + 1); AtomicBoolean isDone = new AtomicBoolean(); for (int i = 0; i < followers.length; i++) { - followers[i] = new Follower(engine, isDone, readyLatch, createTempDir()); + followers[i] = new Follower(engine, isDone, readyLatch); followers[i].start(); } boolean onPrimary = randomBoolean(); @@ -228,28 +226,30 @@ public void testUpdateAndReadChangesConcurrently() throws Exception { operations.add(op); } readyLatch.countDown(); + readyLatch.await(); concurrentlyApplyOps(operations, engine); assertThat(engine.getLocalCheckpointTracker().getCheckpoint(), equalTo(operations.size() - 1L)); isDone.set(true); for (Follower follower : followers) { follower.join(); + IOUtils.close(follower.engine, follower.engine.store); } } class Follower extends Thread { private final Engine leader; + private final InternalEngine engine; private final TranslogHandler translogHandler; private final AtomicBoolean isDone; private final CountDownLatch readLatch; - private final Path translogPath; - Follower(Engine leader, AtomicBoolean isDone, CountDownLatch readLatch, Path translogPath) { + Follower(Engine leader, AtomicBoolean isDone, CountDownLatch readLatch) throws IOException { this.leader = leader; this.isDone = isDone; this.readLatch = readLatch; this.translogHandler = new TranslogHandler(xContentRegistry(), IndexSettingsModule.newIndexSettings(shardId.getIndexName(), - engine.engineConfig.getIndexSettings().getSettings())); - this.translogPath = translogPath; + leader.engineConfig.getIndexSettings().getSettings())); + this.engine = createEngine(createStore(), createTempDir()); } void pullOperations(Engine follower) throws IOException { @@ -267,16 +267,15 @@ void pullOperations(Engine follower) throws IOException { @Override public void run() { - try (Store store = createStore(); - InternalEngine follower = createEngine(store, translogPath)) { + try { readLatch.countDown(); readLatch.await(); while (isDone.get() == false || - follower.getLocalCheckpointTracker().getCheckpoint() < 
leader.getLocalCheckpoint()) { - pullOperations(follower); + engine.getLocalCheckpointTracker().getCheckpoint() < leader.getLocalCheckpoint()) { + pullOperations(engine); } - assertConsistentHistoryBetweenTranslogAndLuceneIndex(follower, mapperService); - assertThat(getDocIds(follower, true), equalTo(getDocIds(leader, true))); + assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService); + assertThat(getDocIds(engine, true), equalTo(getDocIds(leader, true))); } catch (Exception ex) { throw new AssertionError(ex); } From d71ced1b0077c41d84ad3842f7aa98897bcb450e Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 5 Sep 2018 20:43:44 -0400 Subject: [PATCH 07/91] Generalize search.remote settings to cluster.remote (#33413) With features like CCR building on the CCS infrastructure, the settings prefix search.remote makes less sense as the namespace for these remote cluster settings than does a more general namespace like cluster.remote. This commit replaces these settings with cluster.remote with a fallback to the deprecated settings search.remote. --- .../modules/cross-cluster-search.asciidoc | 24 +-- docs/reference/modules/ml-node.asciidoc | 6 +- docs/reference/modules/node.asciidoc | 8 +- ...rossClusterSearchUnavailableClusterIT.java | 10 +- qa/multi-cluster-search/build.gradle | 8 +- .../test/multi_cluster/10_basic.yml | 12 +- .../test/multi_cluster/20_info.yml | 28 ++-- .../common/settings/ClusterSettings.java | 7 + .../common/settings/Setting.java | 63 +++++-- .../transport/RemoteClusterAware.java | 139 ++++++++++----- .../transport/RemoteClusterService.java | 158 ++++++++++++------ .../CanMatchPreFilterSearchPhaseTests.java | 6 +- .../action/search/DfsQueryPhaseTests.java | 6 +- .../action/search/ExpandSearchPhaseTests.java | 10 +- .../action/search/FetchSearchPhaseTests.java | 10 +- .../cluster/settings/ClusterSettingsIT.java | 2 +- .../index/IndexServiceTests.java | 2 +- .../IndexingMemoryControllerTests.java | 4 +- .../transport/RemoteClusterClientTests.java | 4 +- .../transport/RemoteClusterServiceTests.java | 68 ++++---- .../transport/RemoteClusterSettingsTests.java | 146 ++++++++++++++++ .../cross-cluster.asciidoc | 4 +- .../build.gradle | 2 +- .../multi-cluster-with-security/build.gradle | 2 +- .../plugin/ccr/qa/multi-cluster/build.gradle | 2 +- .../authz/AuthorizationServiceTests.java | 2 +- .../authz/IndicesAndAliasesResolverTests.java | 4 +- .../build.gradle | 8 +- .../test/multi_cluster/10_basic.yml | 6 +- .../test/multi_cluster/20_info.yml | 6 +- 30 files changed, 527 insertions(+), 230 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/transport/RemoteClusterSettingsTests.java diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc index 21e21edc35b57..d6c65eaff0155 100644 --- a/docs/reference/modules/cross-cluster-search.asciidoc +++ b/docs/reference/modules/cross-cluster-search.asciidoc @@ -38,7 +38,7 @@ remote clusters that should be connected to, for instance: [source,yaml] -------------------------------- -search: +cluster: remote: cluster_one: <1> seeds: 127.0.0.1:9300 @@ -58,7 +58,7 @@ following: PUT _cluster/settings { "persistent": { - "search": { + "cluster": { "remote": { "cluster_one": { "seeds": [ @@ -94,7 +94,7 @@ because we'll use them later. 
{ "acknowledged" : true, "persistent": { - "search": { + "cluster": { "remote": { "cluster_one": { "seeds": [ @@ -129,7 +129,7 @@ A remote cluster can be deleted from the cluster settings by setting its seeds t PUT _cluster/settings { "persistent": { - "search": { + "cluster": { "remote": { "cluster_three": { "seeds": null <1> @@ -309,7 +309,7 @@ boolean `skip_unavailable` setting, set to `false` by default. PUT _cluster/settings { "persistent": { - "search.remote.cluster_two.skip_unavailable": true <1> + "cluster.remote.cluster_two.skip_unavailable": true <1> } } -------------------------------- @@ -391,30 +391,30 @@ GET /cluster_one:twitter,cluster_two:twitter,twitter/_search <1> [[cross-cluster-search-settings]] === Cross cluster search settings -`search.remote.connections_per_cluster`:: +`cluster.remote.connections_per_cluster`:: The number of nodes to connect to per remote cluster. The default is `3`. -`search.remote.initial_connect_timeout`:: +`cluster.remote.initial_connect_timeout`:: The time to wait for remote connections to be established when the node starts. The default is `30s`. -`search.remote.node.attr`:: +`cluster.remote.node.attr`:: A node attribute to filter out nodes that are eligible as a gateway node in the remote cluster. For instance a node can have a node attribute `node.attr.gateway: true` such that only nodes with this attribute will be - connected to if `search.remote.node.attr` is set to `gateway`. + connected to if `cluster.remote.node.attr` is set to `gateway`. -`search.remote.connect`:: +`cluster.remote.connect`:: By default, any node in the cluster can act as a cross-cluster client and - connect to remote clusters. The `search.remote.connect` setting can be set + connect to remote clusters. The `cluster.remote.connect` setting can be set to `false` (defaults to `true`) to prevent certain nodes from connecting to remote clusters. Cross-cluster search requests must be sent to a node that is allowed to act as a cross-cluster client. -`search.remote.${cluster_alias}.skip_unavailable`:: +`cluster.remote.${cluster_alias}.skip_unavailable`:: Per cluster boolean setting that allows to skip specific clusters when no nodes belonging to them are available and they are searched as part of a diff --git a/docs/reference/modules/ml-node.asciidoc b/docs/reference/modules/ml-node.asciidoc index 316df743bf9a1..9e4413e3a0c7e 100644 --- a/docs/reference/modules/ml-node.asciidoc +++ b/docs/reference/modules/ml-node.asciidoc @@ -59,7 +59,7 @@ To create a dedicated ingest node when {xpack} is installed, set: node.master: false <1> node.data: false <2> node.ingest: true <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> node.ml: false <5> ------------------- <1> Disable the `node.master` role (enabled by default). @@ -75,7 +75,7 @@ To create a dedicated coordinating node when {xpack} is installed, set: node.master: false <1> node.data: false <2> node.ingest: false <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> node.ml: false <5> ------------------- <1> Disable the `node.master` role (enabled by default). 
@@ -105,7 +105,7 @@ To create a dedicated {ml} node, set: node.master: false <1> node.data: false <2> node.ingest: false <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> node.ml: true <5> xpack.ml.enabled: true <6> ------------------- diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index f772977e3f02f..2d0cee85e29e1 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -93,7 +93,7 @@ To create a dedicated master-eligible node, set: node.master: true <1> node.data: false <2> node.ingest: false <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> ------------------- <1> The `node.master` role is enabled by default. <2> Disable the `node.data` role (enabled by default). @@ -192,7 +192,7 @@ To create a dedicated data node, set: node.master: false <1> node.data: true <2> node.ingest: false <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> The `node.data` role is enabled by default. @@ -220,7 +220,7 @@ To create a dedicated ingest node, set: node.master: false <1> node.data: false <2> node.ingest: true <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> Disable the `node.data` role (enabled by default). @@ -260,7 +260,7 @@ To create a dedicated coordinating node, set: node.master: false <1> node.data: false <2> node.ingest: false <3> -search.remote.connect: false <4> +cluster.remote.connect: false <4> ------------------- <1> Disable the `node.master` role (enabled by default). <2> Disable the `node.data` role (enabled by default). 
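The documentation changes above and the Java changes that follow all rely on one wiring pattern for backwards compatibility: each deprecated search.remote.* setting stays registered (marked Deprecated) and the corresponding new cluster.remote.* setting names it as its fallback, so existing configurations keep working while emitting a deprecation warning. A minimal, self-contained sketch of that pattern, using the connect flag as the example (the holder class is illustrative only; the two setting declarations mirror the ones added to RemoteClusterService later in this patch):

import org.elasticsearch.common.settings.Setting;

final class RemoteClusterFallbackSketch {

    // Old key: still registered so existing configurations resolve,
    // but marked deprecated so using it logs a warning.
    static final Setting<Boolean> SEARCH_ENABLE_REMOTE_CLUSTERS =
        Setting.boolSetting("search.remote.connect", true,
            Setting.Property.NodeScope, Setting.Property.Deprecated);

    // New key: reads its default from the deprecated setting, so a node
    // configured with only search.remote.connect behaves the same.
    static final Setting<Boolean> ENABLE_REMOTE_CLUSTERS =
        Setting.boolSetting("cluster.remote.connect",
            SEARCH_ENABLE_REMOTE_CLUSTERS,
            Setting.Property.NodeScope);
}

Both the new and the deprecated variants are then registered in ClusterSettings (see the ClusterSettings.java hunk further down) so the old keys remain valid until the fallback is removed.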
diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 6bfa4de8d4adf..fbcf55c91b739 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -235,8 +235,8 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), - containsString("Missing required setting [search.remote.remote1.seeds] " + - "for setting [search.remote.remote1.skip_unavailable]")); + containsString("Missing required setting [cluster.remote.remote1.seeds] " + + "for setting [cluster.remote.remote1.skip_unavailable]")); } Map settingsMap = new HashMap<>(); @@ -251,8 +251,8 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); - assertThat(responseException.getMessage(), containsString("Missing required setting [search.remote.remote1.seeds] " + - "for setting [search.remote.remote1.skip_unavailable]")); + assertThat(responseException.getMessage(), containsString("Missing required setting [cluster.remote.remote1.seeds] " + + "for setting [cluster.remote.remote1.skip_unavailable]")); } if (randomBoolean()) { @@ -304,7 +304,7 @@ private static HttpEntity buildUpdateSettingsRequestBody(Map set { builder.startObject("persistent"); { - builder.startObject("search.remote.remote1"); + builder.startObject("cluster.remote.remote1"); { for (Map.Entry entry : settings.entrySet()) { builder.field(entry.getKey(), entry.getValue()); diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 782e83fbb34f2..3012be985bc67 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -28,7 +28,7 @@ task remoteClusterTest(type: RestIntegTestTask) { remoteClusterTestCluster { numNodes = 2 clusterName = 'remote-cluster' - setting 'search.remote.connect', false + setting 'cluster.remote.connect', false } remoteClusterTestRunner { @@ -39,9 +39,9 @@ task mixedClusterTest(type: RestIntegTestTask) {} mixedClusterTestCluster { dependsOn remoteClusterTestRunner - setting 'search.remote.my_remote_cluster.seeds', "\"${-> remoteClusterTest.nodes.get(0).transportUri()}\"" - setting 'search.remote.connections_per_cluster', 1 - setting 'search.remote.connect', true + setting 'cluster.remote.my_remote_cluster.seeds', "\"${-> remoteClusterTest.nodes.get(0).transportUri()}\"" + setting 'cluster.remote.connections_per_cluster', 1 + setting 'cluster.remote.connect', true } mixedClusterTestRunner { diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml index 8617ecc1fe28a..e2b15bc0d5d9e 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml @@ -99,16 +99,16 @@ 
cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.test_remote_cluster.seeds: $remote_ip + cluster.remote.test_remote_cluster.seeds: $remote_ip - - match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}} + - match: {transient: {cluster.remote.test_remote_cluster.seeds: $remote_ip}} - do: search: @@ -124,16 +124,16 @@ cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.test_remote_cluster.seeds: $remote_ip + cluster.remote.test_remote_cluster.seeds: $remote_ip - - match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}} + - match: {transient: {cluster.remote.test_remote_cluster.seeds: $remote_ip}} - do: search: diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml index b4487e4fefee1..45cc570ecea8c 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml @@ -14,16 +14,16 @@ cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.test_remote_cluster.seeds: $remote_ip + cluster.remote.test_remote_cluster.seeds: $remote_ip - - match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}} + - match: {transient: {cluster.remote.test_remote_cluster.seeds: $remote_ip}} # we do another search here since this will enforce the connection to be established # otherwise the cluster might not have been connected yet. 
@@ -56,7 +56,7 @@ cluster.put_settings: body: transient: - search.remote.test_remote_cluster.seeds: null + cluster.remote.test_remote_cluster.seeds: null --- "skip_unavailable is returned as part of _remote/info response": @@ -68,16 +68,16 @@ cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.remote1.seeds: $remote_ip + cluster.remote.remote1.seeds: $remote_ip - - match: {transient: {search.remote.remote1.seeds: $remote_ip}} + - match: {transient: {cluster.remote.remote1.seeds: $remote_ip}} - do: cluster.remote_info: {} @@ -87,9 +87,9 @@ cluster.put_settings: body: transient: - search.remote.remote1.skip_unavailable: true + cluster.remote.remote1.skip_unavailable: true - - is_true: transient.search.remote.remote1.skip_unavailable + - is_true: transient.cluster.remote.remote1.skip_unavailable - do: cluster.remote_info: {} @@ -100,9 +100,9 @@ cluster.put_settings: body: transient: - search.remote.remote1.skip_unavailable: false + cluster.remote.remote1.skip_unavailable: false - - is_false: transient.search.remote.remote1.skip_unavailable + - is_false: transient.cluster.remote.remote1.skip_unavailable - do: cluster.remote_info: {} @@ -113,7 +113,7 @@ cluster.put_settings: body: transient: - search.remote.remote1.skip_unavailable: null + cluster.remote.remote1.skip_unavailable: null - match: {transient: {}} @@ -126,5 +126,5 @@ cluster.put_settings: body: transient: - search.remote.remote1.seeds: null - search.remote.remote1.skip_unavailable: null + cluster.remote.remote1.seeds: null + cluster.remote.remote1.skip_unavailable: null diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index f1405b5a5c0fd..10787140bdec8 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -273,12 +273,19 @@ public void apply(Settings value, Settings current, Settings previous) { ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING, TransportSearchAction.SHARD_COUNT_LIMIT_SETTING, RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, + RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_SEEDS, RemoteClusterAware.REMOTE_CLUSTERS_PROXY, + RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_PROXY, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE, + RemoteClusterService.SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE, RemoteClusterService.REMOTE_CONNECTIONS_PER_CLUSTER, + RemoteClusterService.SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER, RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING, + RemoteClusterService.SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING, RemoteClusterService.REMOTE_NODE_ATTRIBUTE, + RemoteClusterService.SEARCH_REMOTE_NODE_ATTRIBUTE, RemoteClusterService.ENABLE_REMOTE_CLUSTERS, + RemoteClusterService.SEARCH_ENABLE_REMOTE_CLUSTERS, TransportService.TRACE_LOG_EXCLUDE_SETTING, TransportService.TRACE_LOG_INCLUDE_SETTING, TransportCloseIndexAction.CLUSTER_INDICES_CLOSE_ENABLE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 8e9b1c30076f8..eabf2ef498406 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ 
b/server/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -16,6 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
+
 package org.elasticsearch.common.settings;

 import org.apache.logging.log4j.Logger;
@@ -753,7 +754,7 @@ public void diff(Settings.Builder builder, Settings source, Settings defaultSett
         /**
          * Returns the namespace for a concrete setting. Ie. an affix setting with prefix: {@code search.} and suffix: {@code username}
-         * will return {@code remote} as a namespace for the setting {@code search.remote.username}
+         * will return {@code remote} as a namespace for the setting {@code cluster.remote.username}
         */
        public String getNamespace(Setting<T> concreteSetting) {
            return key.getNamespace(concreteSetting.getKey());
@@ -1043,7 +1044,15 @@ public static Setting<String> simpleString(String key, Function
    }

    public static Setting<String> simpleString(String key, Setting<String> fallback, Property... properties) {
-        return new Setting<>(key, fallback, Function.identity(), properties);
+        return simpleString(key, fallback, Function.identity(), properties);
+    }
+
+    public static Setting<String> simpleString(
+            final String key,
+            final Setting<String> fallback,
+            final Function<String, String> parser,
+            final Property... properties) {
+        return new Setting<>(key, fallback, parser, properties);
    }

    public static Setting<String> simpleString(String key, Validator<String> validator, Property... properties) {
@@ -1275,15 +1284,41 @@ public static Setting<Settings> groupSetting(String key, Consumer<Settings> vali
        return new GroupSetting(key, validator, properties);
    }

-    public static Setting<TimeValue> timeSetting(String key, Function<Settings, TimeValue> defaultValue, TimeValue minValue,
-                                                 Property... properties) {
-        return new Setting<>(key, (s) -> defaultValue.apply(s).getStringRep(), (s) -> {
-            TimeValue timeValue = TimeValue.parseTimeValue(s, null, key);
-            if (timeValue.millis() < minValue.millis()) {
-                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
+    public static Setting<TimeValue> timeSetting(
+            final String key,
+            final Setting<TimeValue> fallbackSetting,
+            final TimeValue minValue,
+            final Property... properties) {
+        final SimpleKey simpleKey = new SimpleKey(key);
+        return new Setting<>(
+                simpleKey,
+                fallbackSetting,
+                fallbackSetting::getRaw,
+                minTimeValueParser(key, minValue),
+                (v, s) -> {},
+                properties);
+    }
+
+    public static Setting<TimeValue> timeSetting(
+            final String key, Function<Settings, TimeValue> defaultValue, final TimeValue minValue, final Property... properties) {
+        final SimpleKey simpleKey = new SimpleKey(key);
+        return new Setting<>(simpleKey, s -> defaultValue.apply(s).getStringRep(), minTimeValueParser(key, minValue), properties);
+    }
+
+    private static Function<String, TimeValue> minTimeValueParser(final String key, final TimeValue minValue) {
+        return s -> {
+            final TimeValue value = TimeValue.parseTimeValue(s, null, key);
+            if (value.millis() < minValue.millis()) {
+                final String message = String.format(
+                        Locale.ROOT,
+                        "failed to parse value [%s] for setting [%s], must be >= [%s]",
+                        s,
+                        key,
+                        minValue.getStringRep());
+                throw new IllegalArgumentException(message);
            }
-            return timeValue;
-        }, properties);
+            return value;
+        };
    }

    public static Setting<TimeValue> timeSetting(String key, TimeValue defaultValue, TimeValue minValue, Property...
properties) {
@@ -1302,6 +1337,14 @@ public static Setting<TimeValue> positiveTimeSetting(String key, TimeValue defau
        return timeSetting(key, defaultValue, TimeValue.timeValueMillis(0), properties);
    }

+    public static Setting<TimeValue> positiveTimeSetting(
+            final String key,
+            final Setting<TimeValue> fallbackSetting,
+            final TimeValue minValue,
+            final Property... properties) {
+        return timeSetting(key, fallbackSetting, minValue, properties);
+    }
+
    public static Setting<Double> doubleSetting(String key, double defaultValue, double minValue, Property... properties) {
        return doubleSetting(key, defaultValue, minValue, Double.POSITIVE_INFINITY, properties);
    }
diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
index 16d3c292bfe32..f08ef75612f7f 100644
--- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
+++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAware.java
@@ -16,10 +16,9 @@
 * specific language governing permissions and limitations
 * under the License.
 */
+
 package org.elasticsearch.transport;

-import java.util.EnumSet;
-import java.util.function.Supplier;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.ClusterNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
@@ -37,11 +36,13 @@
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.EnumSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.function.Predicate;
+import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

@@ -50,40 +51,83 @@
 */
 public abstract class RemoteClusterAware extends AbstractComponent {

+    public static final Setting.AffixSetting<List<String>> SEARCH_REMOTE_CLUSTERS_SEEDS =
+            Setting.affixKeySetting(
+                    "search.remote.",
+                    "seeds",
+                    key -> Setting.listSetting(
+                            key,
+                            Collections.emptyList(),
+                            s -> {
+                                parsePort(s);
+                                return s;
+                            },
+                            Setting.Property.Deprecated,
+                            Setting.Property.Dynamic,
+                            Setting.Property.NodeScope));
+
    /**
     * A list of initial seed nodes to discover eligible nodes from the remote cluster
     */
    public static final Setting.AffixSetting<List<String>> REMOTE_CLUSTERS_SEEDS = Setting.affixKeySetting(
-        "search.remote.",
-        "seeds",
-        key -> Setting.listSetting(
-            key, Collections.emptyList(),
-            s -> {
-                // validate seed address
-                parsePort(s);
-                return s;
-            },
-            Setting.Property.NodeScope,
-            Setting.Property.Dynamic
-        )
-    );
+            "cluster.remote.",
+            "seeds",
+            key -> Setting.listSetting(
+                    key,
+                    // the default needs to be emptyList() when fallback is removed
+                    "_na_".equals(key)
+                            ? SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace(key)
+                            : SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSetting(key.replaceAll("^cluster", "search")),
+                    s -> {
+                        // validate seed address
+                        parsePort(s);
+                        return s;
+                    },
+                    Setting.Property.Dynamic,
+                    Setting.Property.NodeScope));
+
    public static final char REMOTE_CLUSTER_INDEX_SEPARATOR = ':';
    public static final String LOCAL_CLUSTER_GROUP_KEY = "";

+    public static final Setting.AffixSetting<String> SEARCH_REMOTE_CLUSTERS_PROXY = Setting.affixKeySetting(
+            "search.remote.",
+            "proxy",
+            key -> Setting.simpleString(
+                    key,
+                    s -> {
+                        if (Strings.hasLength(s)) {
+                            parsePort(s);
+                        }
+                        return s;
+                    },
+                    Setting.Property.Deprecated,
+                    Setting.Property.Dynamic,
+                    Setting.Property.NodeScope),
+            REMOTE_CLUSTERS_SEEDS);
+
    /**
     * A proxy address for the remote cluster.
     * NOTE: this setting is undocumented until we have at least one transport that supports passing
     * on the hostname via a mechanism like SNI.
     */
    public static final Setting.AffixSetting<String> REMOTE_CLUSTERS_PROXY = Setting.affixKeySetting(
-        "search.remote.",
-        "proxy",
-        key -> Setting.simpleString(key, s -> {
-            if (Strings.hasLength(s)) {
-                parsePort(s);
-            }
-            return s;
-        }, Setting.Property.NodeScope, Setting.Property.Dynamic), REMOTE_CLUSTERS_SEEDS);
+            "cluster.remote.",
+            "proxy",
+            key -> Setting.simpleString(
+                    key,
+                    // no default is needed when fallback is removed, use simple string which gives empty
+                    "_na_".equals(key)
+                            ? SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(key)
+                            : SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSetting(key.replaceAll("^cluster", "search")),
+                    s -> {
+                        if (Strings.hasLength(s)) {
+                            parsePort(s);
+                        }
+                        return s;
+                    },
+                    Setting.Property.Dynamic,
+                    Setting.Property.NodeScope),
+            REMOTE_CLUSTERS_SEEDS);

    protected final ClusterNameExpressionResolver clusterNameResolver;

@@ -105,16 +149,16 @@ protected RemoteClusterAware(Settings settings) {
    protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) {
        Stream<Setting<List<String>>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings);
        return allConcreteSettings.collect(
-            Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> {
-                String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting);
-                List<String> addresses = concreteSetting.get(settings);
-                final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings);
-                List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
-                for (String address : addresses) {
-                    nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
-                }
-                return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
-            }));
+                Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> {
+                    String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting);
+                    List<String> addresses = concreteSetting.get(settings);
+                    final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings);
+                    List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
+                    for (String address : addresses) {
+                        nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
+                    }
+                    return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
+                }));
    }

    static DiscoveryNode buildSeedNode(String clusterName, String address, boolean proxyMode) {
@@ -122,14 +166,14 @@ static DiscoveryNode buildSeedNode(String clusterName, String address, boolean p
            TransportAddress transportAddress = new TransportAddress(TransportAddress.META_ADDRESS, 0);
            String hostName = address.substring(0, indexOfPortSeparator(address));
            return new DiscoveryNode("", clusterName + "#" + address, UUIDs.randomBase64UUID(), hostName, address,
-                transportAddress, Collections
-                .emptyMap(), EnumSet.allOf(DiscoveryNode.Role.class),
-                Version.CURRENT.minimumCompatibilityVersion());
+                    transportAddress, Collections
+                    .emptyMap(), EnumSet.allOf(DiscoveryNode.Role.class),
+                    Version.CURRENT.minimumCompatibilityVersion());
        } else {
            TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
            return new DiscoveryNode(clusterName + "#" + transportAddress.toString(),
-                transportAddress,
-                Version.CURRENT.minimumCompatibilityVersion());
+                    transportAddress,
+                    Version.CURRENT.minimumCompatibilityVersion());
        }
    }

@@ -157,8
+201,8 @@ public Map> groupClusterIndices(String[] requestIndices, Pr // remote_cluster_alias:index_name - for this case we fail the request. the user can easily change the cluster alias // if that happens throw new IllegalArgumentException("Can not filter indices; index " + index + - " exists but there is also a remote cluster named: " + remoteClusterName); - } + " exists but there is also a remote cluster named: " + remoteClusterName); + } String indexName = index.substring(i + 1); for (String clusterName : clusters) { perClusterIndices.computeIfAbsent(clusterName, k -> new ArrayList<>()).add(indexName); @@ -186,10 +230,16 @@ public Map> groupClusterIndices(String[] requestIndices, Pr * Registers this instance to listen to updates on the cluster settings. */ public void listenForUpdates(ClusterSettings clusterSettings) { - clusterSettings.addAffixUpdateConsumer(RemoteClusterAware.REMOTE_CLUSTERS_PROXY, - RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, - (key, value) -> updateRemoteCluster(key, value.v2(), value.v1()), - (namespace, value) -> {}); + clusterSettings.addAffixUpdateConsumer( + RemoteClusterAware.REMOTE_CLUSTERS_PROXY, + RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, + (key, value) -> updateRemoteCluster(key, value.v2(), value.v1()), + (namespace, value) -> {}); + clusterSettings.addAffixUpdateConsumer( + RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_PROXY, + RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_SEEDS, + (key, value) -> updateRemoteCluster(key, value.v2(), value.v1()), + (namespace, value) -> {}); } @@ -227,4 +277,5 @@ private static int indexOfPortSeparator(String remoteHost) { public static String buildRemoteIndexName(String clusterAlias, String indexName) { return clusterAlias != null ? clusterAlias + REMOTE_CLUSTER_INDEX_SEPARATOR + indexName : indexName; } + } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index 60126847cbea9..0e8bd5cb28db5 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.transport; import java.util.Collection; @@ -64,18 +65,39 @@ */ public final class RemoteClusterService extends RemoteClusterAware implements Closeable { + public static final Setting SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER = + Setting.intSetting("search.remote.connections_per_cluster", 3, 1, Setting.Property.NodeScope, Setting.Property.Deprecated); + /** * The maximum number of connections that will be established to a remote cluster. For instance if there is only a single * seed node, other nodes will be discovered up to the given number of nodes in this setting. The default is 3. 
*/
-    public static final Setting<Integer> REMOTE_CONNECTIONS_PER_CLUSTER = Setting.intSetting("search.remote.connections_per_cluster",
-        3, 1, Setting.Property.NodeScope);
+    public static final Setting<Integer> REMOTE_CONNECTIONS_PER_CLUSTER =
+            Setting.intSetting(
+                    "cluster.remote.connections_per_cluster",
+                    SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER, // the default needs to be three when fallback is removed
+                    1,
+                    Setting.Property.NodeScope);
+
+    public static final Setting<TimeValue> SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING =
+            Setting.positiveTimeSetting(
+                    "search.remote.initial_connect_timeout",
+                    TimeValue.timeValueSeconds(30),
+                    Setting.Property.NodeScope,
+                    Setting.Property.Deprecated);

    /**
     * The initial connect timeout for remote cluster connections
     */
    public static final Setting<TimeValue> REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING =
-        Setting.positiveTimeSetting("search.remote.initial_connect_timeout", TimeValue.timeValueSeconds(30), Setting.Property.NodeScope);
+            Setting.positiveTimeSetting(
+                    "cluster.remote.initial_connect_timeout",
+                    SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING, // the default needs to be thirty seconds when fallback is removed
+                    TimeValue.timeValueSeconds(30),
+                    Setting.Property.NodeScope);
+
+    public static final Setting<String> SEARCH_REMOTE_NODE_ATTRIBUTE =
+            Setting.simpleString("search.remote.node.attr", Setting.Property.NodeScope, Setting.Property.Deprecated);

    /**
     * The name of a node attribute to select nodes that should be connected to in the remote cluster.
@@ -83,20 +105,46 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
     * clusters. In that case {@code search.remote.node.attr: gateway} can be used to filter out other nodes in the remote cluster.
     * The value of the setting is expected to be a boolean, {@code true} for nodes that can become gateways, {@code false} otherwise.
     */
-    public static final Setting<String> REMOTE_NODE_ATTRIBUTE = Setting.simpleString("search.remote.node.attr",
-        Setting.Property.NodeScope);
+    public static final Setting<String> REMOTE_NODE_ATTRIBUTE =
+            Setting.simpleString(
+                    "cluster.remote.node.attr",
+                    SEARCH_REMOTE_NODE_ATTRIBUTE, // no default is needed when fallback is removed, use simple string which gives empty
+                    Setting.Property.NodeScope);
+
+    public static final Setting<Boolean> SEARCH_ENABLE_REMOTE_CLUSTERS =
+            Setting.boolSetting("search.remote.connect", true, Setting.Property.NodeScope, Setting.Property.Deprecated);

    /**
     * If true connecting to remote clusters is supported on this node. If false this node will not establish
     * connections to any remote clusters configured. Search requests executed against this node (where this node is the coordinating node)
     * will fail if remote cluster syntax is used as an index pattern.
The default is true */ - public static final Setting ENABLE_REMOTE_CLUSTERS = Setting.boolSetting("search.remote.connect", true, - Setting.Property.NodeScope); + public static final Setting ENABLE_REMOTE_CLUSTERS = + Setting.boolSetting( + "cluster.remote.connect", + SEARCH_ENABLE_REMOTE_CLUSTERS, // the default needs to be true when fallback is removed + Setting.Property.NodeScope); + + public static final Setting.AffixSetting SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE = + Setting.affixKeySetting( + "search.remote.", + "skip_unavailable", + key -> boolSetting(key, false, Setting.Property.Deprecated, Setting.Property.Dynamic, Setting.Property.NodeScope), + REMOTE_CLUSTERS_SEEDS); public static final Setting.AffixSetting REMOTE_CLUSTER_SKIP_UNAVAILABLE = - Setting.affixKeySetting("search.remote.", "skip_unavailable", - key -> boolSetting(key, false, Setting.Property.NodeScope, Setting.Property.Dynamic), REMOTE_CLUSTERS_SEEDS); + Setting.affixKeySetting( + "cluster.remote.", + "skip_unavailable", + key -> boolSetting( + key, + // the default needs to be false when fallback is removed + "_na_".equals(key) + ? SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(key) + : SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSetting(key.replaceAll("^cluster", "search")), + Setting.Property.Dynamic, + Setting.Property.NodeScope), + REMOTE_CLUSTERS_SEEDS); private static final Predicate DEFAULT_NODE_PREDICATE = (node) -> Version.CURRENT.isCompatible(node.getVersion()) && (node.isMasterNode() == false || node.isDataNode() || node.isIngestNode()); @@ -144,27 +192,27 @@ private synchronized void updateRemoteClusters(Map { - if (countDown.countDown()) { - connectionListener.onResponse(response); - } - }, - exception -> { - if (countDown.fastForward()) { - connectionListener.onFailure(exception); - } - if (finalRemote.isClosed() == false) { - logger.warn("failed to update seed list for cluster: " + entry.getKey(), exception); - } - })); + response -> { + if (countDown.countDown()) { + connectionListener.onResponse(response); + } + }, + exception -> { + if (countDown.fastForward()) { + connectionListener.onFailure(exception); + } + if (finalRemote.isClosed() == false) { + logger.warn("failed to update seed list for cluster: " + entry.getKey(), exception); + } + })); } } this.remoteClusters = Collections.unmodifiableMap(remoteClusters); @@ -198,7 +246,7 @@ public Map groupIndices(IndicesOptions indicesOptions, String clusterAlias = entry.getKey(); List originalIndices = entry.getValue(); originalIndicesMap.put(clusterAlias, - new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), indicesOptions)); + new OriginalIndices(originalIndices.toArray(new String[originalIndices.size()]), indicesOptions)); } if (originalIndicesMap.containsKey(LOCAL_CLUSTER_GROUP_KEY) == false) { originalIndicesMap.put(LOCAL_CLUSTER_GROUP_KEY, new OriginalIndices(Strings.EMPTY_ARRAY, indicesOptions)); @@ -230,38 +278,38 @@ public void collectSearchShards(IndicesOptions indicesOptions, String preference } final String[] indices = entry.getValue().indices(); ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest(indices) - .indicesOptions(indicesOptions).local(true).preference(preference) - .routing(routing); + .indicesOptions(indicesOptions).local(true).preference(preference) + .routing(routing); remoteClusterConnection.fetchSearchShards(searchShardsRequest, - new ActionListener() { - @Override - public void onResponse(ClusterSearchShardsResponse 
clusterSearchShardsResponse) { - searchShardsResponses.put(clusterName, clusterSearchShardsResponse); - if (responsesCountDown.countDown()) { - RemoteTransportException exception = transportException.get(); - if (exception == null) { - listener.onResponse(searchShardsResponses); - } else { - listener.onFailure(transportException.get()); + new ActionListener() { + @Override + public void onResponse(ClusterSearchShardsResponse clusterSearchShardsResponse) { + searchShardsResponses.put(clusterName, clusterSearchShardsResponse); + if (responsesCountDown.countDown()) { + RemoteTransportException exception = transportException.get(); + if (exception == null) { + listener.onResponse(searchShardsResponses); + } else { + listener.onFailure(transportException.get()); + } } } - } - @Override - public void onFailure(Exception e) { - RemoteTransportException exception = new RemoteTransportException("error while communicating with remote cluster [" - + clusterName + "]", e); - if (transportException.compareAndSet(null, exception) == false) { - exception = transportException.accumulateAndGet(exception, (previous, current) -> { - current.addSuppressed(previous); - return current; - }); - } - if (responsesCountDown.countDown()) { - listener.onFailure(exception); + @Override + public void onFailure(Exception e) { + RemoteTransportException exception = + new RemoteTransportException("error while communicating with remote cluster [" + clusterName + "]", e); + if (transportException.compareAndSet(null, exception) == false) { + exception = transportException.accumulateAndGet(exception, (previous, current) -> { + current.addSuppressed(previous); + return current; + }); + } + if (responsesCountDown.countDown()) { + listener.onFailure(exception); + } } - } - }); + }); } } @@ -306,6 +354,7 @@ protected Set getRemoteClusterNames() { public void listenForUpdates(ClusterSettings clusterSettings) { super.listenForUpdates(clusterSettings); clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable, (alias, value) -> {}); + clusterSettings.addAffixUpdateConsumer(SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable, (alias, value) -> {}); } synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) { @@ -327,7 +376,7 @@ void updateRemoteCluster( final String proxyAddress, final ActionListener connectionListener) { final List> nodes = addresses.stream().>map(address -> () -> - buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)) + buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress)) ).collect(Collectors.toList()); updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener); } @@ -387,7 +436,7 @@ public void onResponse(Function nodeLookup) { } if (countDown.countDown()) { listener.onResponse((clusterAlias, nodeId) - -> clusterMap.getOrDefault(clusterAlias, nullFunction).apply(nodeId)); + -> clusterMap.getOrDefault(clusterAlias, nullFunction).apply(nodeId)); } } @@ -418,4 +467,5 @@ public Client getRemoteClusterClient(ThreadPool threadPool, String clusterAlias) Collection getConnections() { return remoteClusters.values(); } + } diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 50bbad16ab73b..2a0fa6c7ce134 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -60,7 +60,7 @@ public void testFilterShards() throws InterruptedException { final boolean shard2 = randomBoolean(); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, @@ -119,7 +119,7 @@ public void testFilterWithFailure() throws InterruptedException { lookup.put("node2", new SearchAsyncActionTests.MockConnection(replicaNode)); final boolean shard1 = randomBoolean(); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendCanMatch(Transport.Connection connection, ShardSearchTransportRequest request, SearchTask task, @@ -186,7 +186,7 @@ public void testLotsOfShards() throws InterruptedException { final SearchTransportService searchTransportService = - new SearchTransportService(Settings.builder().put("search.remote.connect", false).build(), null, null) { + new SearchTransportService(Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendCanMatch( Transport.Connection connection, diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index c1f729a12ca2b..c1bdf901a6d1b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -60,7 +60,7 @@ public void testDfsWith2Shards() throws IOException { SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, @@ -118,7 +118,7 @@ public void testDfsWith1ShardFailed() throws IOException { SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, @@ -175,7 +175,7 @@ public void testFailPhaseOnException() throws IOException { SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( - 
Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index b580d48c11a43..7d19ee58f9fb8 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -70,7 +70,7 @@ public void testCollapseSingleHit() throws IOException { .collect(Collectors.toList())))); mockSearchPhaseContext.getRequest().source().query(originalQuery); mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { @@ -145,7 +145,7 @@ public void testFailOneItemFailsEntirePhase() throws IOException { mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder() .collapse(new CollapseBuilder("someField").setInnerHits(new InnerHitBuilder().setName("foobarbaz")))); mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { @@ -187,7 +187,7 @@ public void run() throws IOException { public void testSkipPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { @@ -218,7 +218,7 @@ public void run() throws IOException { public void testSkipExpandCollapseNoHits() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { @@ -250,7 +250,7 @@ public void testExpandRequestOptions() throws IOException { boolean version = randomBoolean(); mockSearchPhaseContext.searchTransport = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 7f4fbc9115791..e9795bfdf6f59 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -106,7 +106,7 @@ public void testFetchTwoDocument() throws IOException { results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -161,7 +161,7 @@ public void testFailFetchOneDoc() throws IOException { results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -215,7 +215,7 @@ public void testFetchDocsConcurrently() throws IOException, InterruptedException results.consumeResult(queryResult); } SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -277,7 +277,7 @@ public void testExceptionFailsPhase() throws IOException { results.consumeResult(queryResult); AtomicInteger numFetches = new AtomicInteger(0); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { @@ -331,7 +331,7 @@ public void testCleanupIrrelevantContexts() throws IOException { // contexts tha results.consumeResult(queryResult); SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null) { + Settings.builder().put("cluster.remote.connect", false).build(), null, null) { @Override public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { diff --git a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index b53d61280f72d..633e043ddd14a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -286,7 +286,7 @@ public void testUpdateDiscoveryPublishTimeout() { .get(); fail("bogus value"); } catch (IllegalArgumentException ex) { - assertEquals(ex.getMessage(), "Failed to parse value [-1] for setting [discovery.zen.publish_timeout] must be >= 0s"); + assertEquals(ex.getMessage(), "failed to parse value [-1] for setting [discovery.zen.publish_timeout], must be >= 
[0ms]"); } assertThat(discoverySettings.getPublishTimeout().seconds(), equalTo(1L)); diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index b0b4ec3930adf..8ace3aa34e86a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -318,7 +318,7 @@ public void testIllegalFsyncInterval() { createIndex("test", settings); fail(); } catch (IllegalArgumentException ex) { - assertEquals("Failed to parse value [0ms] for setting [index.translog.sync_interval] must be >= 100ms", ex.getMessage()); + assertEquals("failed to parse value [0ms] for setting [index.translog.sync_interval], must be >= [100ms]", ex.getMessage()); } } } diff --git a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index c9cc771370e21..01d7dc2a53558 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -258,7 +258,7 @@ public void testNegativeInterval() { Exception e = expectThrows(IllegalArgumentException.class, () -> new MockController(Settings.builder() .put("indices.memory.interval", "-42s").build())); - assertEquals("Failed to parse value [-42s] for setting [indices.memory.interval] must be >= 0s", e.getMessage()); + assertEquals("failed to parse value [-42s] for setting [indices.memory.interval], must be >= [0ms]", e.getMessage()); } @@ -266,7 +266,7 @@ public void testNegativeShardInactiveTime() { Exception e = expectThrows(IllegalArgumentException.class, () -> new MockController(Settings.builder() .put("indices.memory.shard_inactive_time", "-42s").build())); - assertEquals("Failed to parse value [-42s] for setting [indices.memory.shard_inactive_time] must be >= 0s", e.getMessage()); + assertEquals("failed to parse value [-42s] for setting [indices.memory.shard_inactive_time], must be >= [0ms]", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 34e22fd20de7f..3f85d927e9295 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -52,7 +52,7 @@ public void testConnectAndExecuteRequest() throws Exception { Settings localSettings = Settings.builder() .put(RemoteClusterService.ENABLE_REMOTE_CLUSTERS.getKey(), true) - .put("search.remote.test.seeds", remoteNode.getAddress().getAddress() + ":" + remoteNode.getAddress().getPort()).build(); + .put("cluster.remote.test.seeds", remoteNode.getAddress().getAddress() + ":" + remoteNode.getAddress().getPort()).build(); try (MockTransportService service = MockTransportService.createNewService(localSettings, Version.CURRENT, threadPool, null)) { service.start(); service.acceptIncomingRequests(); @@ -77,7 +77,7 @@ public void testEnsureWeReconnect() throws Exception { DiscoveryNode remoteNode = remoteTransport.getLocalDiscoNode(); Settings localSettings = Settings.builder() .put(RemoteClusterService.ENABLE_REMOTE_CLUSTERS.getKey(), true) - .put("search.remote.test.seeds", remoteNode.getAddress().getAddress() + ":" + remoteNode.getAddress().getPort()).build(); + 
.put("cluster.remote.test.seeds", remoteNode.getAddress().getAddress() + ":" + remoteNode.getAddress().getPort()).build(); try (MockTransportService service = MockTransportService.createNewService(localSettings, Version.CURRENT, threadPool, null)) { Semaphore semaphore = new Semaphore(1); service.start(); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 9d42b4e458dbe..0abde8839b44b 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -98,17 +98,17 @@ public void testSettingsAreRegistered() { public void testRemoteClusterSeedSetting() { // simple validation Settings settings = Settings.builder() - .put("search.remote.foo.seeds", "192.168.0.1:8080") - .put("search.remote.bar.seed", "[::1]:9090").build(); + .put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seed", "[::1]:9090").build(); RemoteClusterAware.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings)); Settings brokenSettings = Settings.builder() - .put("search.remote.foo.seeds", "192.168.0.1").build(); + .put("cluster.remote.foo.seeds", "192.168.0.1").build(); expectThrows(IllegalArgumentException.class, () -> RemoteClusterAware.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(brokenSettings).forEach(setting -> setting.get(brokenSettings))); Settings brokenPortSettings = Settings.builder() - .put("search.remote.foo.seeds", "192.168.0.1:123456789123456789").build(); + .put("cluster.remote.foo.seeds", "192.168.0.1:123456789123456789").build(); Exception e = expectThrows( IllegalArgumentException.class, () -> RemoteClusterAware.REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(brokenSettings) @@ -119,10 +119,10 @@ public void testRemoteClusterSeedSetting() { public void testBuildRemoteClustersDynamicConfig() throws Exception { Map>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig( - Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080") - .put("search.remote.bar.seeds", "[::1]:9090") - .put("search.remote.boom.seeds", "boom-node1.internal:1000") - .put("search.remote.boom.proxy", "foo.bar.com:1234").build()); + Settings.builder().put("cluster.remote.foo.seeds", "192.168.0.1:8080") + .put("cluster.remote.bar.seeds", "[::1]:9090") + .put("cluster.remote.boom.seeds", "boom-node1.internal:1000") + .put("cluster.remote.boom.proxy", "foo.bar.com:1234").build()); assertEquals(3, map.size()); assertTrue(map.containsKey("foo")); assertTrue(map.containsKey("bar")); @@ -167,8 +167,8 @@ public void testGroupClusterIndices() throws IOException { transportService.start(); transportService.acceptIncomingRequests(); Settings.Builder builder = Settings.builder(); - builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); - builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); + builder.putList("cluster.remote.cluster_1.seeds", seedNode.getAddress().toString()); + builder.putList("cluster.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); @@ -213,8 +213,8 @@ public void testIncrementallyAddClusters() throws IOException { transportService.start(); 
transportService.acceptIncomingRequests(); Settings.Builder builder = Settings.builder(); - builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString()); - builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); + builder.putList("cluster.remote.cluster_1.seeds", seedNode.getAddress().toString()); + builder.putList("cluster.remote.cluster_2.seeds", otherSeedNode.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); @@ -238,7 +238,7 @@ public void testIncrementallyAddClusters() throws IOException { public void testRemoteNodeAttribute() throws IOException, InterruptedException { final Settings settings = - Settings.builder().put("search.remote.node.attr", "gateway").build(); + Settings.builder().put("cluster.remote.node.attr", "gateway").build(); final List knownNodes = new CopyOnWriteArrayList<>(); final Settings gateway = Settings.builder().put("node.attr.gateway", true).build(); try (MockTransportService c1N1 = @@ -268,9 +268,9 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException { transportService.acceptIncomingRequests(); final Settings.Builder builder = Settings.builder(); builder.putList( - "search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); + "cluster.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); builder.putList( - "search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); + "cluster.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); @@ -335,8 +335,8 @@ public void testRemoteNodeRoles() throws IOException, InterruptedException { transportService.start(); transportService.acceptIncomingRequests(); final Settings.Builder builder = Settings.builder(); - builder.putList("search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); - builder.putList("search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); + builder.putList("cluster.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); + builder.putList("cluster.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); @@ -406,9 +406,9 @@ public void testCollectNodes() throws InterruptedException, IOException { transportService.acceptIncomingRequests(); final Settings.Builder builder = Settings.builder(); builder.putList( - "search.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); + "cluster.remote.cluster_1.seeds", c1N1Node.getAddress().toString()); builder.putList( - "search.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); + "cluster.remote.cluster_2.seeds", c2N1Node.getAddress().toString()); try (RemoteClusterService service = new RemoteClusterService(settings, transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); @@ -540,7 +540,7 @@ public void testCollectSearchShards() throws Exception { DiscoveryNode remoteSeedNode = remoteSeedTransport.getLocalDiscoNode(); knownNodes.add(remoteSeedNode); nodes[i] = remoteSeedNode; - builder.put("search.remote.remote" + i + ".seeds", remoteSeedNode.getAddress().toString()); + builder.put("cluster.remote.remote" + i + ".seeds", 
remoteSeedNode.getAddress().toString()); remoteIndicesByCluster.put("remote" + i, new OriginalIndices(new String[]{"index"}, IndicesOptions.lenientExpandOpen())); } Settings settings = builder.build(); @@ -696,13 +696,13 @@ public void onNodeDisconnected(DiscoveryNode node) { public void testRemoteClusterSkipIfDisconnectedSetting() { { Settings settings = Settings.builder() - .put("search.remote.foo.skip_unavailable", true) - .put("search.remote.bar.skip_unavailable", false).build(); + .put("cluster.remote.foo.skip_unavailable", true) + .put("cluster.remote.bar.skip_unavailable", false).build(); RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(settings).forEach(setting -> setting.get(settings)); } { Settings brokenSettings = Settings.builder() - .put("search.remote.foo.skip_unavailable", "broken").build(); + .put("cluster.remote.foo.skip_unavailable", "broken").build(); expectThrows(IllegalArgumentException.class, () -> RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE.getAllConcreteSettings(brokenSettings) .forEach(setting -> setting.get(brokenSettings))); @@ -712,22 +712,22 @@ public void testRemoteClusterSkipIfDisconnectedSetting() { new HashSet<>(Arrays.asList(RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE))); { - Settings settings = Settings.builder().put("search.remote.foo.skip_unavailable", randomBoolean()).build(); + Settings settings = Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> service.validate(settings, true)); - assertEquals("Missing required setting [search.remote.foo.seeds] for setting [search.remote.foo.skip_unavailable]", + assertEquals("Missing required setting [cluster.remote.foo.seeds] for setting [cluster.remote.foo.skip_unavailable]", iae.getMessage()); } { try (MockTransportService remoteSeedTransport = startTransport("seed", new CopyOnWriteArrayList<>(), Version.CURRENT)) { String seed = remoteSeedTransport.getLocalDiscoNode().getAddress().toString(); - service.validate(Settings.builder().put("search.remote.foo.skip_unavailable", randomBoolean()) - .put("search.remote.foo.seeds", seed).build(), true); - service.validate(Settings.builder().put("search.remote.foo.seeds", seed).build(), true); + service.validate(Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()) + .put("cluster.remote.foo.seeds", seed).build(), true); + service.validate(Settings.builder().put("cluster.remote.foo.seeds", seed).build(), true); - AbstractScopedSettings service2 = new ClusterSettings(Settings.builder().put("search.remote.foo.seeds", seed).build(), + AbstractScopedSettings service2 = new ClusterSettings(Settings.builder().put("cluster.remote.foo.seeds", seed).build(), new HashSet<>(Arrays.asList(RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE))); - service2.validate(Settings.builder().put("search.remote.foo.skip_unavailable", randomBoolean()).build(), false); + service2.validate(Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(), false); } } } @@ -789,7 +789,7 @@ public void testGetNodePredicateNodeVersion() { public void testGetNodePredicateNodeAttrs() { TransportAddress address = new TransportAddress(TransportAddress.META_ADDRESS, 0); Set roles = new HashSet<>(EnumSet.allOf(DiscoveryNode.Role.class)); - Settings settings = Settings.builder().put("search.remote.node.attr", 
"gateway").build(); + Settings settings = Settings.builder().put("cluster.remote.node.attr", "gateway").build(); Predicate nodePredicate = RemoteClusterService.getNodePredicate(settings); { DiscoveryNode nonGatewayNode = new DiscoveryNode("id", address, Collections.singletonMap("gateway", "false"), @@ -812,7 +812,7 @@ public void testGetNodePredicateNodeAttrs() { public void testGetNodePredicatesCombination() { TransportAddress address = new TransportAddress(TransportAddress.META_ADDRESS, 0); - Settings settings = Settings.builder().put("search.remote.node.attr", "gateway").build(); + Settings settings = Settings.builder().put("cluster.remote.node.attr", "gateway").build(); Predicate nodePredicate = RemoteClusterService.getNodePredicate(settings); Set allRoles = new HashSet<>(EnumSet.allOf(DiscoveryNode.Role.class)); Set dedicatedMasterRoles = new HashSet<>(EnumSet.of(DiscoveryNode.Role.MASTER)); @@ -861,8 +861,8 @@ public void testRemoteClusterWithProxy() throws Exception { transportService.start(); transportService.acceptIncomingRequests(); Settings.Builder builder = Settings.builder(); - builder.putList("search.remote.cluster_1.seeds", "cluster_1_node0:8080"); - builder.put("search.remote.cluster_1.proxy", cluster1Proxy); + builder.putList("cluster.remote.cluster_1.seeds", "cluster_1_node0:8080"); + builder.put("cluster.remote.cluster_1.proxy", cluster1Proxy); try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) { assertFalse(service.isCrossClusterSearchEnabled()); service.initializeRemoteClusters(); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterSettingsTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterSettingsTests.java new file mode 100644 index 0000000000000..cfffc3839461e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterSettingsTests.java @@ -0,0 +1,146 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTERS_PROXY; +import static org.elasticsearch.transport.RemoteClusterAware.REMOTE_CLUSTERS_SEEDS; +import static org.elasticsearch.transport.RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_PROXY; +import static org.elasticsearch.transport.RemoteClusterAware.SEARCH_REMOTE_CLUSTERS_SEEDS; +import static org.elasticsearch.transport.RemoteClusterService.ENABLE_REMOTE_CLUSTERS; +import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE; +import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CONNECTIONS_PER_CLUSTER; +import static org.elasticsearch.transport.RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING; +import static org.elasticsearch.transport.RemoteClusterService.REMOTE_NODE_ATTRIBUTE; +import static org.elasticsearch.transport.RemoteClusterService.SEARCH_ENABLE_REMOTE_CLUSTERS; +import static org.elasticsearch.transport.RemoteClusterService.SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE; +import static org.elasticsearch.transport.RemoteClusterService.SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER; +import static org.elasticsearch.transport.RemoteClusterService.SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING; +import static org.elasticsearch.transport.RemoteClusterService.SEARCH_REMOTE_NODE_ATTRIBUTE; +import static org.hamcrest.Matchers.emptyCollectionOf; +import static org.hamcrest.Matchers.equalTo; + +public class RemoteClusterSettingsTests extends ESTestCase { + + public void testConnectionsPerClusterFallback() { + final int value = randomIntBetween(1, 8); + final Settings settings = Settings.builder().put(SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER.getKey(), value).build(); + assertThat(REMOTE_CONNECTIONS_PER_CLUSTER.get(settings), equalTo(value)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_CONNECTIONS_PER_CLUSTER}); + } + + public void testConnectionsPerClusterDefault() { + assertThat(REMOTE_CONNECTIONS_PER_CLUSTER.get(Settings.EMPTY), equalTo(3)); + } + + public void testInitialConnectTimeoutFallback() { + final String value = randomTimeValue(30, 300, "s"); + final Settings settings = Settings.builder().put(SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.getKey(), value).build(); + assertThat( + REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings), + equalTo(TimeValue.parseTimeValue(value, SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.getKey()))); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING}); + } + + public void testInitialConnectTimeoutDefault() { + assertThat(REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(Settings.EMPTY), equalTo(new TimeValue(30, TimeUnit.SECONDS))); + } + + public void testRemoteNodeAttributeFallback() { + final String attribute = randomAlphaOfLength(8); + final Settings settings = Settings.builder().put(SEARCH_REMOTE_NODE_ATTRIBUTE.getKey(), attribute).build(); + assertThat(REMOTE_NODE_ATTRIBUTE.get(settings), equalTo(attribute)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_NODE_ATTRIBUTE}); + } + + public void testRemoteNodeAttributeDefault() { + assertThat(REMOTE_NODE_ATTRIBUTE.get(Settings.EMPTY), 
equalTo("")); + } + + public void testEnableRemoteClustersFallback() { + final boolean enable = randomBoolean(); + final Settings settings = Settings.builder().put(SEARCH_ENABLE_REMOTE_CLUSTERS.getKey(), enable).build(); + assertThat(ENABLE_REMOTE_CLUSTERS.get(settings), equalTo(enable)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_ENABLE_REMOTE_CLUSTERS}); + } + + public void testEnableRemoteClustersDefault() { + assertTrue(ENABLE_REMOTE_CLUSTERS.get(Settings.EMPTY)); + } + + public void testSkipUnavailableFallback() { + final String alias = randomAlphaOfLength(8); + final boolean skip = randomBoolean(); + final Settings settings = + Settings.builder().put(SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(alias).getKey(), skip).build(); + assertThat(REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(alias).get(settings), equalTo(skip)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(alias)}); + } + + public void testSkipUnavailableDefault() { + final String alias = randomAlphaOfLength(8); + assertFalse(REMOTE_CLUSTER_SKIP_UNAVAILABLE.getConcreteSettingForNamespace(alias).get(Settings.EMPTY)); + } + + public void testSeedsFallback() { + final String alias = randomAlphaOfLength(8); + final int numberOfSeeds = randomIntBetween(1, 8); + final List seeds = new ArrayList<>(numberOfSeeds); + for (int i = 0; i < numberOfSeeds; i++) { + seeds.add("localhost:" + Integer.toString(9200 + i)); + } + final Settings settings = + Settings.builder() + .put(SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace(alias).getKey(), String.join(",", seeds)).build(); + assertThat(REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace(alias).get(settings), equalTo(seeds)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace(alias)}); + } + + public void testSeedsDefault() { + final String alias = randomAlphaOfLength(8); + assertThat(REMOTE_CLUSTERS_SEEDS.getConcreteSettingForNamespace(alias).get(Settings.EMPTY), emptyCollectionOf(String.class)); + } + + public void testProxyFallback() { + final String alias = randomAlphaOfLength(8); + final String proxy = randomAlphaOfLength(8); + final int port = randomIntBetween(9200, 9300); + final String value = proxy + ":" + port; + final Settings settings = + Settings.builder() + .put(SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(alias).getKey(), value).build(); + assertThat(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(alias).get(settings), equalTo(value)); + assertSettingDeprecationsAndWarnings(new Setting[]{SEARCH_REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(alias)}); + } + + public void testProxyDefault() { + final String alias = randomAlphaOfLength(8); + assertThat(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(alias).get(Settings.EMPTY), equalTo("")); + } + +} \ No newline at end of file diff --git a/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc b/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc index e5f43a08e7aee..12a5a56533682 100644 --- a/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc +++ b/x-pack/docs/en/security/ccs-clients-integrations/cross-cluster.asciidoc @@ -49,7 +49,7 @@ information about the `xpack.security.enabled` setting, see PUT _cluster/settings { "persistent": { - "search": { + "cluster": { "remote": { "cluster_one": { "seeds": [ "10.0.1.1:9300" ] @@ -82,7 
+82,7 @@ First, enable cluster `one` to perform cross cluster search on remote cluster PUT _cluster/settings { "persistent": { - "search.remote.cluster_two.seeds": [ "10.0.2.1:9300" ] + "cluster.remote.cluster_two.seeds": [ "10.0.2.1:9300" ] } } ----------------------------------------------------------- diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle index 97d4008eb8c1f..1566333e60848 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle @@ -27,7 +27,7 @@ followClusterTestCluster { numNodes = 1 clusterName = 'follow-cluster' setting 'xpack.license.self_generated.type', 'trial' - setting 'search.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" + setting 'cluster.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" } followClusterTestRunner { diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle index 897aed0110e17..d4fe9ee554c3d 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/build.gradle @@ -44,7 +44,7 @@ followClusterTestCluster { dependsOn leaderClusterTestRunner numNodes = 1 clusterName = 'follow-cluster' - setting 'search.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" + setting 'cluster.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' setting 'xpack.monitoring.enabled', 'false' diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index cc726e1a65257..396c247af40b0 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -28,7 +28,7 @@ followClusterTestCluster { numNodes = 1 clusterName = 'follow-cluster' setting 'xpack.license.self_generated.type', 'trial' - setting 'search.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" + setting 'cluster.remote.leader_cluster.seeds', "\"${-> leaderClusterTest.nodes.get(0).transportUri()}\"" } followClusterTestRunner { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 7722a9d216632..8ccac83c86f5d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -183,7 +183,7 @@ public void setup() { rolesStore = mock(CompositeRolesStore.class); clusterService = mock(ClusterService.class); final Settings settings = Settings.builder() - .put("search.remote.other_cluster.seeds", "localhost:9999") + .put("cluster.remote.other_cluster.seeds", "localhost:9999") .build(); final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index ebced2307978b..39b70e0a879b6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -110,8 +110,8 @@ public void setup() { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 2)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 1)) - .put("search.remote.remote.seeds", "127.0.0.1:" + randomIntBetween(9301, 9350)) - .put("search.remote.other_remote.seeds", "127.0.0.1:" + randomIntBetween(9351, 9399)) + .put("cluster.remote.remote.seeds", "127.0.0.1:" + randomIntBetween(9301, 9350)) + .put("cluster.remote.other_remote.seeds", "127.0.0.1:" + randomIntBetween(9351, 9399)) .build(); indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); diff --git a/x-pack/qa/multi-cluster-search-security/build.gradle b/x-pack/qa/multi-cluster-search-security/build.gradle index c06ad68d80325..e79490df829e3 100644 --- a/x-pack/qa/multi-cluster-search-security/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/build.gradle @@ -15,7 +15,7 @@ task remoteClusterTest(type: RestIntegTestTask) { remoteClusterTestCluster { numNodes = 2 clusterName = 'remote-cluster' - setting 'search.remote.connect', false + setting 'cluster.remote.connect', false setting 'xpack.security.enabled', 'true' setting 'xpack.watcher.enabled', 'false' setting 'xpack.monitoring.enabled', 'false' @@ -60,9 +60,9 @@ mixedClusterTestCluster { retries: 10) return tmpFile.exists() } - setting 'search.remote.my_remote_cluster.seeds', "\"${-> remoteClusterTest.nodes.get(0).transportUri()}\"" - setting 'search.remote.connections_per_cluster', 1 - setting 'search.remote.connect', true + setting 'cluster.remote.my_remote_cluster.seeds', "\"${-> remoteClusterTest.nodes.get(0).transportUri()}\"" + setting 'cluster.remote.connections_per_cluster', 1 + setting 'cluster.remote.connect', true } mixedClusterTestRunner { diff --git a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml index dc18ecd8a709e..35c6212451cf0 100644 --- a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml +++ b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/10_basic.yml @@ -160,16 +160,16 @@ teardown: cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.test_remote_cluster.seeds: $remote_ip + cluster.remote.test_remote_cluster.seeds: $remote_ip - - match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}} + - match: {transient: {cluster.remote.test_remote_cluster.seeds: $remote_ip}} - do: headers: { Authorization: "Basic am9lOnMza3JpdA==" } diff --git a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml 
b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml index 5ff92df69b863..490edf794f652 100644 --- a/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml +++ b/x-pack/qa/multi-cluster-search-security/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml @@ -48,16 +48,16 @@ teardown: cluster.get_settings: include_defaults: true - - set: { defaults.search.remote.my_remote_cluster.seeds.0: remote_ip } + - set: { defaults.cluster.remote.my_remote_cluster.seeds.0: remote_ip } - do: cluster.put_settings: flat_settings: true body: transient: - search.remote.test_remote_cluster.seeds: $remote_ip + cluster.remote.test_remote_cluster.seeds: $remote_ip - - match: {transient: {search.remote.test_remote_cluster.seeds: $remote_ip}} + - match: {transient: {cluster.remote.test_remote_cluster.seeds: $remote_ip}} # we do another search here since this will enforce the connection to be established # otherwise the cluster might not have been connected yet. From a721d09c81fc0881f3796550d6c35d459418b883 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 6 Sep 2018 08:01:58 +0200 Subject: [PATCH 08/91] [CCR] Added auto follow patterns feature (#33118) Auto Following Patterns is a cross cluster replication feature that keeps track of whether indices with names matching a specific pattern are being created in the leader cluster and, if so, automatically lets the follower cluster follow those newly created indices. This change adds an `AutoFollowCoordinator` component that is only active on the elected master node. Periodically this component checks the cluster state of remote clusters for new leader indices that match the auto follow patterns defined in `AutoFollowMetadata` custom metadata. This change also adds two new APIs to manage auto follow patterns. A put auto follow pattern API: ``` PUT /_ccr/_auto_follow/{{remote_cluster}} { "leader_index_patterns": ["logs-*", ...], "follow_index_name_pattern": "{{leader_index}}-copy", "max_concurrent_read_batches": 2 ... // other optional parameters } ``` and a delete auto follow pattern API: ``` DELETE /_ccr/_auto_follow/{{remote_cluster_alias}} ``` The auto follow patterns are directly tied to the remote cluster aliases configured in the follow cluster.
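For example, a minimal sketch of the feature end to end (the path and field names come from the new FollowIndexIT test and PutAutoFollowPatternAction.Request in this change; the concrete index names are illustrative): ``` PUT /_ccr/_auto_follow/leader_cluster { "leader_index_patterns": ["logs-*"], "follow_index_name_pattern": "{{leader_index}}-copy" } ``` With this pattern in place, a leader index subsequently created as `logs-20190101` would be auto followed into the follower cluster as `logs-20190101-copy`; the `{{leader_index}}` substitution is implemented by `AutoFollower#getFollowerIndexName`.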
Relates to #33007 Co-authored-by: Jason Tedor jason@tedor.me --- .../org/elasticsearch/common/regex/Regex.java | 9 + .../xpack/ccr/FollowIndexIT.java | 37 ++ .../java/org/elasticsearch/xpack/ccr/Ccr.java | 33 +- .../elasticsearch/xpack/ccr/CcrSettings.java | 10 +- .../ccr/action/AutoFollowCoordinator.java | 306 +++++++++++++++ .../action/DeleteAutoFollowPatternAction.java | 81 ++++ .../action/PutAutoFollowPatternAction.java | 284 ++++++++++++++ ...ransportDeleteAutoFollowPatternAction.java | 102 +++++ .../TransportPutAutoFollowPatternAction.java | 173 +++++++++ .../RestDeleteAutoFollowPatternAction.java | 39 ++ .../rest/RestPutAutoFollowPatternAction.java | 44 +++ .../action/AutoFollowCoordinatorTests.java | 296 +++++++++++++++ .../xpack/ccr/action/AutoFollowTests.java | 189 ++++++++++ .../DeleteAutoFollowPatternRequestTests.java | 23 ++ .../PutAutoFollowPatternRequestTests.java | 63 ++++ ...ortDeleteAutoFollowPatternActionTests.java | 98 +++++ ...nsportPutAutoFollowPatternActionTests.java | 133 +++++++ .../xpack/core/XPackClientPlugin.java | 5 +- .../xpack/core/ccr/AutoFollowMetadata.java | 357 ++++++++++++++++++ .../core/ccr/AutoFollowMetadataTests.java | 53 +++ .../api/ccr.delete_auto_follow_pattern.json | 17 + .../api/ccr.put_auto_follow_pattern.json | 21 ++ .../rest-api-spec/test/ccr/auto_follow.yml | 13 + 23 files changed, 2380 insertions(+), 6 deletions(-) create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java create mode 100644 x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadataTests.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml diff --git 
a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index bcf2dfba3ef82..1f4e465141222 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -138,6 +138,15 @@ public static boolean simpleMatch(String[] patterns, String str) { return false; } + /** + * Similar to {@link #simpleMatch(String[], String)}, but accepts a list of strings instead of an array of strings for the patterns to + * match. + */ + public static boolean simpleMatch(final List patterns, final String str) { + // #simpleMatch(String[], String) is likely to be inlined into this method + return patterns != null && simpleMatch(patterns.toArray(Strings.EMPTY_ARRAY), str); + } + public static boolean simpleMatch(String[] patterns, String[] types) { if (patterns != null && types != null) { for (String type : types) { diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index c14e13e7bb050..17a6db286f283 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ -78,6 +78,34 @@ public void testFollowIndex() throws Exception { } } + public void testAutoFollowPatterns() throws Exception { + assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); + + Request request = new Request("PUT", "/_ccr/_auto_follow/leader_cluster"); + request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); + assertOK(client().performRequest(request)); + + try (RestClient leaderClient = buildLeaderClient()) { + Settings settings = Settings.builder() + .put("index.soft_deletes.enabled", true) + .build(); + request = new Request("PUT", "/logs-20190101"); + request.setJsonEntity("{\"settings\": " + Strings.toString(settings) + + ", \"mappings\": {\"_doc\": {\"properties\": {\"field\": {\"type\": \"keyword\"}}}} }"); + assertOK(leaderClient.performRequest(request)); + + for (int i = 0; i < 5; i++) { + String id = Integer.toString(i); + index(leaderClient, "logs-20190101", id, "field", i, "filtered_field", "true"); + } + } + + assertBusy(() -> { + ensureYellow("logs-20190101"); + verifyDocuments("logs-20190101", 5); + }); + } + private static void index(RestClient client, String index, String id, Object... 
fields) throws IOException { XContentBuilder document = jsonBuilder().startObject(); for (int i = 0; i < fields.length; i += 2) { @@ -135,6 +163,15 @@ private static Map toMap(String response) { return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false); } + private static void ensureYellow(String index) throws IOException { + Request request = new Request("GET", "/_cluster/health/" + index); + request.addParameter("wait_for_status", "yellow"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("timeout", "70s"); + request.addParameter("level", "shards"); + client().performRequest(request); + } + private RestClient buildLeaderClient() throws IOException { assert runningAgainstLeaderCluster == false; String leaderUrl = System.getProperty("tests.leader_host"); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index b00883f5c2af2..cd0561b1c0c60 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -39,21 +39,28 @@ import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; import org.elasticsearch.xpack.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.ccr.action.CreateAndFollowIndexAction; +import org.elasticsearch.xpack.ccr.action.DeleteAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardChangesAction; import org.elasticsearch.xpack.ccr.action.ShardFollowNodeTask; import org.elasticsearch.xpack.ccr.action.ShardFollowTask; import org.elasticsearch.xpack.ccr.action.ShardFollowTasksExecutor; import org.elasticsearch.xpack.ccr.action.TransportCcrStatsAction; +import org.elasticsearch.xpack.ccr.action.TransportDeleteAutoFollowPatternAction; +import org.elasticsearch.xpack.ccr.action.TransportPutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.UnfollowIndexAction; import org.elasticsearch.xpack.ccr.action.bulk.BulkShardOperationsAction; import org.elasticsearch.xpack.ccr.action.bulk.TransportBulkShardOperationsAction; import org.elasticsearch.xpack.ccr.index.engine.FollowingEngineFactory; import org.elasticsearch.xpack.ccr.rest.RestCcrStatsAction; import org.elasticsearch.xpack.ccr.rest.RestCreateAndFollowIndexAction; +import org.elasticsearch.xpack.ccr.rest.RestDeleteAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestFollowIndexAction; +import org.elasticsearch.xpack.ccr.rest.RestPutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.rest.RestUnfollowIndexAction; import org.elasticsearch.xpack.core.XPackPlugin; @@ -113,7 +120,14 @@ public Collection createComponents( final Environment environment, final NodeEnvironment nodeEnvironment, final NamedWriteableRegistry namedWriteableRegistry) { - return Collections.singleton(ccrLicenseChecker); + if (enabled == false) { + return emptyList(); + } + + return Arrays.asList( + ccrLicenseChecker, + new AutoFollowCoordinator(settings, client, threadPool, clusterService) + ); } @Override @@ -128,12 +142,18 @@ public List> getPersistentTasksExecutor(ClusterServic } return Arrays.asList( + // internal actions new 
ActionHandler<>(BulkShardOperationsAction.INSTANCE, TransportBulkShardOperationsAction.class), + new ActionHandler<>(ShardChangesAction.INSTANCE, ShardChangesAction.TransportAction.class), + // stats action new ActionHandler<>(CcrStatsAction.INSTANCE, TransportCcrStatsAction.class), + // follow actions new ActionHandler<>(CreateAndFollowIndexAction.INSTANCE, CreateAndFollowIndexAction.TransportAction.class), new ActionHandler<>(FollowIndexAction.INSTANCE, FollowIndexAction.TransportAction.class), - new ActionHandler<>(ShardChangesAction.INSTANCE, ShardChangesAction.TransportAction.class), - new ActionHandler<>(UnfollowIndexAction.INSTANCE, UnfollowIndexAction.TransportAction.class)); + new ActionHandler<>(UnfollowIndexAction.INSTANCE, UnfollowIndexAction.TransportAction.class), + // auto-follow actions + new ActionHandler<>(DeleteAutoFollowPatternAction.INSTANCE, TransportDeleteAutoFollowPatternAction.class), + new ActionHandler<>(PutAutoFollowPatternAction.INSTANCE, TransportPutAutoFollowPatternAction.class)); } public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, @@ -141,10 +161,15 @@ public List getRestHandlers(Settings settings, RestController restC IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster) { return Arrays.asList( + // stats API new RestCcrStatsAction(settings, restController), + // follow APIs new RestCreateAndFollowIndexAction(settings, restController), new RestFollowIndexAction(settings, restController), - new RestUnfollowIndexAction(settings, restController)); + new RestUnfollowIndexAction(settings, restController), + // auto-follow APIs + new RestDeleteAutoFollowPatternAction(settings, restController), + new RestPutAutoFollowPatternAction(settings, restController)); } public List getNamedWriteables() { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java index 6960766bad0cb..a942990ea5a74 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrSettings.java @@ -7,6 +7,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.unit.TimeValue; import java.util.Arrays; import java.util.List; @@ -32,6 +33,12 @@ private CcrSettings() { public static final Setting CCR_FOLLOWING_INDEX_SETTING = Setting.boolSetting("index.xpack.ccr.following_index", false, Setting.Property.IndexScope); + /** + * Setting for controlling the interval in between polling leader clusters to check whether there are indices to follow + */ + public static final Setting CCR_AUTO_FOLLOW_POLL_INTERVAL = + Setting.timeSetting("xpack.ccr.auto_follow.poll_interval", TimeValue.timeValueMillis(2500), Property.NodeScope); + /** * The settings defined by CCR. 
* @@ -40,7 +47,8 @@ private CcrSettings() { static List> getSettings() { return Arrays.asList( CCR_ENABLED_SETTING, - CCR_FOLLOWING_INDEX_SETTING); + CCR_FOLLOWING_INDEX_SETTING, + CCR_AUTO_FOLLOW_POLL_INTERVAL); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java new file mode 100644 index 0000000000000..234fe32cdd0ee --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -0,0 +1,306 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateApplier; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.index.Index; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ccr.CcrSettings; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; + +/** + * A component that runs only on the elected master node and follows leader indices automatically + * if they match an auto follow pattern that is defined in {@link AutoFollowMetadata}.
+ */ +public class AutoFollowCoordinator implements ClusterStateApplier { + + private static final Logger LOGGER = LogManager.getLogger(AutoFollowCoordinator.class); + + private final Client client; + private final TimeValue pollInterval; + private final ThreadPool threadPool; + private final ClusterService clusterService; + + private volatile boolean localNodeMaster = false; + + public AutoFollowCoordinator(Settings settings, + Client client, + ThreadPool threadPool, + ClusterService clusterService) { + this.client = client; + this.threadPool = threadPool; + this.clusterService = clusterService; + + this.pollInterval = CcrSettings.CCR_AUTO_FOLLOW_POLL_INTERVAL.get(settings); + clusterService.addStateApplier(this); + } + + private void doAutoFollow() { + if (localNodeMaster == false) { + return; + } + ClusterState followerClusterState = clusterService.state(); + AutoFollowMetadata autoFollowMetadata = followerClusterState.getMetaData().custom(AutoFollowMetadata.TYPE); + if (autoFollowMetadata == null) { + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); + return; + } + + if (autoFollowMetadata.getPatterns().isEmpty()) { + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); + return; + } + + Consumer handler = e -> { + if (e != null) { + LOGGER.warn("Failure occurred during auto following indices", e); + } + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); + }; + AutoFollower operation = new AutoFollower(client, handler, followerClusterState) { + + @Override + void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + ClusterStateRequest request = new ClusterStateRequest(); + request.clear(); + request.metaData(true); + leaderClient.admin().cluster().state(request, + ActionListener.wrap( + r -> handler.accept(r.getState(), null), + e -> handler.accept(null, e) + ) + ); + } + + @Override + void createAndFollow(FollowIndexAction.Request followRequest, + Runnable successHandler, + Consumer failureHandler) { + client.execute(CreateAndFollowIndexAction.INSTANCE, new CreateAndFollowIndexAction.Request(followRequest), + ActionListener.wrap(r -> successHandler.run(), failureHandler)); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + clusterService.submitStateUpdateTask("update_auto_follow_metadata", new ClusterStateUpdateTask() { + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return updateFunction.apply(currentState); + } + + @Override + public void onFailure(String source, Exception e) { + handler.accept(e); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + handler.accept(null); + } + }); + } + + }; + operation.autoFollowIndices(); + } + + @Override + public void applyClusterState(ClusterChangedEvent event) { + final boolean beforeLocalMasterNode = localNodeMaster; + localNodeMaster = event.localNodeMaster(); + if (beforeLocalMasterNode == false && localNodeMaster) { + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); + } + } + + abstract static class AutoFollower { + + private final Client client; + private final Consumer handler; + private final ClusterState followerClusterState; + private final AutoFollowMetadata autoFollowMetadata; + + private final CountDown autoFollowPatternsCountDown; + private final AtomicReference autoFollowPatternsErrorHolder = new AtomicReference<>(); + + AutoFollower(Client client, 
Consumer handler, ClusterState followerClusterState) { + this.client = client; + this.handler = handler; + this.followerClusterState = followerClusterState; + this.autoFollowMetadata = followerClusterState.getMetaData().custom(AutoFollowMetadata.TYPE); + this.autoFollowPatternsCountDown = new CountDown(autoFollowMetadata.getPatterns().size()); + } + + void autoFollowIndices() { + for (Map.Entry entry : autoFollowMetadata.getPatterns().entrySet()) { + String clusterAlias = entry.getKey(); + AutoFollowPattern autoFollowPattern = entry.getValue(); + Client leaderClient = clusterAlias.equals("_local_") ? client : client.getRemoteClusterClient(clusterAlias); + List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); + + getLeaderClusterState(leaderClient, (leaderClusterState, e) -> { + if (leaderClusterState != null) { + assert e == null; + handleClusterAlias(clusterAlias, autoFollowPattern, followedIndices, leaderClusterState); + } else { + finalise(e); + } + }); + } + } + + private void handleClusterAlias(String clusterAlias, AutoFollowPattern autoFollowPattern, + List followedIndexUUIDs, ClusterState leaderClusterState) { + final List leaderIndicesToFollow = + getLeaderIndicesToFollow(autoFollowPattern, leaderClusterState, followerClusterState, followedIndexUUIDs); + if (leaderIndicesToFollow.isEmpty()) { + finalise(null); + } else { + final CountDown leaderIndicesCountDown = new CountDown(leaderIndicesToFollow.size()); + final AtomicReference leaderIndicesErrorHolder = new AtomicReference<>(); + for (Index indexToFollow : leaderIndicesToFollow) { + final String leaderIndexName = indexToFollow.getName(); + final String followIndexName = getFollowerIndexName(autoFollowPattern, leaderIndexName); + + String leaderIndexNameWithClusterAliasPrefix = clusterAlias.equals("_local_") ? 
leaderIndexName : + clusterAlias + ":" + leaderIndexName; + FollowIndexAction.Request followRequest = + new FollowIndexAction.Request(leaderIndexNameWithClusterAliasPrefix, followIndexName, + autoFollowPattern.getMaxBatchOperationCount(), autoFollowPattern.getMaxConcurrentReadBatches(), + autoFollowPattern.getMaxOperationSizeInBytes(), autoFollowPattern.getMaxConcurrentWriteBatches(), + autoFollowPattern.getMaxWriteBufferSize(), autoFollowPattern.getRetryTimeout(), + autoFollowPattern.getIdleShardRetryDelay()); + + // Execute if the create and follow api call succeeds: + Runnable successHandler = () -> { + LOGGER.info("Auto followed leader index [{}] as follow index [{}]", leaderIndexName, followIndexName); + + // This function updates the auto follow metadata in the cluster to record that the leader index has been followed: + // (so that we do not try to follow it in subsequent auto follow runs) + Function function = recordLeaderIndexAsFollowFunction(clusterAlias, indexToFollow); + // The coordinator always runs on the elected master node, so we can update cluster state here: + updateAutoFollowMetadata(function, updateError -> { + if (updateError != null) { + LOGGER.error("Failed to mark leader index [" + leaderIndexName + "] as auto followed", updateError); + if (leaderIndicesErrorHolder.compareAndSet(null, updateError) == false) { + leaderIndicesErrorHolder.get().addSuppressed(updateError); + } + } else { + LOGGER.debug("Successfully marked leader index [{}] as auto followed", leaderIndexName); + } + if (leaderIndicesCountDown.countDown()) { + finalise(leaderIndicesErrorHolder.get()); + } + }); + }; + // Execute if the create and follow api call fails: + Consumer failureHandler = followError -> { + assert followError != null; + LOGGER.warn("Failed to auto follow leader index [" + leaderIndexName + "]", followError); + if (leaderIndicesCountDown.countDown()) { + finalise(followError); + } + }; + createAndFollow(followRequest, successHandler, failureHandler); + } + } + } + + private void finalise(Exception failure) { + if (autoFollowPatternsErrorHolder.compareAndSet(null, failure) == false) { + autoFollowPatternsErrorHolder.get().addSuppressed(failure); + } + + if (autoFollowPatternsCountDown.countDown()) { + handler.accept(autoFollowPatternsErrorHolder.get()); + } + } + + static List getLeaderIndicesToFollow(AutoFollowPattern autoFollowPattern, + ClusterState leaderClusterState, + ClusterState followerClusterState, + List followedIndexUUIDs) { + List leaderIndicesToFollow = new ArrayList<>(); + for (IndexMetaData leaderIndexMetaData : leaderClusterState.getMetaData()) { + if (autoFollowPattern.match(leaderIndexMetaData.getIndex().getName())) { + if (followedIndexUUIDs.contains(leaderIndexMetaData.getIndex().getUUID()) == false) { + // TODO: iterate over the indices in the followerClusterState and check whether an IndexMetaData + // has a leader index uuid custom metadata entry that matches with the uuid of the leaderIndexMetaData variable + // If so then handle it differently: not follow it, but just add an entry to + // AutoFollowMetadata#followedLeaderIndexUUIDs + leaderIndicesToFollow.add(leaderIndexMetaData.getIndex()); + } + } + } + return leaderIndicesToFollow; + } + + static String getFollowerIndexName(AutoFollowPattern autoFollowPattern, String leaderIndexName) { + if (autoFollowPattern.getFollowIndexPattern() != null) { + return autoFollowPattern.getFollowIndexPattern().replace("{{leader_index}}", leaderIndexName); + } else { + return leaderIndexName; + } + } + + static Function
recordLeaderIndexAsFollowFunction(String clusterAlias, Index indexToFollow) { + return currentState -> { + AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); + + Map> newFollowedIndexUUIDS = + new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + newFollowedIndexUUIDS.get(clusterAlias).add(indexToFollow.getUUID()); + + ClusterState.Builder newState = ClusterState.builder(currentState); + AutoFollowMetadata newAutoFollowMetadata = + new AutoFollowMetadata(currentAutoFollowMetadata.getPatterns(), newFollowedIndexUUIDS); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata) + .build()); + return newState.build(); + }; + } + + // abstract methods to make unit testing possible: + + abstract void getLeaderClusterState(Client leaderClient, + BiConsumer handler); + + abstract void createAndFollow(FollowIndexAction.Request followRequest, + Runnable successHandler, + Consumer failureHandler); + + abstract void updateAutoFollowMetadata(Function updateFunction, + Consumer handler); + + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternAction.java new file mode 100644 index 0000000000000..82e142202d26c --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternAction.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class DeleteAutoFollowPatternAction extends Action { + + public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/delete"; + public static final DeleteAutoFollowPatternAction INSTANCE = new DeleteAutoFollowPatternAction(); + + private DeleteAutoFollowPatternAction() { + super(NAME); + } + + @Override + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + public static class Request extends AcknowledgedRequest { + + private String leaderClusterAlias; + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (leaderClusterAlias == null) { + validationException = addValidationError("leaderClusterAlias is missing", validationException); + } + return validationException; + } + + public String getLeaderClusterAlias() { + return leaderClusterAlias; + } + + public void setLeaderClusterAlias(String leaderClusterAlias) { + this.leaderClusterAlias = leaderClusterAlias; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + leaderClusterAlias = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(leaderClusterAlias); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(leaderClusterAlias, request.leaderClusterAlias); + } + + @Override + public int hashCode() { + return Objects.hash(leaderClusterAlias); + } + } + +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternAction.java new file mode 100644 index 0000000000000..a01fd8e3bc209 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternAction.java @@ -0,0 +1,284 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class PutAutoFollowPatternAction extends Action { + + public static final String NAME = "cluster:admin/xpack/ccr/auto_follow_pattern/put"; + public static final PutAutoFollowPatternAction INSTANCE = new PutAutoFollowPatternAction(); + + private PutAutoFollowPatternAction() { + super(NAME); + } + + @Override + public AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + public static class Request extends AcknowledgedRequest implements ToXContentObject { + + static final ParseField LEADER_CLUSTER_ALIAS_FIELD = new ParseField("leader_cluster_alias"); + static final ParseField LEADER_INDEX_PATTERNS_FIELD = new ParseField("leader_index_patterns"); + static final ParseField FOLLOW_INDEX_NAME_PATTERN_FIELD = new ParseField("follow_index_name_pattern"); + + private static final ObjectParser PARSER = new ObjectParser<>("put_auto_follow_pattern_request", Request::new); + + static { + PARSER.declareString(Request::setLeaderClusterAlias, LEADER_CLUSTER_ALIAS_FIELD); + PARSER.declareStringArray(Request::setLeaderIndexPatterns, LEADER_INDEX_PATTERNS_FIELD); + PARSER.declareString(Request::setFollowIndexNamePattern, FOLLOW_INDEX_NAME_PATTERN_FIELD); + PARSER.declareInt(Request::setMaxBatchOperationCount, AutoFollowPattern.MAX_BATCH_OPERATION_COUNT); + PARSER.declareInt(Request::setMaxConcurrentReadBatches, AutoFollowPattern.MAX_CONCURRENT_READ_BATCHES); + PARSER.declareLong(Request::setMaxOperationSizeInBytes, AutoFollowPattern.MAX_BATCH_SIZE_IN_BYTES); + PARSER.declareInt(Request::setMaxConcurrentWriteBatches, AutoFollowPattern.MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(Request::setMaxWriteBufferSize, AutoFollowPattern.MAX_WRITE_BUFFER_SIZE); + PARSER.declareField(Request::setRetryTimeout, + (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.RETRY_TIMEOUT.getPreferredName()), + ShardFollowTask.RETRY_TIMEOUT, ObjectParser.ValueType.STRING); + PARSER.declareField(Request::setIdleShardRetryDelay, + (p, c) -> TimeValue.parseTimeValue(p.text(), AutoFollowPattern.IDLE_SHARD_RETRY_DELAY.getPreferredName()), + ShardFollowTask.IDLE_SHARD_RETRY_DELAY, ObjectParser.ValueType.STRING); + } + + public static Request fromXContent(XContentParser parser, String remoteClusterAlias) throws IOException { + Request request = PARSER.parse(parser, null); + if (remoteClusterAlias != null) { + if (request.leaderClusterAlias == null) { + request.leaderClusterAlias = remoteClusterAlias; + } else { + if (request.leaderClusterAlias.equals(remoteClusterAlias) == false) { + throw new 
IllegalArgumentException("provided leaderClusterAlias is not equal"); + } + } + } + return request; + } + + private String leaderClusterAlias; + private List leaderIndexPatterns; + private String followIndexNamePattern; + + private Integer maxBatchOperationCount; + private Integer maxConcurrentReadBatches; + private Long maxOperationSizeInBytes; + private Integer maxConcurrentWriteBatches; + private Integer maxWriteBufferSize; + private TimeValue retryTimeout; + private TimeValue idleShardRetryDelay; + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (leaderClusterAlias == null) { + validationException = addValidationError("leaderClusterAlias is missing", validationException); + } + if (leaderIndexPatterns == null || leaderIndexPatterns.isEmpty()) { + validationException = addValidationError("leaderIndexPatterns is missing", validationException); + } + return validationException; + } + + public String getLeaderClusterAlias() { + return leaderClusterAlias; + } + + public void setLeaderClusterAlias(String leaderClusterAlias) { + this.leaderClusterAlias = leaderClusterAlias; + } + + public List getLeaderIndexPatterns() { + return leaderIndexPatterns; + } + + public void setLeaderIndexPatterns(List leaderIndexPatterns) { + this.leaderIndexPatterns = leaderIndexPatterns; + } + + public String getFollowIndexNamePattern() { + return followIndexNamePattern; + } + + public void setFollowIndexNamePattern(String followIndexNamePattern) { + this.followIndexNamePattern = followIndexNamePattern; + } + + public Integer getMaxBatchOperationCount() { + return maxBatchOperationCount; + } + + public void setMaxBatchOperationCount(Integer maxBatchOperationCount) { + this.maxBatchOperationCount = maxBatchOperationCount; + } + + public Integer getMaxConcurrentReadBatches() { + return maxConcurrentReadBatches; + } + + public void setMaxConcurrentReadBatches(Integer maxConcurrentReadBatches) { + this.maxConcurrentReadBatches = maxConcurrentReadBatches; + } + + public Long getMaxOperationSizeInBytes() { + return maxOperationSizeInBytes; + } + + public void setMaxOperationSizeInBytes(Long maxOperationSizeInBytes) { + this.maxOperationSizeInBytes = maxOperationSizeInBytes; + } + + public Integer getMaxConcurrentWriteBatches() { + return maxConcurrentWriteBatches; + } + + public void setMaxConcurrentWriteBatches(Integer maxConcurrentWriteBatches) { + this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + } + + public Integer getMaxWriteBufferSize() { + return maxWriteBufferSize; + } + + public void setMaxWriteBufferSize(Integer maxWriteBufferSize) { + this.maxWriteBufferSize = maxWriteBufferSize; + } + + public TimeValue getRetryTimeout() { + return retryTimeout; + } + + public void setRetryTimeout(TimeValue retryTimeout) { + this.retryTimeout = retryTimeout; + } + + public TimeValue getIdleShardRetryDelay() { + return idleShardRetryDelay; + } + + public void setIdleShardRetryDelay(TimeValue idleShardRetryDelay) { + this.idleShardRetryDelay = idleShardRetryDelay; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + leaderClusterAlias = in.readString(); + leaderIndexPatterns = in.readList(StreamInput::readString); + followIndexNamePattern = in.readOptionalString(); + maxBatchOperationCount = in.readOptionalVInt(); + maxConcurrentReadBatches = in.readOptionalVInt(); + maxOperationSizeInBytes = in.readOptionalLong(); + maxConcurrentWriteBatches = in.readOptionalVInt(); + 
maxWriteBufferSize = in.readOptionalVInt(); + retryTimeout = in.readOptionalTimeValue(); + idleShardRetryDelay = in.readOptionalTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(leaderClusterAlias); + out.writeStringList(leaderIndexPatterns); + out.writeOptionalString(followIndexNamePattern); + out.writeOptionalVInt(maxBatchOperationCount); + out.writeOptionalVInt(maxConcurrentReadBatches); + out.writeOptionalLong(maxOperationSizeInBytes); + out.writeOptionalVInt(maxConcurrentWriteBatches); + out.writeOptionalVInt(maxWriteBufferSize); + out.writeOptionalTimeValue(retryTimeout); + out.writeOptionalTimeValue(idleShardRetryDelay); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field(LEADER_CLUSTER_ALIAS_FIELD.getPreferredName(), leaderClusterAlias); + builder.field(LEADER_INDEX_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns); + if (followIndexNamePattern != null) { + builder.field(FOLLOW_INDEX_NAME_PATTERN_FIELD.getPreferredName(), followIndexNamePattern); + } + if (maxBatchOperationCount != null) { + builder.field(ShardFollowTask.MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + } + if (maxOperationSizeInBytes != null) { + builder.field(ShardFollowTask.MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxOperationSizeInBytes); + } + if (maxWriteBufferSize != null) { + builder.field(ShardFollowTask.MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + } + if (maxConcurrentReadBatches != null) { + builder.field(ShardFollowTask.MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); + } + if (maxConcurrentWriteBatches != null) { + builder.field(ShardFollowTask.MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + } + if (retryTimeout != null) { + builder.field(ShardFollowTask.RETRY_TIMEOUT.getPreferredName(), retryTimeout.getStringRep()); + } + if (idleShardRetryDelay != null) { + builder.field(ShardFollowTask.IDLE_SHARD_RETRY_DELAY.getPreferredName(), idleShardRetryDelay.getStringRep()); + } + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(leaderClusterAlias, request.leaderClusterAlias) && + Objects.equals(leaderIndexPatterns, request.leaderIndexPatterns) && + Objects.equals(followIndexNamePattern, request.followIndexNamePattern) && + Objects.equals(maxBatchOperationCount, request.maxBatchOperationCount) && + Objects.equals(maxConcurrentReadBatches, request.maxConcurrentReadBatches) && + Objects.equals(maxOperationSizeInBytes, request.maxOperationSizeInBytes) && + Objects.equals(maxConcurrentWriteBatches, request.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferSize, request.maxWriteBufferSize) && + Objects.equals(retryTimeout, request.retryTimeout) && + Objects.equals(idleShardRetryDelay, request.idleShardRetryDelay); + } + + @Override + public int hashCode() { + return Objects.hash( + leaderClusterAlias, + leaderIndexPatterns, + followIndexNamePattern, + maxBatchOperationCount, + maxConcurrentReadBatches, + maxOperationSizeInBytes, + maxConcurrentWriteBatches, + maxWriteBufferSize, + retryTimeout, + idleShardRetryDelay + ); + } + } + +} diff --git 
a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java new file mode 100644 index 0000000000000..6c1ca81e7c49a --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternAction.java @@ -0,0 +1,102 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class TransportDeleteAutoFollowPatternAction extends + TransportMasterNodeAction<DeleteAutoFollowPatternAction.Request, AcknowledgedResponse> { + + @Inject + public TransportDeleteAutoFollowPatternAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, DeleteAutoFollowPatternAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, DeleteAutoFollowPatternAction.Request::new); + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + @Override + protected void masterOperation(DeleteAutoFollowPatternAction.Request request, + ClusterState state, + ActionListener<AcknowledgedResponse> listener) throws Exception { + clusterService.submitStateUpdateTask("delete-auto-follow-pattern-" + request.getLeaderClusterAlias(), + new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) { + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerDelete(request, currentState); + } + }); + } + + static ClusterState innerDelete(DeleteAutoFollowPatternAction.Request request, ClusterState currentState) { + AutoFollowMetadata currentAutoFollowMetadata = currentState.metaData().custom(AutoFollowMetadata.TYPE); + if (currentAutoFollowMetadata == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found",
+ request.getLeaderClusterAlias()); + } + Map<String, AutoFollowPattern> patterns = currentAutoFollowMetadata.getPatterns(); + AutoFollowPattern autoFollowPatternToRemove = patterns.get(request.getLeaderClusterAlias()); + if (autoFollowPatternToRemove == null) { + throw new ResourceNotFoundException("no auto-follow patterns for cluster alias [{}] found", + request.getLeaderClusterAlias()); + } + + final Map<String, AutoFollowPattern> patternsCopy = new HashMap<>(patterns); + final Map<String, List<String>> followedLeaderIndexUUIDSCopy = + new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + patternsCopy.remove(request.getLeaderClusterAlias()); + followedLeaderIndexUUIDSCopy.remove(request.getLeaderClusterAlias()); + + AutoFollowMetadata newAutoFollowMetadata = new AutoFollowMetadata(patternsCopy, followedLeaderIndexUUIDSCopy); + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, newAutoFollowMetadata) + .build()); + return newState.build(); + } + + @Override + protected ClusterBlockException checkBlock(DeleteAutoFollowPatternAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java new file mode 100644 index 0000000000000..3d3e342c0cd3e --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.AckedClusterStateUpdateTask; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class TransportPutAutoFollowPatternAction extends + TransportMasterNodeAction<PutAutoFollowPatternAction.Request, AcknowledgedResponse> { + + private final Client client; + + @Inject + public TransportPutAutoFollowPatternAction(Settings settings, TransportService transportService, ClusterService clusterService, + ThreadPool threadPool, ActionFilters actionFilters, Client client, + IndexNameExpressionResolver indexNameExpressionResolver) { + super(settings, PutAutoFollowPatternAction.NAME, transportService, clusterService, threadPool, actionFilters, + indexNameExpressionResolver, PutAutoFollowPatternAction.Request::new); + this.client = client; + } + + @Override + protected String executor() { + return ThreadPool.Names.SAME; + } + + @Override + protected AcknowledgedResponse newResponse() { + return new AcknowledgedResponse(); + } + + @Override + protected void masterOperation(PutAutoFollowPatternAction.Request request, + ClusterState state, + ActionListener<AcknowledgedResponse> listener) throws Exception { + final Client leaderClient; + if (request.getLeaderClusterAlias().equals("_local_")) { + leaderClient = client; + } else { + leaderClient = client.getRemoteClusterClient(request.getLeaderClusterAlias()); + } + + final ClusterStateRequest clusterStateRequest = new ClusterStateRequest(); + clusterStateRequest.clear(); + clusterStateRequest.metaData(true); + + leaderClient.admin().cluster().state(clusterStateRequest, ActionListener.wrap(clusterStateResponse -> { + final ClusterState leaderClusterState = clusterStateResponse.getState(); + clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderClusterAlias(), + new AckedClusterStateUpdateTask<AcknowledgedResponse>(request, listener) { + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerPut(request, currentState, leaderClusterState); + } + }); + }, listener::onFailure)); + } + + static ClusterState innerPut(PutAutoFollowPatternAction.Request request, + ClusterState localState, + ClusterState leaderClusterState) { + // auto patterns are always overwritten + // only
already followed index uuids are updated + + AutoFollowMetadata currentAutoFollowMetadata = localState.metaData().custom(AutoFollowMetadata.TYPE); + Map<String, List<String>> followedLeaderIndices; + Map<String, AutoFollowPattern> patterns; + if (currentAutoFollowMetadata != null) { + patterns = new HashMap<>(currentAutoFollowMetadata.getPatterns()); + followedLeaderIndices = new HashMap<>(currentAutoFollowMetadata.getFollowedLeaderIndexUUIDs()); + } else { + patterns = new HashMap<>(); + followedLeaderIndices = new HashMap<>(); + } + + AutoFollowPattern previousPattern = patterns.get(request.getLeaderClusterAlias()); + List<String> followedIndexUUIDs = followedLeaderIndices.get(request.getLeaderClusterAlias()); + if (followedIndexUUIDs == null) { + followedIndexUUIDs = new ArrayList<>(); + followedLeaderIndices.put(request.getLeaderClusterAlias(), followedIndexUUIDs); + } + + // Mark existing leader indices as already auto followed: + if (previousPattern != null) { + markExistingIndicesAsAutoFollowedForNewPatterns(request.getLeaderIndexPatterns(), leaderClusterState.metaData(), + previousPattern, followedIndexUUIDs); + } else { + markExistingIndicesAsAutoFollowed(request.getLeaderIndexPatterns(), leaderClusterState.metaData(), + followedIndexUUIDs); + } + + AutoFollowPattern autoFollowPattern = new AutoFollowPattern( + request.getLeaderIndexPatterns(), + request.getFollowIndexNamePattern(), + request.getMaxBatchOperationCount(), + request.getMaxConcurrentReadBatches(), + request.getMaxOperationSizeInBytes(), + request.getMaxConcurrentWriteBatches(), + request.getMaxWriteBufferSize(), + request.getRetryTimeout(), + request.getIdleShardRetryDelay() + ); + patterns.put(request.getLeaderClusterAlias(), autoFollowPattern); + ClusterState.Builder newState = ClusterState.builder(localState); + newState.metaData(MetaData.builder(localState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, new AutoFollowMetadata(patterns, followedLeaderIndices)) + .build()); + return newState.build(); + } + + private static void markExistingIndicesAsAutoFollowedForNewPatterns( + List<String> leaderIndexPatterns, + MetaData leaderMetaData, + AutoFollowPattern previousPattern, + List<String> followedIndexUUIDS) { + + final List<String> newPatterns = leaderIndexPatterns + .stream() + .filter(p -> previousPattern.getLeaderIndexPatterns().contains(p) == false) + .collect(Collectors.toList()); + markExistingIndicesAsAutoFollowed(newPatterns, leaderMetaData, followedIndexUUIDS); + } + + private static void markExistingIndicesAsAutoFollowed( + List<String> patterns, + MetaData leaderMetaData, + List<String> followedIndexUUIDS) { + + for (final IndexMetaData indexMetaData : leaderMetaData) { + if (AutoFollowPattern.match(patterns, indexMetaData.getIndex().getName())) { + followedIndexUUIDS.add(indexMetaData.getIndexUUID()); + } + } + } + + @Override + protected ClusterBlockException checkBlock(PutAutoFollowPatternAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_WRITE); + } +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java new file mode 100644 index 0000000000000..bd3585c7982c1 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ccr.action.DeleteAutoFollowPatternAction.Request; + +import java.io.IOException; + +import static org.elasticsearch.xpack.ccr.action.DeleteAutoFollowPatternAction.INSTANCE; + +public class RestDeleteAutoFollowPatternAction extends BaseRestHandler { + + public RestDeleteAutoFollowPatternAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/_auto_follow/{leader_cluster_alias}", this); + } + + @Override + public String getName() { + return "ccr_delete_auto_follow_pattern_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Request request = new Request(); + request.setLeaderClusterAlias(restRequest.param("leader_cluster_alias")); + return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); + } + +} diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java new file mode 100644 index 0000000000000..d92ebb7b0bbe5 --- /dev/null +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.rest; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.ccr.action.PutAutoFollowPatternAction.Request; + +import java.io.IOException; + +import static org.elasticsearch.xpack.ccr.action.PutAutoFollowPatternAction.INSTANCE; + +public class RestPutAutoFollowPatternAction extends BaseRestHandler { + + public RestPutAutoFollowPatternAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.PUT, "/_ccr/_auto_follow/{leader_cluster_alias}", this); + } + + @Override + public String getName() { + return "ccr_put_auto_follow_pattern_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + Request request = createRequest(restRequest); + return channel -> client.execute(INSTANCE, request, new RestToXContentListener<>(channel)); + } + + static Request createRequest(RestRequest restRequest) throws IOException { + try (XContentParser parser = restRequest.contentOrSourceParamParser()) { + return Request.fromXContent(parser, restRequest.param("leader_cluster_alias")); + } + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java new file mode 100644 index 0000000000000..dd1376a4d7a73 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -0,0 +1,296 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.Version; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator.AutoFollower; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.BiConsumer; +import java.util.function.Consumer; +import java.util.function.Function; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class AutoFollowCoordinatorTests extends ESTestCase { + + public void testAutoFollower() { + Client client = mock(Client.class); + when(client.getRemoteClusterClient(anyString())).thenReturn(client); + + ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0))) + .build(); + + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + Map patterns = new HashMap<>(); + patterns.put("remote", autoFollowPattern); + Map> followedLeaderIndexUUIDS = new HashMap<>(); + followedLeaderIndexUUIDS.put("remote", new ArrayList<>()); + AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS); + + ClusterState currentState = ClusterState.builder(new ClusterName("name")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)) + .build(); + + boolean[] invoked = new boolean[]{false}; + Consumer handler = e -> { + invoked[0] = true; + assertThat(e, nullValue()); + }; + AutoFollower autoFollower = new AutoFollower(client, handler, currentState) { + @Override + void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + handler.accept(leaderState, null); + } + + @Override + void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { + assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101")); + assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + successHandler.run(); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + ClusterState resultCs = updateFunction.apply(currentState); + AutoFollowMetadata result = resultCs.metaData().custom(AutoFollowMetadata.TYPE); + assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); + assertThat(result.getFollowedLeaderIndexUUIDs().get("remote").size(), equalTo(1)); + handler.accept(null); + } + }; + autoFollower.autoFollowIndices(); + assertThat(invoked[0], is(true)); + } + + public void 
testAutoFollowerClusterStateApiFailure() { + Client client = mock(Client.class); + when(client.getRemoteClusterClient(anyString())).thenReturn(client); + + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + Map patterns = new HashMap<>(); + patterns.put("remote", autoFollowPattern); + Map> followedLeaderIndexUUIDS = new HashMap<>(); + followedLeaderIndexUUIDS.put("remote", new ArrayList<>()); + AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS); + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)) + .build(); + + Exception failure = new RuntimeException("failure"); + boolean[] invoked = new boolean[]{false}; + Consumer handler = e -> { + invoked[0] = true; + assertThat(e, sameInstance(failure)); + }; + AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + @Override + void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + handler.accept(null, failure); + } + + @Override + void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { + fail("should not get here"); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + fail("should not get here"); + } + }; + autoFollower.autoFollowIndices(); + assertThat(invoked[0], is(true)); + } + + public void testAutoFollowerUpdateClusterStateFailure() { + Client client = mock(Client.class); + when(client.getRemoteClusterClient(anyString())).thenReturn(client); + + ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0))) + .build(); + + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + Map patterns = new HashMap<>(); + patterns.put("remote", autoFollowPattern); + Map> followedLeaderIndexUUIDS = new HashMap<>(); + followedLeaderIndexUUIDS.put("remote", new ArrayList<>()); + AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS); + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)) + .build(); + + Exception failure = new RuntimeException("failure"); + boolean[] invoked = new boolean[]{false}; + Consumer handler = e -> { + invoked[0] = true; + assertThat(e, sameInstance(failure)); + }; + AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + @Override + void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + handler.accept(leaderState, null); + } + + @Override + void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { + assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101")); + assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + successHandler.run(); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + handler.accept(failure); + } + }; + autoFollower.autoFollowIndices(); + assertThat(invoked[0], is(true)); + } + + public void 
testAutoFollowerCreateAndFollowApiCallFailure() { + Client client = mock(Client.class); + when(client.getRemoteClusterClient(anyString())).thenReturn(client); + + ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().put(IndexMetaData.builder("logs-20190101") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0))) + .build(); + + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + Map patterns = new HashMap<>(); + patterns.put("remote", autoFollowPattern); + Map> followedLeaderIndexUUIDS = new HashMap<>(); + followedLeaderIndexUUIDS.put("remote", new ArrayList<>()); + AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata(patterns, followedLeaderIndexUUIDS); + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata)) + .build(); + + Exception failure = new RuntimeException("failure"); + boolean[] invoked = new boolean[]{false}; + Consumer handler = e -> { + invoked[0] = true; + assertThat(e, sameInstance(failure)); + }; + AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + @Override + void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + handler.accept(leaderState, null); + } + + @Override + void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler) { + assertThat(followRequest.getLeaderIndex(), equalTo("remote:logs-20190101")); + assertThat(followRequest.getFollowerIndex(), equalTo("logs-20190101")); + failureHandler.accept(failure); + } + + @Override + void updateAutoFollowMetadata(Function updateFunction, Consumer handler) { + fail("should not get here"); + } + }; + autoFollower.autoFollowIndices(); + assertThat(invoked[0], is(true)); + } + + public void testGetLeaderIndicesToFollow() { + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, null, null, null, null, null, null); + ClusterState followerState = ClusterState.builder(new ClusterName("remote")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(Collections.singletonMap("remote", autoFollowPattern), Collections.emptyMap()))) + .build(); + + MetaData.Builder imdBuilder = MetaData.builder(); + for (int i = 0; i < 5; i++) { + Settings.Builder builder = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, "metrics-" + i); + imdBuilder.put(IndexMetaData.builder("metrics-" + i) + .settings(builder) + .numberOfShards(1) + .numberOfReplicas(0)); + } + imdBuilder.put(IndexMetaData.builder("logs-0") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + + ClusterState leaderState = ClusterState.builder(new ClusterName("remote")) + .metaData(imdBuilder) + .build(); + + List result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, Collections.emptyList()); + result.sort(Comparator.comparing(Index::getName)); + assertThat(result.size(), equalTo(5)); + assertThat(result.get(0).getName(), equalTo("metrics-0")); + assertThat(result.get(1).getName(), equalTo("metrics-1")); + assertThat(result.get(2).getName(), equalTo("metrics-2")); + assertThat(result.get(3).getName(), equalTo("metrics-3")); + 
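// Editor's note: a hypothetical distillation (not part of the patch) of the selection rule the
// assertions above and below exercise: a leader index is picked up when its name matches one of
// the leader index patterns and its UUID has not been followed yet, which is why metrics-2 drops
// out of the second result. Using the same Regex.simpleMatch helper that AutoFollowPattern.match
// wraps:
//
//     for (IndexMetaData imd : leaderState.metaData()) {
//         if (Regex.simpleMatch(autoFollowPattern.getLeaderIndexPatterns(), imd.getIndex().getName())
//                 && followedIndexUUIDs.contains(imd.getIndexUUID()) == false) {
//             result.add(imd.getIndex());
//         }
//     }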
assertThat(result.get(4).getName(), equalTo("metrics-4")); + + List followedIndexUUIDs = Collections.singletonList(leaderState.metaData().index("metrics-2").getIndexUUID()); + result = AutoFollower.getLeaderIndicesToFollow(autoFollowPattern, leaderState, followerState, followedIndexUUIDs); + result.sort(Comparator.comparing(Index::getName)); + assertThat(result.size(), equalTo(4)); + assertThat(result.get(0).getName(), equalTo("metrics-0")); + assertThat(result.get(1).getName(), equalTo("metrics-1")); + assertThat(result.get(2).getName(), equalTo("metrics-3")); + assertThat(result.get(3).getName(), equalTo("metrics-4")); + } + + public void testGetFollowerIndexName() { + AutoFollowPattern autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), null, null, + null, null, null, null, null, null); + assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("metrics-0")); + + autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-metrics-0", null, null, + null, null, null, null, null); + assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); + + autoFollowPattern = new AutoFollowPattern(Collections.singletonList("metrics-*"), "eu-{{leader_index}}", null, + null, null, null, null, null, null); + assertThat(AutoFollower.getFollowerIndexName(autoFollowPattern, "metrics-0"), equalTo("eu-metrics-0")); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java new file mode 100644 index 0000000000000..a4808e428feca --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowTests.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xpack.ccr.LocalStateCcr; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class AutoFollowTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singleton(LocalStateCcr.class); + } + + @Override + protected boolean resetNodeAfterTest() { + return true; + } + + public void testAutoFollow() throws Exception { + Settings leaderIndexSettings = Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) + .build(); + + createIndex("logs-201812", leaderIndexSettings, "_doc"); + + // Enabling auto following: + putAutoFollowPatterns("logs-*", "transactions-*"); + + createIndex("metrics-201901", leaderIndexSettings, "_doc"); + + createIndex("logs-201901", leaderIndexSettings, "_doc"); + assertBusy(() -> { + IndicesExistsRequest request = new IndicesExistsRequest("copy-logs-201901"); + assertTrue(client().admin().indices().exists(request).actionGet().isExists()); + }); + createIndex("transactions-201901", leaderIndexSettings, "_doc"); + assertBusy(() -> { + IndicesExistsRequest request = new IndicesExistsRequest("copy-transactions-201901"); + assertTrue(client().admin().indices().exists(request).actionGet().isExists()); + }); + + IndicesExistsRequest request = new IndicesExistsRequest("copy-metrics-201901"); + assertFalse(client().admin().indices().exists(request).actionGet().isExists()); + request = new IndicesExistsRequest("copy-logs-201812"); + assertFalse(client().admin().indices().exists(request).actionGet().isExists()); + } + + public void testAutoFollowManyIndices() throws Exception { + Settings leaderIndexSettings = Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) + .build(); + + putAutoFollowPatterns("logs-*"); + int numIndices = randomIntBetween(4, 32); + for (int i = 0; i < numIndices; i++) { + createIndex("logs-" + i, leaderIndexSettings, "_doc"); + } + int expectedVal1 = numIndices; + assertBusy(() -> { + MetaData metaData = client().admin().cluster().prepareState().get().getState().metaData(); + int count = (int) Arrays.stream(metaData.getConcreteAllIndices()).filter(s -> s.startsWith("copy-")).count(); + assertThat(count, equalTo(expectedVal1)); + }); + + deleteAutoFollowPatternSetting(); + createIndex("logs-does-not-count", leaderIndexSettings, "_doc"); + + putAutoFollowPatterns("logs-*"); + int i = numIndices; + numIndices = numIndices + randomIntBetween(4, 32); + for (; i < numIndices; i++) { + createIndex("logs-" + i, leaderIndexSettings, "_doc"); + } + int expectedVal2 = numIndices; + 
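// Editor's note: a sketch (not part of the patch) of the naming rule behind the "copy-" prefix
// counted in the assertBusy blocks of this test. It is consistent with the testGetFollowerIndexName
// assertions in AutoFollowCoordinatorTests: a null follow pattern falls back to the leader index
// name, otherwise the {{leader_index}} placeholder is substituted:
//
//     static String followerIndexName(String followPattern, String leaderIndex) {
//         return followPattern == null ? leaderIndex : followPattern.replace("{{leader_index}}", leaderIndex);
//     }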
assertBusy(() -> { + MetaData metaData = client().admin().cluster().prepareState().get().getState().metaData(); + int count = (int) Arrays.stream(metaData.getConcreteAllIndices()).filter(s -> s.startsWith("copy-")).count(); + assertThat(count, equalTo(expectedVal2)); + }); + } + + public void testAutoFollowParameterAreDelegated() throws Exception { + Settings leaderIndexSettings = Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0) + .build(); + + // Enabling auto following: + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("_local_"); + request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); + // Need to set this, because following an index in the same cluster + request.setFollowIndexNamePattern("copy-{{leader_index}}"); + if (randomBoolean()) { + request.setMaxWriteBufferSize(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxConcurrentReadBatches(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxConcurrentWriteBatches(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxBatchOperationCount(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxOperationSizeInBytes(randomNonNegativeLong()); + } + if (randomBoolean()) { + request.setRetryTimeout(TimeValue.timeValueMillis(500)); + } + if (randomBoolean()) { + request.setIdleShardRetryDelay(TimeValue.timeValueMillis(500)); + } + assertTrue(client().execute(PutAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); + + createIndex("logs-201901", leaderIndexSettings, "_doc"); + assertBusy(() -> { + PersistentTasksCustomMetaData persistentTasksMetaData = + client().admin().cluster().prepareState().get().getState().getMetaData().custom(PersistentTasksCustomMetaData.TYPE); + assertThat(persistentTasksMetaData, notNullValue()); + assertThat(persistentTasksMetaData.tasks().size(), equalTo(1)); + ShardFollowTask shardFollowTask = (ShardFollowTask) persistentTasksMetaData.tasks().iterator().next().getParams(); + assertThat(shardFollowTask.getLeaderShardId().getIndexName(), equalTo("logs-201901")); + assertThat(shardFollowTask.getFollowShardId().getIndexName(), equalTo("copy-logs-201901")); + if (request.getMaxWriteBufferSize() != null) { + assertThat(shardFollowTask.getMaxWriteBufferSize(), equalTo(request.getMaxWriteBufferSize())); + } + if (request.getMaxConcurrentReadBatches() != null) { + assertThat(shardFollowTask.getMaxConcurrentReadBatches(), equalTo(request.getMaxConcurrentReadBatches())); + } + if (request.getMaxConcurrentWriteBatches() != null) { + assertThat(shardFollowTask.getMaxConcurrentWriteBatches(), equalTo(request.getMaxConcurrentWriteBatches())); + } + if (request.getMaxBatchOperationCount() != null) { + assertThat(shardFollowTask.getMaxBatchOperationCount(), equalTo(request.getMaxBatchOperationCount())); + } + if (request.getMaxOperationSizeInBytes() != null) { + assertThat(shardFollowTask.getMaxBatchSizeInBytes(), equalTo(request.getMaxOperationSizeInBytes())); + } + if (request.getRetryTimeout() != null) { + assertThat(shardFollowTask.getRetryTimeout(), equalTo(request.getRetryTimeout())); + } + if (request.getIdleShardRetryDelay() != null) { + assertThat(shardFollowTask.getIdleShardRetryDelay(), 
equalTo(request.getIdleShardRetryDelay())); + } + }); + } + + private void putAutoFollowPatterns(String... patterns) { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("_local_"); + request.setLeaderIndexPatterns(Arrays.asList(patterns)); + // Need to set this, because following an index in the same cluster + request.setFollowIndexNamePattern("copy-{{leader_index}}"); + assertTrue(client().execute(PutAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); + } + + private void deleteAutoFollowPatternSetting() { + DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("_local_"); + assertTrue(client().execute(DeleteAutoFollowPatternAction.INSTANCE, request).actionGet().isAcknowledged()); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java new file mode 100644 index 0000000000000..0ca1b3d127827 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/DeleteAutoFollowPatternRequestTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.test.AbstractStreamableTestCase; + +public class DeleteAutoFollowPatternRequestTests extends AbstractStreamableTestCase { + + @Override + protected DeleteAutoFollowPatternAction.Request createBlankInstance() { + return new DeleteAutoFollowPatternAction.Request(); + } + + @Override + protected DeleteAutoFollowPatternAction.Request createTestInstance() { + DeleteAutoFollowPatternAction.Request request = new DeleteAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias(randomAlphaOfLength(4)); + return request; + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java new file mode 100644 index 0000000000000..27760578db945 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/PutAutoFollowPatternRequestTests.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Arrays; + +public class PutAutoFollowPatternRequestTests extends AbstractStreamableXContentTestCase { + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected PutAutoFollowPatternAction.Request doParseInstance(XContentParser parser) throws IOException { + return PutAutoFollowPatternAction.Request.fromXContent(parser, null); + } + + @Override + protected PutAutoFollowPatternAction.Request createBlankInstance() { + return new PutAutoFollowPatternAction.Request(); + } + + @Override + protected PutAutoFollowPatternAction.Request createTestInstance() { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias(randomAlphaOfLength(4)); + request.setLeaderIndexPatterns(Arrays.asList(generateRandomStringArray(4, 4, false))); + if (randomBoolean()) { + request.setFollowIndexNamePattern(randomAlphaOfLength(4)); + } + if (randomBoolean()) { + request.setIdleShardRetryDelay(TimeValue.timeValueMillis(500)); + } + if (randomBoolean()) { + request.setRetryTimeout(TimeValue.timeValueMillis(500)); + } + if (randomBoolean()) { + request.setMaxBatchOperationCount(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxConcurrentReadBatches(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxConcurrentWriteBatches(randomIntBetween(0, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + request.setMaxOperationSizeInBytes(randomNonNegativeLong()); + } + if (randomBoolean()) { + request.setMaxWriteBufferSize(randomIntBetween(0, Integer.MAX_VALUE)); + } + return request; + } +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java new file mode 100644 index 0000000000000..03065ea8d38f2 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportDeleteAutoFollowPatternActionTests.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ccr.action.DeleteAutoFollowPatternAction.Request; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class TransportDeleteAutoFollowPatternActionTests extends ESTestCase { + + public void testInnerDelete() { + Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); + Map existingAutoFollowPatterns = new HashMap<>(); + { + List existingPatterns = new ArrayList<>(); + existingPatterns.add("transactions-*"); + existingAutoFollowPatterns.put("eu_cluster", + new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + + List existingUUIDS = new ArrayList<>(); + existingUUIDS.add("_val"); + existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS); + } + { + List existingPatterns = new ArrayList<>(); + existingPatterns.add("logs-*"); + existingAutoFollowPatterns.put("asia_cluster", + new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + + List existingUUIDS = new ArrayList<>(); + existingUUIDS.add("_val"); + existingAlreadyFollowedIndexUUIDS.put("asia_cluster", existingUUIDS); + } + ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS))) + .build(); + + Request request = new Request(); + request.setLeaderClusterAlias("eu_cluster"); + AutoFollowMetadata result = TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState) + .getMetaData() + .custom(AutoFollowMetadata.TYPE); + assertThat(result.getPatterns().size(), equalTo(1)); + assertThat(result.getPatterns().get("asia_cluster"), notNullValue()); + assertThat(result.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); + assertThat(result.getFollowedLeaderIndexUUIDs().get("asia_cluster"), notNullValue()); + } + + public void testInnerDeleteDoesNotExist() { + Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); + Map existingAutoFollowPatterns = new HashMap<>(); + { + List existingPatterns = new ArrayList<>(); + existingPatterns.add("transactions-*"); + existingAutoFollowPatterns.put("eu_cluster", + new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + } + ClusterState clusterState = ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS))) + .build(); + + Request request = new Request(); + request.setLeaderClusterAlias("asia_cluster"); + Exception e = expectThrows(ResourceNotFoundException.class, + () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState)); + assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found")); + } + + public void testInnerDeleteNoAutoFollowMetadata() { + ClusterState clusterState = 
ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder()) + .build(); + + Request request = new Request(); + request.setLeaderClusterAlias("asia_cluster"); + Exception e = expectThrows(ResourceNotFoundException.class, + () -> TransportDeleteAutoFollowPatternAction.innerDelete(request, clusterState)); + assertThat(e.getMessage(), equalTo("no auto-follow patterns for cluster alias [asia_cluster] found")); + } + +} diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java new file mode 100644 index 0000000000000..d894eda0b1186 --- /dev/null +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternActionTests.java @@ -0,0 +1,133 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ccr.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +public class TransportPutAutoFollowPatternActionTests extends ESTestCase { + + public void testInnerPut() { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("eu_cluster"); + request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); + + ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder()) + .build(); + + ClusterState remoteState = ClusterState.builder(new ClusterName("eu_cluster")) + .metaData(MetaData.builder()) + .build(); + + ClusterState result = TransportPutAutoFollowPatternAction.innerPut(request, localState, remoteState); + AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); + assertThat(autoFollowMetadata, notNullValue()); + assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(0)); + } + + public void testInnerPut_existingLeaderIndices() { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("eu_cluster"); + request.setLeaderIndexPatterns(Collections.singletonList("logs-*")); + + ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder()) + .build(); + + int numLeaderIndices = randomIntBetween(1, 8); + int 
numMatchingLeaderIndices = randomIntBetween(1, 8); + MetaData.Builder mdBuilder = MetaData.builder(); + for (int i = 0; i < numLeaderIndices; i++) { + mdBuilder.put(IndexMetaData.builder("transactions-" + i) + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + } + for (int i = 0; i < numMatchingLeaderIndices; i++) { + mdBuilder.put(IndexMetaData.builder("logs-" + i) + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + } + + ClusterState remoteState = ClusterState.builder(new ClusterName("eu_cluster")) + .metaData(mdBuilder) + .build(); + + ClusterState result = TransportPutAutoFollowPatternAction.innerPut(request, localState, remoteState); + AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); + assertThat(autoFollowMetadata, notNullValue()); + assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), equalTo(1)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numMatchingLeaderIndices)); + } + + public void testInnerPut_existingLeaderIndicesAndAutoFollowMetadata() { + PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("eu_cluster"); + request.setLeaderIndexPatterns(Arrays.asList("logs-*", "transactions-*")); + + Map existingAutoFollowPatterns = new HashMap<>(); + List existingPatterns = new ArrayList<>(); + existingPatterns.add("transactions-*"); + existingAutoFollowPatterns.put("eu_cluster", + new AutoFollowMetadata.AutoFollowPattern(existingPatterns, null, null, null, null, null, null, null, null)); + Map> existingAlreadyFollowedIndexUUIDS = new HashMap<>(); + List existingUUIDS = new ArrayList<>(); + existingUUIDS.add("_val"); + existingAlreadyFollowedIndexUUIDS.put("eu_cluster", existingUUIDS); + ClusterState localState = ClusterState.builder(new ClusterName("us_cluster")) + .metaData(MetaData.builder().putCustom(AutoFollowMetadata.TYPE, + new AutoFollowMetadata(existingAutoFollowPatterns, existingAlreadyFollowedIndexUUIDS))) + .build(); + + int numLeaderIndices = randomIntBetween(1, 8); + MetaData.Builder mdBuilder = MetaData.builder(); + for (int i = 0; i < numLeaderIndices; i++) { + mdBuilder.put(IndexMetaData.builder("logs-" + i) + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0)); + } + + ClusterState remoteState = ClusterState.builder(new ClusterName("eu_cluster")) + .metaData(mdBuilder) + .build(); + + ClusterState result = TransportPutAutoFollowPatternAction.innerPut(request, localState, remoteState); + AutoFollowMetadata autoFollowMetadata = result.metaData().custom(AutoFollowMetadata.TYPE); + assertThat(autoFollowMetadata, notNullValue()); + assertThat(autoFollowMetadata.getPatterns().size(), equalTo(1)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().size(), equalTo(2)); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(0), equalTo("logs-*")); + assertThat(autoFollowMetadata.getPatterns().get("eu_cluster").getLeaderIndexPatterns().get(1), equalTo("transactions-*")); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().size(), 
equalTo(1)); + assertThat(autoFollowMetadata.getFollowedLeaderIndexUUIDs().get("eu_cluster").size(), equalTo(numLeaderIndices + 1)); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 2d3707e98cf80..03820b1f40b22 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -38,6 +38,7 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.action.XPackInfoAction; import org.elasticsearch.xpack.core.action.XPackUsageAction; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; import org.elasticsearch.xpack.core.graph.GraphFeatureSetUsage; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; @@ -366,7 +367,9 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.ROLLUP, RollupFeatureSetUsage::new), new NamedWriteableRegistry.Entry(PersistentTaskParams.class, RollupJob.NAME, RollupJob::new), new NamedWriteableRegistry.Entry(Task.Status.class, RollupJobStatus.NAME, RollupJobStatus::new), - new NamedWriteableRegistry.Entry(PersistentTaskState.class, RollupJobStatus.NAME, RollupJobStatus::new) + new NamedWriteableRegistry.Entry(PersistentTaskState.class, RollupJobStatus.NAME, RollupJobStatus::new), + // ccr + new NamedWriteableRegistry.Entry(AutoFollowMetadata.class, AutoFollowMetadata.TYPE, AutoFollowMetadata::new) ); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java new file mode 100644 index 0000000000000..244a5d441d9bf --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadata.java @@ -0,0 +1,357 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ccr; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.AbstractNamedDiffable; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.security.xcontent.XContentUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * Custom metadata that contains auto follow patterns and what leader indices an auto follow pattern has already followed. + */ +public class AutoFollowMetadata extends AbstractNamedDiffable<MetaData.Custom> implements XPackPlugin.XPackMetaDataCustom { + + public static final String TYPE = "ccr_auto_follow"; + + private static final ParseField PATTERNS_FIELD = new ParseField("patterns"); + private static final ParseField FOLLOWED_LEADER_INDICES_FIELD = new ParseField("followed_leader_indices"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser<AutoFollowMetadata, Void> PARSER = new ConstructingObjectParser<>("auto_follow", + args -> new AutoFollowMetadata((Map<String, AutoFollowPattern>) args[0], (Map<String, List<String>>) args[1])); + + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { + Map<String, AutoFollowPattern> patterns = new HashMap<>(); + String fieldName = null; + for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + patterns.put(fieldName, AutoFollowPattern.PARSER.parse(p, c)); + } else { + throw new ElasticsearchParseException("unexpected token [" + token + "]"); + } + } + return patterns; + }, PATTERNS_FIELD); + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> { + Map<String, List<String>> alreadyFollowedIndexUUIDS = new HashMap<>(); + String fieldName = null; + for (XContentParser.Token token = p.nextToken(); token != XContentParser.Token.END_OBJECT; token = p.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token == XContentParser.Token.START_ARRAY) { + alreadyFollowedIndexUUIDS.put(fieldName, Arrays.asList(XContentUtils.readStringArray(p, false))); + } else { + throw new ElasticsearchParseException("unexpected token [" + token + "]"); + } + } + return alreadyFollowedIndexUUIDS; + }, FOLLOWED_LEADER_INDICES_FIELD); + } + + public static AutoFollowMetadata fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final Map<String, AutoFollowPattern> patterns; + private final Map<String, List<String>> followedLeaderIndexUUIDs; + + public AutoFollowMetadata(Map<String, AutoFollowPattern> patterns, Map<String, List<String>> followedLeaderIndexUUIDs) { + this.patterns = patterns; + this.followedLeaderIndexUUIDs = followedLeaderIndexUUIDs; + } + + public
AutoFollowMetadata(StreamInput in) throws IOException { + patterns = in.readMap(StreamInput::readString, AutoFollowPattern::new); + followedLeaderIndexUUIDs = in.readMapOfLists(StreamInput::readString, StreamInput::readString); + } + + public Map<String, AutoFollowPattern> getPatterns() { + return patterns; + } + + public Map<String, List<String>> getFollowedLeaderIndexUUIDs() { + return followedLeaderIndexUUIDs; + } + + @Override + public EnumSet<MetaData.XContentContext> context() { + // TODO: When a snapshot is restored do we want to restore this? + // (Otherwise we would start following indices automatically immediately) + return MetaData.ALL_CONTEXTS; + } + + @Override + public String getWriteableName() { + return TYPE; + } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_6_5_0.minimumCompatibilityVersion(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(patterns, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + out.writeMapOfLists(followedLeaderIndexUUIDs, StreamOutput::writeString, StreamOutput::writeString); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(PATTERNS_FIELD.getPreferredName()); + for (Map.Entry<String, AutoFollowPattern> entry : patterns.entrySet()) { + builder.startObject(entry.getKey()); + builder.value(entry.getValue()); + builder.endObject(); + } + builder.endObject(); + + builder.startObject(FOLLOWED_LEADER_INDICES_FIELD.getPreferredName()); + for (Map.Entry<String, List<String>> entry : followedLeaderIndexUUIDs.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); + return builder; + } + + @Override + public boolean isFragment() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoFollowMetadata that = (AutoFollowMetadata) o; + return Objects.equals(patterns, that.patterns); + } + + @Override + public int hashCode() { + return Objects.hash(patterns); + } + + public static class AutoFollowPattern implements Writeable, ToXContentObject { + + private static final ParseField LEADER_PATTERNS_FIELD = new ParseField("leader_patterns"); + private static final ParseField FOLLOW_PATTERN_FIELD = new ParseField("follow_pattern"); + public static final ParseField MAX_BATCH_OPERATION_COUNT = new ParseField("max_batch_operation_count"); + public static final ParseField MAX_CONCURRENT_READ_BATCHES = new ParseField("max_concurrent_read_batches"); + public static final ParseField MAX_BATCH_SIZE_IN_BYTES = new ParseField("max_batch_size_in_bytes"); + public static final ParseField MAX_CONCURRENT_WRITE_BATCHES = new ParseField("max_concurrent_write_batches"); + public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size"); + public static final ParseField RETRY_TIMEOUT = new ParseField("retry_timeout"); + public static final ParseField IDLE_SHARD_RETRY_DELAY = new ParseField("idle_shard_retry_delay"); + + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser<AutoFollowPattern, Void> PARSER = + new ConstructingObjectParser<>("auto_follow_pattern", + args -> new AutoFollowPattern((List<String>) args[0], (String) args[1], (Integer) args[2], (Integer) args[3], + (Long) args[4], (Integer) args[5], (Integer) args[6], (TimeValue) args[7], (TimeValue) args[8])); + + static { + PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), LEADER_PATTERNS_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(),
FOLLOW_PATTERN_FIELD); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_OPERATION_COUNT); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_READ_BATCHES); + PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MAX_BATCH_SIZE_IN_BYTES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_CONCURRENT_WRITE_BATCHES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_WRITE_BUFFER_SIZE); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> TimeValue.parseTimeValue(p.text(), RETRY_TIMEOUT.getPreferredName()), + RETRY_TIMEOUT, ObjectParser.ValueType.STRING); + PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), + (p, c) -> TimeValue.parseTimeValue(p.text(), IDLE_SHARD_RETRY_DELAY.getPreferredName()), + IDLE_SHARD_RETRY_DELAY, ObjectParser.ValueType.STRING); + } + + private final List<String> leaderIndexPatterns; + private final String followIndexPattern; + private final Integer maxBatchOperationCount; + private final Integer maxConcurrentReadBatches; + private final Long maxOperationSizeInBytes; + private final Integer maxConcurrentWriteBatches; + private final Integer maxWriteBufferSize; + private final TimeValue retryTimeout; + private final TimeValue idleShardRetryDelay; + + public AutoFollowPattern(List<String> leaderIndexPatterns, String followIndexPattern, Integer maxBatchOperationCount, + Integer maxConcurrentReadBatches, Long maxOperationSizeInBytes, Integer maxConcurrentWriteBatches, + Integer maxWriteBufferSize, TimeValue retryTimeout, TimeValue idleShardRetryDelay) { + this.leaderIndexPatterns = leaderIndexPatterns; + this.followIndexPattern = followIndexPattern; + this.maxBatchOperationCount = maxBatchOperationCount; + this.maxConcurrentReadBatches = maxConcurrentReadBatches; + this.maxOperationSizeInBytes = maxOperationSizeInBytes; + this.maxConcurrentWriteBatches = maxConcurrentWriteBatches; + this.maxWriteBufferSize = maxWriteBufferSize; + this.retryTimeout = retryTimeout; + this.idleShardRetryDelay = idleShardRetryDelay; + } + + AutoFollowPattern(StreamInput in) throws IOException { + leaderIndexPatterns = in.readList(StreamInput::readString); + followIndexPattern = in.readOptionalString(); + maxBatchOperationCount = in.readOptionalVInt(); + maxConcurrentReadBatches = in.readOptionalVInt(); + maxOperationSizeInBytes = in.readOptionalLong(); + maxConcurrentWriteBatches = in.readOptionalVInt(); + maxWriteBufferSize = in.readOptionalVInt(); + retryTimeout = in.readOptionalTimeValue(); + idleShardRetryDelay = in.readOptionalTimeValue(); + } + + public boolean match(String indexName) { + return match(leaderIndexPatterns, indexName); + } + + public static boolean match(List<String> leaderIndexPatterns, String indexName) { + return Regex.simpleMatch(leaderIndexPatterns, indexName); + } + + public List<String> getLeaderIndexPatterns() { + return leaderIndexPatterns; + } + + public String getFollowIndexPattern() { + return followIndexPattern; + } + + public Integer getMaxBatchOperationCount() { + return maxBatchOperationCount; + } + + public Integer getMaxConcurrentReadBatches() { + return maxConcurrentReadBatches; + } + + public Long getMaxOperationSizeInBytes() { + return maxOperationSizeInBytes; + } + + public Integer getMaxConcurrentWriteBatches() { + return maxConcurrentWriteBatches; + } + + public Integer getMaxWriteBufferSize() { + return maxWriteBufferSize; + } + + public TimeValue getRetryTimeout() { + return retryTimeout; + } +
public TimeValue getIdleShardRetryDelay() { + return idleShardRetryDelay; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringList(leaderIndexPatterns); + out.writeOptionalString(followIndexPattern); + out.writeOptionalVInt(maxBatchOperationCount); + out.writeOptionalVInt(maxConcurrentReadBatches); + out.writeOptionalLong(maxOperationSizeInBytes); + out.writeOptionalVInt(maxConcurrentWriteBatches); + out.writeOptionalVInt(maxWriteBufferSize); + out.writeOptionalTimeValue(retryTimeout); + out.writeOptionalTimeValue(idleShardRetryDelay); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.array(LEADER_PATTERNS_FIELD.getPreferredName(), leaderIndexPatterns.toArray(new String[0])); + if (followIndexPattern != null) { + builder.field(FOLLOW_PATTERN_FIELD.getPreferredName(), followIndexPattern); + } + if (maxBatchOperationCount != null) { + builder.field(MAX_BATCH_OPERATION_COUNT.getPreferredName(), maxBatchOperationCount); + } + if (maxConcurrentReadBatches != null) { + builder.field(MAX_CONCURRENT_READ_BATCHES.getPreferredName(), maxConcurrentReadBatches); + } + if (maxOperationSizeInBytes != null) { + builder.field(MAX_BATCH_SIZE_IN_BYTES.getPreferredName(), maxOperationSizeInBytes); + } + if (maxConcurrentWriteBatches != null) { + builder.field(MAX_CONCURRENT_WRITE_BATCHES.getPreferredName(), maxConcurrentWriteBatches); + } + if (maxWriteBufferSize != null){ + builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize); + } + if (retryTimeout != null) { + builder.field(RETRY_TIMEOUT.getPreferredName(), retryTimeout); + } + if (idleShardRetryDelay != null) { + builder.field(IDLE_SHARD_RETRY_DELAY.getPreferredName(), idleShardRetryDelay); + } + return builder; + } + + @Override + public boolean isFragment() { + return true; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AutoFollowPattern that = (AutoFollowPattern) o; + return Objects.equals(leaderIndexPatterns, that.leaderIndexPatterns) && + Objects.equals(followIndexPattern, that.followIndexPattern) && + Objects.equals(maxBatchOperationCount, that.maxBatchOperationCount) && + Objects.equals(maxConcurrentReadBatches, that.maxConcurrentReadBatches) && + Objects.equals(maxOperationSizeInBytes, that.maxOperationSizeInBytes) && + Objects.equals(maxConcurrentWriteBatches, that.maxConcurrentWriteBatches) && + Objects.equals(maxWriteBufferSize, that.maxWriteBufferSize) && + Objects.equals(retryTimeout, that.retryTimeout) && + Objects.equals(idleShardRetryDelay, that.idleShardRetryDelay); + } + + @Override + public int hashCode() { + return Objects.hash( + leaderIndexPatterns, + followIndexPattern, + maxBatchOperationCount, + maxConcurrentReadBatches, + maxOperationSizeInBytes, + maxConcurrentWriteBatches, + maxWriteBufferSize, + retryTimeout, + idleShardRetryDelay + ); + } + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadataTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadataTests.java new file mode 100644 index 0000000000000..5227c04962a7b --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ccr/AutoFollowMetadataTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.ccr;
+
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;
+
+public class AutoFollowMetadataTests extends AbstractSerializingTestCase<AutoFollowMetadata> {
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return s -> true;
+    }
+
+    @Override
+    protected AutoFollowMetadata doParseInstance(XContentParser parser) throws IOException {
+        return AutoFollowMetadata.fromXContent(parser);
+    }
+
+    @Override
+    protected AutoFollowMetadata createTestInstance() {
+        int numEntries = randomIntBetween(0, 32);
+        Map<String, AutoFollowMetadata.AutoFollowPattern> configs = new HashMap<>(numEntries);
+        Map<String, List<String>> followedLeaderIndices = new HashMap<>(numEntries);
+        for (int i = 0; i < numEntries; i++) {
+            List<String> leaderPatterns = Arrays.asList(generateRandomStringArray(4, 4, false));
+            AutoFollowMetadata.AutoFollowPattern autoFollowPattern =
+                new AutoFollowMetadata.AutoFollowPattern(leaderPatterns, randomAlphaOfLength(4), randomIntBetween(0, Integer.MAX_VALUE),
+                    randomIntBetween(0, Integer.MAX_VALUE), randomNonNegativeLong(), randomIntBetween(0, Integer.MAX_VALUE),
+                    randomIntBetween(0, Integer.MAX_VALUE), TimeValue.timeValueMillis(500), TimeValue.timeValueMillis(500));
+            configs.put(Integer.toString(i), autoFollowPattern);
+            followedLeaderIndices.put(Integer.toString(i), Arrays.asList(generateRandomStringArray(4, 4, false)));
+        }
+        return new AutoFollowMetadata(configs, followedLeaderIndices);
+    }
+
+    @Override
+    protected Writeable.Reader<AutoFollowMetadata> instanceReader() {
+        return AutoFollowMetadata::new;
+    }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json
new file mode 100644
index 0000000000000..b14effd5f3f73
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json
@@ -0,0 +1,17 @@
+{
+  "ccr.delete_auto_follow_pattern": {
+    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current",
+    "methods": [ "DELETE" ],
+    "url": {
+      "path": "/_ccr/_auto_follow/{leader_cluster_alias}",
+      "paths": [ "/_ccr/_auto_follow/{leader_cluster_alias}" ],
+      "parts": {
+        "leader_cluster_alias": {
+          "type": "string",
+          "required": true,
+          "description": "The name of the leader cluster alias."
+        }
+      }
+    }
+  }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json
new file mode 100644
index 0000000000000..28e7299713da5
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json
@@ -0,0 +1,21 @@
+{
+  "ccr.put_auto_follow_pattern": {
+    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current",
+    "methods": [ "PUT" ],
+    "url": {
+      "path": "/_ccr/_auto_follow/{leader_cluster_alias}",
+      "paths": [ "/_ccr/_auto_follow/{leader_cluster_alias}" ],
+      "parts": {
+        "leader_cluster_alias": {
+          "type": "string",
+          "required": true,
+          "description": "The name of the leader cluster alias."
+        }
+      }
+    },
+    "body": {
+      "description" : "The specification of the auto follow pattern",
+      "required" : true
+    }
+  }
+}
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml
new file mode 100644
index 0000000000000..f4cf79fb5589a
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ccr/auto_follow.yml
@@ -0,0 +1,13 @@
+---
+"Test put and delete auto follow pattern":
+  - do:
+      ccr.put_auto_follow_pattern:
+        leader_cluster_alias: _local_
+        body:
+          leader_index_patterns: ['logs-*']
+  - is_true: acknowledged
+
+  - do:
+      ccr.delete_auto_follow_pattern:
+        leader_cluster_alias: _local_
+  - is_true: acknowledged

From 5236f2b1af88bd493c05ea619b96877d38e940bf Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer
Date: Thu, 6 Sep 2018 09:42:45 +0200
Subject: [PATCH 09/91] Improve reproducibility of RestControllerTests

With this commit we use the classic parent circuit breaker, which does not
account for real memory usage. In those tests we want to have reproducible
results and hence it makes sense to disable the real memory circuit breaker
there.

---
 .../test/java/org/elasticsearch/rest/RestControllerTests.java | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index 348b85a8ba4a1..cbf554289711e 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -81,6 +81,8 @@ public void setup() {
         circuitBreakerService = new HierarchyCircuitBreakerService(
             Settings.builder()
                 .put(HierarchyCircuitBreakerService.IN_FLIGHT_REQUESTS_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), BREAKER_LIMIT)
+                // We want to have reproducible results in this test, hence we disable real memory usage accounting
+                .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
                 .build(),
             new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
         usageService = new UsageService(settings);

From 82fab40099c640952c09719330f6511a1a7cd28e Mon Sep 17 00:00:00 2001
From: Alexander Reelsen
Date: Thu, 6 Sep 2018 09:56:20 +0200
Subject: [PATCH 10/91] Core: Fix IndicesSegmentResponse.toXContent() serialization (#33414)

When index sorting is enabled, toXContent tried to serialize a SortField
object, resulting in an exception when using the _segments endpoint.
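For context on the hunk below: a SortField's missing value can be one of the
sentinel objects SortField.STRING_FIRST or SortField.STRING_LAST, which are
plain Objects with only a toString() override, so handing the raw value to the
XContentBuilder blows up at render time. A minimal standalone sketch of the
guarded serialization, assuming Lucene and the Elasticsearch XContent helpers
are on the classpath (the class name SortFieldMissingValueSketch is invented
for this illustration and is not part of the patch):

    import org.apache.lucene.search.SortField;
    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.xcontent.XContentBuilder;

    import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;

    public class SortFieldMissingValueSketch {
        public static void main(String[] args) throws Exception {
            SortField field = new SortField("city", SortField.Type.STRING);
            // STRING_LAST is a plain Object sentinel, not a number or a string
            field.setMissingValue(SortField.STRING_LAST);

            try (XContentBuilder builder = jsonBuilder()) {
                builder.startObject();
                // Only emit "missing" when a missing value is set, and render the
                // sentinel via toString() instead of passing the raw Object through.
                if (field.getMissingValue() != null) {
                    builder.field("missing", field.getMissingValue().toString());
                }
                builder.field("reverse", field.getReverse());
                builder.endObject();
                // prints {"missing":"SortField.STRING_LAST","reverse":false}
                System.out.println(Strings.toString(builder));
            }
        }
    }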
Relates #29120 --- .../segments/IndicesSegmentResponse.java | 4 +- .../segments/IndicesSegmentResponseTests.java | 53 +++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java index aa693c1b9e5cc..cc68a4a7e34b7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponse.java @@ -186,7 +186,9 @@ private static void toXContent(XContentBuilder builder, Sort sort) throws IOExce builder.field("mode", ((SortedSetSortField) field).getSelector() .toString().toLowerCase(Locale.ROOT)); } - builder.field("missing", field.getMissingValue()); + if (field.getMissingValue() != null) { + builder.field("missing", field.getMissingValue().toString()); + } builder.field("reverse", field.getReverse()); builder.endObject(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java new file mode 100644 index 0000000000000..1d63db7585e65 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentResponseTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.action.admin.indices.segments;
+
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
+import org.elasticsearch.cluster.routing.TestShardRouting;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.index.engine.Segment;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Collections;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+
+public class IndicesSegmentResponseTests extends ESTestCase {
+
+    public void testToXContentSerializationWithSortedFields() throws Exception {
+        ShardRouting shardRouting = TestShardRouting.newShardRouting("foo", 0, "node_id", true, ShardRoutingState.STARTED);
+        Segment segment = new Segment("my");
+
+        SortField sortField = new SortField("foo", SortField.Type.STRING);
+        sortField.setMissingValue(SortField.STRING_LAST);
+        segment.segmentSort = new Sort(sortField);
+
+        ShardSegments shardSegments = new ShardSegments(shardRouting, Collections.singletonList(segment));
+        IndicesSegmentResponse response =
+            new IndicesSegmentResponse(new ShardSegments[] { shardSegments }, 1, 1, 0, Collections.emptyList());
+        try (XContentBuilder builder = jsonBuilder()) {
+            response.toXContent(builder, ToXContent.EMPTY_PARAMS);
+        }
+    }
+}

From e134f9b5f3dd93b422a738bf9f08b6672bf66d7a Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Thu, 6 Sep 2018 09:04:22 +0100
Subject: [PATCH 11/91] Fix generics in ScriptPlugin#getContexts() (#33426)

Changes the return value from List<ScriptContext> to List<ScriptContext<?>>
to remove raw-types warnings.

---
 .../elasticsearch/analysis/common/CommonAnalysisPlugin.java   | 3 +--
 .../main/java/org/elasticsearch/painless/PainlessPlugin.java  | 4 ++--
 .../src/main/java/org/elasticsearch/plugins/ScriptPlugin.java | 2 +-
 .../src/main/java/org/elasticsearch/script/ScriptModule.java  | 4 ++--
 .../xpack/core/LocalStateCompositeXPackPlugin.java            | 4 ++--
 .../main/java/org/elasticsearch/xpack/watcher/Watcher.java    | 2 +-
 6 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
index bbd721169c6c7..a8e00d74486a8 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
@@ -167,8 +167,7 @@ public Collection<Object> createComponents(Client client, ClusterService cluster
     }
 
     @Override
-    @SuppressWarnings("rawtypes") // TODO ScriptPlugin needs to change this to pass precommit?
-    public List<ScriptContext> getContexts() {
+    public List<ScriptContext<?>> getContexts() {
         return Collections.singletonList(AnalysisPredicateScript.CONTEXT);
     }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
index 833ff0eac4134..3057378646730 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java
@@ -105,8 +105,8 @@ public void reloadSPI(ClassLoader loader) {
         }
     }
 
-    @SuppressWarnings("rawtypes")
-    public List<ScriptContext> getContexts() {
+    @Override
+    public List<ScriptContext<?>> getContexts() {
         return Collections.singletonList(PainlessExecuteAction.PainlessTestScript.CONTEXT);
     }
 
diff --git a/server/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java
index 88af291983af8..384f98609a5e7 100644
--- a/server/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java
+++ b/server/src/main/java/org/elasticsearch/plugins/ScriptPlugin.java
@@ -44,7 +44,7 @@ default ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts
-    default List<ScriptContext> getContexts() {
+    default List<ScriptContext<?>> getContexts() {
         return Collections.emptyList();
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
index 1788d8c792bf0..6dc507fa0d8a4 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -70,8 +70,8 @@ public ScriptModule(Settings settings, List<ScriptPlugin> scriptPlugins) {
         Map<String, ScriptEngine> engines = new HashMap<>();
         Map<String, ScriptContext<?>> contexts = new HashMap<>(CORE_CONTEXTS);
         for (ScriptPlugin plugin : scriptPlugins) {
-            for (ScriptContext context : plugin.getContexts()) {
-                ScriptContext oldContext = contexts.put(context.name, context);
+            for (ScriptContext<?> context : plugin.getContexts()) {
+                ScriptContext<?> oldContext = contexts.put(context.name, context);
                 if (oldContext != null) {
                     throw new IllegalArgumentException("Context name [" + context.name + "] defined twice");
                 }
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
index 70dca834c1669..f0db64d3271c8 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java
@@ -251,8 +251,8 @@ public Settings additionalSettings() {
 
     @Override
-    public List<ScriptContext> getContexts() {
-        List<ScriptContext> contexts = new ArrayList<>();
+    public List<ScriptContext<?>> getContexts() {
+        List<ScriptContext<?>> contexts = new ArrayList<>();
         contexts.addAll(super.getContexts());
         filterPlugins(ScriptPlugin.class).stream().forEach(p -> contexts.addAll(p.getContexts()));
         return contexts;
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
index 330962e21678d..975ceacbffaf0 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
@@ -602,7 +602,7 @@ public List<BootstrapCheck> getBootstrapChecks() {
     }
 
     @Override
-    public List<ScriptContext> getContexts() {
+    public List<ScriptContext<?>> getContexts() {
         return
Arrays.asList(Watcher.SCRIPT_SEARCH_CONTEXT, Watcher.SCRIPT_EXECUTABLE_CONTEXT, Watcher.SCRIPT_TEMPLATE_CONTEXT); } From 0849b98f60f64439f8298192a9b9c8444e6d7b66 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 6 Sep 2018 09:13:08 +0100 Subject: [PATCH 12/91] [ML] Rename log structure to file structure (#33421) Many files supplied to the upcoming ML data preparation functionality will not be "log" files. For example, CSV files are generally not "log" files. Therefore it makes sense to rename library that determines the structure of these files. Although "file structure" could be considered too broad, as the library currently only works with a few text formats, in the future it may be extended to work with more formats. --- .../ml/filestructurefinder}/FieldStats.java | 8 +- .../filestructurefinder/FileStructure.java} | 24 +-- .../filestructurefinder}/FieldStatsTests.java | 2 +- .../FileStructureTests.java} | 18 +- x-pack/plugin/ml/build.gradle | 1 + .../licenses/icu4j-62.1.jar.sha1 | 0 .../licenses/icu4j-LICENSE.txt | 0 .../licenses/icu4j-NOTICE.txt | 0 .../ml/log-structure-finder/build.gradle | 34 --- .../licenses/super-csv-2.4.0.jar.sha1 | 1 - .../licenses/super-csv-LICENSE.txt | 203 ------------------ .../licenses/super-csv-NOTICE.txt | 0 .../LogStructureFinder.java | 23 -- .../TsvLogStructureFinderFactory.java | 35 --- .../DelimitedFileStructureFinder.java} | 32 +-- .../DelimitedFileStructureFinderFactory.java} | 12 +- .../FieldStatsCalculator.java | 4 +- .../FileStructureFinder.java | 25 +++ .../FileStructureFinderFactory.java} | 18 +- .../FileStructureFinderManager.java} | 38 ++-- .../FileStructureUtils.java} | 19 +- .../GrokPatternCreator.java | 15 +- .../JsonFileStructureFinder.java} | 30 +-- .../JsonFileStructureFinderFactory.java} | 8 +- .../TextLogFileStructureFinder.java} | 30 +-- .../TextLogFileStructureFinderFactory.java} | 8 +- .../TimestampFormatFinder.java | 8 +- .../XmlFileStructureFinder.java} | 36 ++-- .../XmlFileStructureFinderFactory.java} | 10 +- ...mitedFileStructureFinderFactoryTests.java} | 12 +- .../DelimitedFileStructureFinderTests.java} | 117 +++++----- .../FieldStatsCalculatorTests.java | 6 +- .../FileStructureFinderManagerTests.java} | 14 +- .../FileStructureTestCase.java} | 6 +- .../FileStructureUtilsTests.java} | 71 +++--- .../GrokPatternCreatorTests.java | 48 ++--- .../JsonFileStructureFinderFactoryTests.java} | 6 +- .../JsonFileStructureFinderTests.java} | 14 +- ...xtLogFileStructureFinderFactoryTests.java} | 6 +- .../TextLogFileStructureFinderTests.java} | 31 +-- .../TimestampFormatFinderTests.java | 6 +- .../XmlFileStructureFinderFactoryTests.java} | 6 +- .../XmlFileStructureFinderTests.java} | 14 +- 43 files changed, 375 insertions(+), 624 deletions(-) rename x-pack/plugin/{ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder => core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder}/FieldStats.java (93%) rename x-pack/plugin/{ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java => core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java} (95%) rename x-pack/plugin/{ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder => core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder}/FieldStatsTests.java (96%) rename x-pack/plugin/{ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java => 
core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java} (81%) rename x-pack/plugin/ml/{log-structure-finder => }/licenses/icu4j-62.1.jar.sha1 (100%) rename x-pack/plugin/ml/{log-structure-finder => }/licenses/icu4j-LICENSE.txt (100%) rename x-pack/plugin/ml/{log-structure-finder => }/licenses/icu4j-NOTICE.txt (100%) delete mode 100644 x-pack/plugin/ml/log-structure-finder/build.gradle delete mode 100644 x-pack/plugin/ml/log-structure-finder/licenses/super-csv-2.4.0.jar.sha1 delete mode 100644 x-pack/plugin/ml/log-structure-finder/licenses/super-csv-LICENSE.txt delete mode 100644 x-pack/plugin/ml/log-structure-finder/licenses/super-csv-NOTICE.txt delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinder.java delete mode 100644 x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java} (93%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java} (72%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder}/FieldStatsCalculator.java (98%) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinder.java rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderFactory.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java} (67%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java} (89%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java} (95%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder}/GrokPatternCreator.java (97%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinder.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinder.java} (65%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactory.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java} (88%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinder.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java} (86%) rename 
x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactory.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java} (73%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder}/TimestampFormatFinder.java (98%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinder.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java} (79%) rename x-pack/plugin/ml/{log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactory.java => src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java} (92%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactoryTests.java} (83%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java} (67%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder}/FieldStatsCalculatorTests.java (97%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java} (86%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java} (94%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtilsTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java} (80%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder}/GrokPatternCreatorTests.java (85%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactoryTests.java} (84%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java} (69%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactoryTests.java} (70%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java => 
src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java} (92%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder}/TimestampFormatFinderTests.java (98%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactoryTests.java} (84%) rename x-pack/plugin/ml/{log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java => src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java} (70%) diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java similarity index 93% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStats.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java index 8e8401123aa9f..a09aa522f7f87 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; @@ -49,12 +49,12 @@ public class FieldStats implements ToXContentObject { private final Double medianValue; private final List> topHits; - FieldStats(long count, int cardinality, List> topHits) { + public FieldStats(long count, int cardinality, List> topHits) { this(count, cardinality, null, null, null, null, topHits); } - FieldStats(long count, int cardinality, Double minValue, Double maxValue, Double meanValue, Double medianValue, - List> topHits) { + public FieldStats(long count, int cardinality, Double minValue, Double maxValue, Double meanValue, Double medianValue, + List> topHits) { this.count = count; this.cardinality = cardinality; this.minValue = minValue; diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java similarity index 95% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java index 6d36da1180220..6993737e8547d 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java @@ -3,7 +3,7 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; @@ -22,9 +22,9 @@ import java.util.TreeMap; /** - * Stores the log file format determined by a {@link LogStructureFinder}. + * Stores the file format determined by Machine Learning. */ -public class LogStructure implements ToXContentObject { +public class FileStructure implements ToXContentObject { public enum Format { @@ -99,7 +99,7 @@ public String toString() { static final ParseField FIELD_STATS = new ParseField("field_stats"); static final ParseField EXPLANATION = new ParseField("explanation"); - public static final ObjectParser PARSER = new ObjectParser<>("log_file_structure", false, Builder::new); + public static final ObjectParser PARSER = new ObjectParser<>("file_structure", false, Builder::new); static { PARSER.declareInt(Builder::setNumLinesAnalyzed, NUM_LINES_ANALYZED); @@ -149,11 +149,11 @@ public String toString() { private final SortedMap fieldStats; private final List explanation; - public LogStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker, - Format format, String multilineStartPattern, String excludeLinesPattern, List inputFields, - Boolean hasHeaderRow, Character delimiter, Boolean shouldTrimFields, String grokPattern, String timestampField, - List timestampFormats, boolean needClientTimezone, Map mappings, - Map fieldStats, List explanation) { + public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker, + Format format, String multilineStartPattern, String excludeLinesPattern, List inputFields, + Boolean hasHeaderRow, Character delimiter, Boolean shouldTrimFields, String grokPattern, String timestampField, + List timestampFormats, boolean needClientTimezone, Map mappings, + Map fieldStats, List explanation) { this.numLinesAnalyzed = numLinesAnalyzed; this.numMessagesAnalyzed = numMessagesAnalyzed; @@ -325,7 +325,7 @@ public boolean equals(Object other) { return false; } - LogStructure that = (LogStructure) other; + FileStructure that = (FileStructure) other; return this.numLinesAnalyzed == that.numLinesAnalyzed && this.numMessagesAnalyzed == that.numMessagesAnalyzed && this.needClientTimezone == that.needClientTimezone && @@ -473,7 +473,7 @@ public Builder setExplanation(List explanation) { } @SuppressWarnings("fallthrough") - public LogStructure build() { + public FileStructure build() { if (numLinesAnalyzed <= 0) { throw new IllegalArgumentException("Number of lines analyzed must be positive."); @@ -567,7 +567,7 @@ public LogStructure build() { throw new IllegalArgumentException("Explanation must be specified."); } - return new LogStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, + return new FileStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation); } diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java similarity index 96% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java index 4a95e6631c96a..2041fb26a6259 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java similarity index 81% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index 2a10e11164f6e..5e89a4840b585 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.AbstractXContentTestCase; @@ -16,13 +16,13 @@ import java.util.Map; import java.util.TreeMap; -public class LogStructureTests extends AbstractXContentTestCase { +public class FileStructureTests extends AbstractXContentTestCase { - protected LogStructure createTestInstance() { + protected FileStructure createTestInstance() { - LogStructure.Format format = randomFrom(EnumSet.allOf(LogStructure.Format.class)); + FileStructure.Format format = randomFrom(EnumSet.allOf(FileStructure.Format.class)); - LogStructure.Builder builder = new LogStructure.Builder(format); + FileStructure.Builder builder = new FileStructure.Builder(format); int numLinesAnalyzed = randomIntBetween(2, 10000); builder.setNumLinesAnalyzed(numLinesAnalyzed); @@ -43,10 +43,10 @@ protected LogStructure createTestInstance() { builder.setExcludeLinesPattern(randomAlphaOfLength(100)); } - if (format == LogStructure.Format.DELIMITED || (format.supportsNesting() && randomBoolean())) { + if (format == FileStructure.Format.DELIMITED || (format.supportsNesting() && randomBoolean())) { builder.setInputFields(Arrays.asList(generateRandomStringArray(10, 10, false, false))); } - if (format == LogStructure.Format.DELIMITED) { + if (format == FileStructure.Format.DELIMITED) { builder.setHasHeaderRow(randomBoolean()); builder.setDelimiter(randomFrom(',', '\t', ';', '|')); } @@ -79,8 +79,8 @@ protected LogStructure createTestInstance() { return builder.build(); } - protected LogStructure doParseInstance(XContentParser parser) { - return LogStructure.PARSER.apply(parser, null).build(); + protected FileStructure doParseInstance(XContentParser parser) { + return FileStructure.PARSER.apply(parser, null).build(); } protected boolean supportsUnknownFields() { diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 7c3594a06cfdd..5996458537ae0 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -48,6 +48,7 @@ dependencies { // ml deps compile project(':libs:grok') + compile "com.ibm.icu:icu4j:${versions.icu4j}" compile "net.sf.supercsv:super-csv:${versions.supercsv}" nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" testCompile 'org.ini4j:ini4j:0.5.2' diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/icu4j-62.1.jar.sha1 b/x-pack/plugin/ml/licenses/icu4j-62.1.jar.sha1 similarity index 100% rename from x-pack/plugin/ml/log-structure-finder/licenses/icu4j-62.1.jar.sha1 rename to x-pack/plugin/ml/licenses/icu4j-62.1.jar.sha1 diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/icu4j-LICENSE.txt b/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt similarity index 100% rename from x-pack/plugin/ml/log-structure-finder/licenses/icu4j-LICENSE.txt rename to x-pack/plugin/ml/licenses/icu4j-LICENSE.txt diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/icu4j-NOTICE.txt b/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt similarity index 100% rename from x-pack/plugin/ml/log-structure-finder/licenses/icu4j-NOTICE.txt rename to x-pack/plugin/ml/licenses/icu4j-NOTICE.txt diff --git a/x-pack/plugin/ml/log-structure-finder/build.gradle b/x-pack/plugin/ml/log-structure-finder/build.gradle deleted file mode 100644 index f5dff6dc8464d..0000000000000 --- a/x-pack/plugin/ml/log-structure-finder/build.gradle +++ /dev/null @@ -1,34 +0,0 @@ -apply plugin: 'elasticsearch.build' - 
-archivesBaseName = 'x-pack-log-structure-finder' - -description = 'Common code for reverse engineering log structure' - -dependencies { - compile "org.elasticsearch:elasticsearch-core:${version}" - compile "org.elasticsearch:elasticsearch-x-content:${version}" - compile project(':libs:grok') - compile "com.ibm.icu:icu4j:${versions.icu4j}" - compile "net.sf.supercsv:super-csv:${versions.supercsv}" - - testCompile "org.elasticsearch.test:framework:${version}" -} - -configurations { - testArtifacts.extendsFrom testRuntime -} -task testJar(type: Jar) { - appendix 'test' - from sourceSets.test.output -} -artifacts { - // normal es plugins do not publish the jar but we need to since users need it for Transport Clients and extensions - archives jar - testArtifacts testJar -} - -forbiddenApisMain { - // log-structure-finder does not depend on server, so cannot forbid server methods - replaceSignatureFiles 'jdk-signatures' -} - diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-2.4.0.jar.sha1 b/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-2.4.0.jar.sha1 deleted file mode 100644 index a0b402133090d..0000000000000 --- a/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-2.4.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -017f8708c929029dde48bc298deaf3c7ae2452d3 \ No newline at end of file diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-LICENSE.txt b/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-LICENSE.txt deleted file mode 100644 index 9e0ad072b2527..0000000000000 --- a/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-LICENSE.txt +++ /dev/null @@ -1,203 +0,0 @@ -/* - * Apache License - * Version 2.0, January 2004 - * http://www.apache.org/licenses/ - * - * TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - * - * 1. Definitions. - * - * "License" shall mean the terms and conditions for use, reproduction, - * and distribution as defined by Sections 1 through 9 of this document. - * - * "Licensor" shall mean the copyright owner or entity authorized by - * the copyright owner that is granting the License. - * - * "Legal Entity" shall mean the union of the acting entity and all - * other entities that control, are controlled by, or are under common - * control with that entity. For the purposes of this definition, - * "control" means (i) the power, direct or indirect, to cause the - * direction or management of such entity, whether by contract or - * otherwise, or (ii) ownership of fifty percent (50%) or more of the - * outstanding shares, or (iii) beneficial ownership of such entity. - * - * "You" (or "Your") shall mean an individual or Legal Entity - * exercising permissions granted by this License. - * - * "Source" form shall mean the preferred form for making modifications, - * including but not limited to software source code, documentation - * source, and configuration files. - * - * "Object" form shall mean any form resulting from mechanical - * transformation or translation of a Source form, including but - * not limited to compiled object code, generated documentation, - * and conversions to other media types. - * - * "Work" shall mean the work of authorship, whether in Source or - * Object form, made available under the License, as indicated by a - * copyright notice that is included in or attached to the work - * (an example is provided in the Appendix below). 
 - * END OF TERMS AND CONDITIONS
 - */
diff --git a/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-NOTICE.txt b/x-pack/plugin/ml/log-structure-finder/licenses/super-csv-NOTICE.txt
deleted file mode 100644
index e69de29bb2d1d..0000000000000
diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinder.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinder.java
deleted file mode 100644
index ea2e9efc5fb34..0000000000000
--- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinder.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements.
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -import java.util.List; - -public interface LogStructureFinder { - - /** - * The (possibly multi-line) messages that the log sample was split into. - * @return A list of messages. - */ - List getSampleMessages(); - - /** - * Retrieve the structure of the log file used to instantiate the finder. - * @return The log file structure. - */ - LogStructure getStructure(); -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java b/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java deleted file mode 100644 index 1b53a33f31ee4..0000000000000 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TsvLogStructureFinderFactory.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ -package org.elasticsearch.xpack.ml.logstructurefinder; - -import org.supercsv.prefs.CsvPreference; - -import java.io.IOException; -import java.util.List; - -public class TsvLogStructureFinderFactory implements LogStructureFinderFactory { - - /** - * Rules are: - * - The file must be valid TSV - * - It must contain at least two complete records - * - There must be at least two fields per record (otherwise files with no tabs could be treated as TSV!) - * - Every TSV record except the last must have the same number of fields - * The reason the last record is allowed to have fewer fields than the others is that - * it could have been truncated when the file was sampled. - */ - @Override - public boolean canCreateFromSample(List explanation, String sample) { - return DelimitedLogStructureFinder.canCreateFromSample(explanation, sample, 2, CsvPreference.TAB_PREFERENCE, "TSV"); - } - - @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) - throws IOException { - return DelimitedLogStructureFinder.makeDelimitedLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, - CsvPreference.TAB_PREFERENCE, false); - } -} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java similarity index 93% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java index de010196808d4..625858c867a45 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
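The rules spelled out for the deleted TSV factory above carry over to the shared delimited finder this hunk renames: the sample must parse as delimited data, contain at least two complete records, meet a per-format minimum number of fields per record, and keep the field count constant for every record except the possibly truncated last one. A minimal sketch of that acceptance check, built on the same Super CSV reader the finder uses; the helper name and the simplified error handling are illustrative, not the shipped implementation:

import org.supercsv.io.CsvListReader;
import org.supercsv.prefs.CsvPreference;

import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;

final class DelimitedSampleCheck {

    // True if the sample looks like valid delimited data under the given
    // preference: at least two records, a minimum field count, and a constant
    // field count for all records except the (maybe truncated) last one.
    static boolean looksDelimited(String sample, CsvPreference preference, int minFieldsPerRow) {
        List<List<String>> rows = new ArrayList<>();
        try (CsvListReader reader = new CsvListReader(new StringReader(sample), preference)) {
            List<String> row;
            while ((row = reader.read()) != null) {
                rows.add(row);
            }
        } catch (IOException | RuntimeException e) {
            return false; // does not even parse as delimited data
        }
        if (rows.size() < 2 || rows.get(0).size() < minFieldsPerRow) {
            return false;
        }
        int expectedFields = rows.get(0).size();
        for (int i = 1; i < rows.size() - 1; i++) {
            if (rows.get(i).size() != expectedFields) {
                return false; // every record except the last must agree
            }
        }
        // The last record may have been cut off when the file was sampled
        return rows.get(rows.size() - 1).size() <= expectedFields;
    }
}

For TSV the call would be looksDelimited(sample, CsvPreference.TAB_PREFERENCE, 2); the two-field minimum is what stops files containing no tabs at all from being classified as TSV.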
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; +import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import org.supercsv.exception.SuperCsvException; import org.supercsv.io.CsvListReader; import org.supercsv.prefs.CsvPreference; @@ -29,16 +31,16 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -public class DelimitedLogStructureFinder implements LogStructureFinder { +public class DelimitedFileStructureFinder implements FileStructureFinder { private static final int MAX_LEVENSHTEIN_COMPARISONS = 100; private final List sampleMessages; - private final LogStructure structure; + private final FileStructure structure; - static DelimitedLogStructureFinder makeDelimitedLogStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker, CsvPreference csvPreference, - boolean trimFields) throws IOException { + static DelimitedFileStructureFinder makeDelimitedFileStructureFinder(List explanation, String sample, String charsetName, + Boolean hasByteOrderMarker, CsvPreference csvPreference, + boolean trimFields) throws IOException { Tuple>, List> parsed = readRows(sample, csvPreference); List> rows = parsed.v1(); @@ -72,7 +74,7 @@ static DelimitedLogStructureFinder makeDelimitedLogStructureFinder(List String preamble = Pattern.compile("\n").splitAsStream(sample).limit(lineNumbers.get(1)).collect(Collectors.joining("\n", "", "\n")); char delimiter = (char) csvPreference.getDelimiterChar(); - LogStructure.Builder structureBuilder = new LogStructure.Builder(LogStructure.Format.DELIMITED) + FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.DELIMITED) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) .setSampleStart(preamble) @@ -86,7 +88,7 @@ static DelimitedLogStructureFinder makeDelimitedLogStructureFinder(List structureBuilder.setShouldTrimFields(true); } - Tuple timeField = LogStructureUtils.guessTimestampField(explanation, sampleRecords); + Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords); if (timeField != null) { String timeLineRegex = null; StringBuilder builder = new StringBuilder("^"); @@ -124,24 +126,24 @@ static DelimitedLogStructureFinder makeDelimitedLogStructureFinder(List } Tuple, SortedMap> mappingsAndFieldStats = - LogStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); SortedMap mappings = mappingsAndFieldStats.v1(); - mappings.put(LogStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date")); + mappings.put(FileStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date")); if (mappingsAndFieldStats.v2() != null) { structureBuilder.setFieldStats(mappingsAndFieldStats.v2()); } - LogStructure structure = structureBuilder + FileStructure structure = structureBuilder .setMappings(mappings) .setExplanation(explanation) .build(); - return new DelimitedLogStructureFinder(sampleMessages, structure); + return new 
DelimitedFileStructureFinder(sampleMessages, structure); } - private DelimitedLogStructureFinder(List sampleMessages, LogStructure structure) { + private DelimitedFileStructureFinder(List sampleMessages, FileStructure structure) { this.sampleMessages = Collections.unmodifiableList(sampleMessages); this.structure = structure; } @@ -152,7 +154,7 @@ public List getSampleMessages() { } @Override - public LogStructure getStructure() { + public FileStructure getStructure() { return structure; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java similarity index 72% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java index 3e4c3ea225cf8..0bbe13e3b05c3 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactory.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.supercsv.prefs.CsvPreference; @@ -11,13 +11,13 @@ import java.util.List; import java.util.Locale; -public class DelimitedLogStructureFinderFactory implements LogStructureFinderFactory { +public class DelimitedFileStructureFinderFactory implements FileStructureFinderFactory { private final CsvPreference csvPreference; private final int minFieldsPerRow; private final boolean trimFields; - DelimitedLogStructureFinderFactory(char delimiter, int minFieldsPerRow, boolean trimFields) { + DelimitedFileStructureFinderFactory(char delimiter, int minFieldsPerRow, boolean trimFields) { csvPreference = new CsvPreference.Builder('"', delimiter, "\n").build(); this.minFieldsPerRow = minFieldsPerRow; this.trimFields = trimFields; @@ -45,13 +45,13 @@ public boolean canCreateFromSample(List explanation, String sample) { formatName = Character.getName(csvPreference.getDelimiterChar()).toLowerCase(Locale.ROOT) + " delimited values"; break; } - return DelimitedLogStructureFinder.canCreateFromSample(explanation, sample, minFieldsPerRow, csvPreference, formatName); + return DelimitedFileStructureFinder.canCreateFromSample(explanation, sample, minFieldsPerRow, csvPreference, formatName); } @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws IOException { - return DelimitedLogStructureFinder.makeDelimitedLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, + return DelimitedFileStructureFinder.makeDelimitedFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker, csvPreference, trimFields); } } diff --git 
a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java similarity index 98% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculator.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java index 5f76e48f0c8b1..130a37dbc19f5 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculator.java @@ -3,7 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; + +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; import java.util.ArrayList; import java.util.Collection; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinder.java new file mode 100644 index 0000000000000..c09978b6bcb0e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinder.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.filestructurefinder; + +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; + +import java.util.List; + +public interface FileStructureFinder { + + /** + * The (possibly multi-line) messages that the sampled lines were combined into. + * @return A list of messages. + */ + List getSampleMessages(); + + /** + * Retrieve the structure of the file used to instantiate the finder. + * @return The file structure. + */ + FileStructure getStructure(); +} diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java similarity index 67% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderFactory.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java index af322ee4bf0e0..4b6fce322ee1d 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderFactory.java @@ -3,33 +3,33 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
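The new FileStructureFinder interface added above is deliberately small: a finder only exposes the messages the sample was split into and the structure it deduced. Every concrete finder in this patch follows the same storage pattern, which a trivial implementation makes explicit; this class is illustrative and not part of the change:

import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure;

import java.util.Collections;
import java.util.List;

final class PrecomputedFileStructureFinder implements FileStructureFinder {

    private final List<String> sampleMessages;
    private final FileStructure structure;

    PrecomputedFileStructureFinder(List<String> sampleMessages, FileStructure structure) {
        // Mirror the concrete finders: hand the messages out read-only
        this.sampleMessages = Collections.unmodifiableList(sampleMessages);
        this.structure = structure;
    }

    @Override
    public List<String> getSampleMessages() {
        return sampleMessages;
    }

    @Override
    public FileStructure getStructure() {
        return structure;
    }
}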
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import java.util.List; -public interface LogStructureFinderFactory { +public interface FileStructureFinderFactory { /** - * Given a sample of a log file, decide whether this factory will be able + * Given a sample of a file, decide whether this factory will be able * to create an appropriate object to represent its ingestion configs. * @param explanation List of reasons for making decisions. May contain items when passed and new reasons * can be appended by this method. - * @param sample A sample from the log file to be ingested. - * @return true if this factory can create an appropriate log + * @param sample A sample from the file to be ingested. + * @return true if this factory can create an appropriate * file structure given the sample; otherwise false. */ boolean canCreateFromSample(List explanation, String sample); /** - * Create an object representing the structure of a log file. + * Create an object representing the structure of a file. * @param explanation List of reasons for making decisions. May contain items when passed and new reasons * can be appended by this method. - * @param sample A sample from the log file to be ingested. + * @param sample A sample from the file to be ingested. * @param charsetName The name of the character set in which the sample was provided. * @param hasByteOrderMarker Did the sample have a byte order marker? null means "not relevant". - * @return A log file structure object suitable for ingesting the supplied sample. + * @return A file structure object suitable for ingesting the supplied sample. * @throws Exception if something goes wrong during creation. */ - LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws Exception; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java similarity index 89% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java index e747a588dfd84..983188614d0ca 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import com.ibm.icu.text.CharsetDetector; import com.ibm.icu.text.CharsetMatch; @@ -26,13 +26,13 @@ import java.util.Set; /** - * Runs the high-level steps needed to create ingest configs for the specified log file. In order: + * Runs the high-level steps needed to create ingest configs for the specified file. In order: * 1. Determine the most likely character set (UTF-8, UTF-16LE, ISO-8859-2, etc.) * 2. 
Load a sample of the file, consisting of the first 1000 lines of the file * 3. Determine the most likely file structure - one of ND-JSON, XML, CSV, TSV or semi-structured text * 4. Create an appropriate structure object and delegate writing configs to it */ -public final class LogStructureFinderManager { +public final class FileStructureFinderManager { public static final int MIN_SAMPLE_LINE_COUNT = 2; @@ -65,33 +65,33 @@ public final class LogStructureFinderManager { /** * These need to be ordered so that the more generic formats come after the more specific ones */ - private static final List ORDERED_STRUCTURE_FACTORIES = Collections.unmodifiableList(Arrays.asList( - new JsonLogStructureFinderFactory(), - new XmlLogStructureFinderFactory(), + private static final List ORDERED_STRUCTURE_FACTORIES = Collections.unmodifiableList(Arrays.asList( + new JsonFileStructureFinderFactory(), + new XmlFileStructureFinderFactory(), // ND-JSON will often also be valid (although utterly weird) CSV, so JSON must come before CSV - new DelimitedLogStructureFinderFactory(',', 2, false), - new DelimitedLogStructureFinderFactory('\t', 2, false), - new DelimitedLogStructureFinderFactory(';', 4, false), - new DelimitedLogStructureFinderFactory('|', 5, true), - new TextLogStructureFinderFactory() + new DelimitedFileStructureFinderFactory(',', 2, false), + new DelimitedFileStructureFinderFactory('\t', 2, false), + new DelimitedFileStructureFinderFactory(';', 4, false), + new DelimitedFileStructureFinderFactory('|', 5, true), + new TextLogFileStructureFinderFactory() )); private static final int BUFFER_SIZE = 8192; /** - * Given a stream of data from some log file, determine its structure. + * Given a stream of data from some file, determine its structure. * @param idealSampleLineCount Ideally, how many lines from the stream will be read to determine the structure? * If the stream has fewer lines then an attempt will still be made, providing at * least {@link #MIN_SAMPLE_LINE_COUNT} lines can be read. * @param fromFile A stream from which the sample will be read. - * @return A {@link LogStructureFinder} object from which the structure and messages can be queried. + * @return A {@link FileStructureFinder} object from which the structure and messages can be queried. * @throws Exception A variety of problems could occur at various stages of the structure finding process. */ - public LogStructureFinder findLogStructure(int idealSampleLineCount, InputStream fromFile) throws Exception { + public FileStructureFinder findLogStructure(int idealSampleLineCount, InputStream fromFile) throws Exception { return findLogStructure(new ArrayList<>(), idealSampleLineCount, fromFile); } - public LogStructureFinder findLogStructure(List explanation, int idealSampleLineCount, InputStream fromFile) + public FileStructureFinder findLogStructure(List explanation, int idealSampleLineCount, InputStream fromFile) throws Exception { CharsetMatch charsetMatch = findCharset(explanation, fromFile); @@ -159,8 +159,8 @@ CharsetMatch findCharset(List explanation, InputStream inputStream) thro String name = charsetMatch.getName(); if (Charset.isSupported(name) && FILEBEAT_SUPPORTED_ENCODINGS.contains(name.toLowerCase(Locale.ROOT))) { - // This extra test is to avoid trying to read binary files as text. Running the log config - // deduction algorithms on binary files is very slow as the binary files generally appear to + // This extra test is to avoid trying to read binary files as text. 
Running the structure + // finding algorithms on binary files is very slow as the binary files generally appear to // have very long lines. boolean spaceEncodingContainsZeroByte = false; Charset charset = Charset.forName(name); @@ -192,10 +192,10 @@ CharsetMatch findCharset(List explanation, InputStream inputStream) thro (containsZeroBytes ? " - could it be binary data?" : "")); } - LogStructureFinder makeBestStructureFinder(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + FileStructureFinder makeBestStructureFinder(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws Exception { - for (LogStructureFinderFactory factory : ORDERED_STRUCTURE_FACTORIES) { + for (FileStructureFinderFactory factory : ORDERED_STRUCTURE_FACTORIES) { if (factory.canCreateFromSample(explanation, sample)) { return factory.createFromSample(explanation, sample, charsetName, hasByteOrderMarker); } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java similarity index 95% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java index 69214a746ed79..0341e03a20bc6 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtils.java @@ -3,11 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.grok.Grok; -import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; +import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import java.util.ArrayList; import java.util.Arrays; @@ -22,7 +23,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -public final class LogStructureUtils { +public final class FileStructureUtils { public static final String DEFAULT_TIMESTAMP_FIELD = "@timestamp"; public static final String MAPPING_TYPE_SETTING = "type"; @@ -36,7 +37,7 @@ public final class LogStructureUtils { private static final int KEYWORD_MAX_LEN = 256; private static final int KEYWORD_MAX_SPACES = 5; - private LogStructureUtils() { + private FileStructureUtils() { } /** @@ -46,10 +47,10 @@ private LogStructureUtils() { * - Must have the same timestamp format in every record * If multiple fields meet these criteria then the one that occurred first in the first sample record * is chosen. - * @param explanation List of reasons for choosing the overall log structure. This list + * @param explanation List of reasons for choosing the overall file structure. This list * may be non-empty when the method is called, and this method may * append to it. - * @param sampleRecords List of records derived from the provided log sample. 
+ * @param sampleRecords List of records derived from the provided sample. * @return A tuple of (field name, timestamp format) if one can be found, or null if * there is no consistent timestamp. */ @@ -164,7 +165,7 @@ static Tuple, FieldStats> guessMappingAndCalculateFieldStats if (fieldValues.stream().anyMatch(value -> value instanceof List || value instanceof Object[])) { // Elasticsearch fields can be either arrays or single values, but array values must all have the same type return guessMappingAndCalculateFieldStats(explanation, fieldName, - fieldValues.stream().flatMap(LogStructureUtils::flatten).collect(Collectors.toList())); + fieldValues.stream().flatMap(FileStructureUtils::flatten).collect(Collectors.toList())); } Collection fieldValuesAsStrings = fieldValues.stream().map(Object::toString).collect(Collectors.toList()); @@ -186,7 +187,7 @@ private static Stream flatten(Object value) { /** * Given some sample values for a field, guess the most appropriate index mapping for the * field. - * @param explanation List of reasons for choosing the overall log structure. This list + * @param explanation List of reasons for choosing the overall file structure. This list * may be non-empty when the method is called, and this method may * append to it. * @param fieldName Name of the field for which mappings are to be guessed. @@ -236,7 +237,7 @@ else if (fieldValues.stream().allMatch(IP_GROK::match)) { return Collections.singletonMap(MAPPING_TYPE_SETTING, "ip"); } - if (fieldValues.stream().anyMatch(LogStructureUtils::isMoreLikelyTextThanKeyword)) { + if (fieldValues.stream().anyMatch(FileStructureUtils::isMoreLikelyTextThanKeyword)) { return Collections.singletonMap(MAPPING_TYPE_SETTING, "text"); } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java similarity index 97% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreator.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java index b24e067b59d4b..3caa78589ba1b 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java @@ -3,11 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
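FileStructureUtils decides between keyword and text mappings using the two constants renamed above, KEYWORD_MAX_LEN = 256 and KEYWORD_MAX_SPACES = 5: a sampled value that is very long, or that contains many spaces, reads more like free text than an enumerable keyword. A sketch of how such a check can be written; the exact boundary conditions of the shipped isMoreLikelyTextThanKeyword may differ:

final class TextOrKeyword {

    // Assumed reconstruction of the keyword-vs-text heuristic; the magic
    // numbers correspond to KEYWORD_MAX_LEN and KEYWORD_MAX_SPACES.
    static boolean isMoreLikelyTextThanKeyword(String value) {
        int spaces = 0;
        for (int i = 0; i < value.length(); i++) {
            if (value.charAt(i) == ' ') {
                ++spaces;
            }
        }
        return value.length() > 256 || spaces > 5;
    }
}

As the surrounding hunk shows, this check only runs after the IP grok pattern (and, earlier, the date, numeric and boolean checks) has failed to match, so the text/keyword split is effectively the last resort for string-like fields.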
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.grok.Grok; -import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; +import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import java.util.ArrayList; import java.util.Arrays; @@ -459,7 +460,7 @@ public String processCaptures(Map fieldNameCountStore, Collecti } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { - Map fullMappingType = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, mappingType); + Map fullMappingType = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, mappingType); if ("date".equals(mappingType)) { TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(sampleValue); if (timestampMatch != null) { @@ -531,10 +532,10 @@ public String processCaptures(Map fieldNameCountStore, Collecti } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { - mappings.put(adjustedFieldName, LogStructureUtils.guessScalarMapping(explanation, adjustedFieldName, values)); + mappings.put(adjustedFieldName, FileStructureUtils.guessScalarMapping(explanation, adjustedFieldName, values)); } if (fieldStats != null) { - fieldStats.put(adjustedFieldName, LogStructureUtils.calculateFieldStats(values)); + fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values)); } return "\\b" + fieldName + "=%{USER:" + adjustedFieldName + "}"; } @@ -616,11 +617,11 @@ public Tuple processMatch(List explanation, Collection sampleMessages; - private final LogStructure structure; + private final FileStructure structure; - static JsonLogStructureFinder makeJsonLogStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker) throws IOException { + static JsonFileStructureFinder makeJsonFileStructureFinder(List explanation, String sample, String charsetName, + Boolean hasByteOrderMarker) throws IOException { List> sampleRecords = new ArrayList<>(); @@ -42,14 +44,14 @@ static JsonLogStructureFinder makeJsonLogStructureFinder(List explanatio sampleRecords.add(parser.mapOrdered()); } - LogStructure.Builder structureBuilder = new LogStructure.Builder(LogStructure.Format.JSON) + FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.JSON) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) .setSampleStart(sampleMessages.stream().limit(2).collect(Collectors.joining("\n", "", "\n"))) .setNumLinesAnalyzed(sampleMessages.size()) .setNumMessagesAnalyzed(sampleRecords.size()); - Tuple timeField = LogStructureUtils.guessTimestampField(explanation, sampleRecords); + Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) .setTimestampFormats(timeField.v2().dateFormats) @@ -57,24 +59,24 @@ static JsonLogStructureFinder makeJsonLogStructureFinder(List explanatio } Tuple, SortedMap> mappingsAndFieldStats = - LogStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); SortedMap mappings = mappingsAndFieldStats.v1(); - 
mappings.put(LogStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date")); + mappings.put(FileStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date")); if (mappingsAndFieldStats.v2() != null) { structureBuilder.setFieldStats(mappingsAndFieldStats.v2()); } - LogStructure structure = structureBuilder + FileStructure structure = structureBuilder .setMappings(mappings) .setExplanation(explanation) .build(); - return new JsonLogStructureFinder(sampleMessages, structure); + return new JsonFileStructureFinder(sampleMessages, structure); } - private JsonLogStructureFinder(List sampleMessages, LogStructure structure) { + private JsonFileStructureFinder(List sampleMessages, FileStructure structure) { this.sampleMessages = Collections.unmodifiableList(sampleMessages); this.structure = structure; } @@ -85,7 +87,7 @@ public List getSampleMessages() { } @Override - public LogStructure getStructure() { + public FileStructure getStructure() { return structure; } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java similarity index 88% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactory.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java index c5da103eb0560..02be3c1cf19d4 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactory.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -16,7 +16,7 @@ import static org.elasticsearch.common.xcontent.json.JsonXContent.jsonXContent; -public class JsonLogStructureFinderFactory implements LogStructureFinderFactory { +public class JsonFileStructureFinderFactory implements FileStructureFinderFactory { /** * This format matches if the sample consists of one or more JSON documents. 
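The factory comment just above is the whole contract: the sample matches if it consists of one or more JSON documents. A reduced sketch of such a check, reusing the XContent imports visible in this diff; it treats the sample as newline-delimited JSON, which is the common case the factory targets, whereas the real implementation also copes with documents spanning several lines and reports parsing context on failure:

import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.json.JsonXContent.jsonXContent;

final class NdJsonProbe {

    // True if every non-blank line parses as a complete JSON object and at
    // least one such line exists.
    static boolean looksLikeNdJson(String sample) {
        boolean sawDocument = false;
        for (String line : sample.split("\n")) {
            if (line.trim().isEmpty()) {
                continue;
            }
            try (XContentParser parser = jsonXContent.createParser(NamedXContentRegistry.EMPTY,
                    DeprecationHandler.THROW_UNSUPPORTED_OPERATION, line)) {
                parser.map(); // throws if the line is not a complete JSON object
                sawDocument = true;
            } catch (IOException | RuntimeException e) {
                return false;
            }
        }
        return sawDocument;
    }
}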
@@ -61,9 +61,9 @@ DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new ContextPrintingStringReader( } @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws IOException { - return JsonLogStructureFinder.makeJsonLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); + return JsonFileStructureFinder.makeJsonFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); } private static class ContextPrintingStringReader extends StringReader { diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java similarity index 86% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java index e830aa30a1e87..95e0a5dc69d6a 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinder.java @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; +import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import java.util.ArrayList; import java.util.Collection; @@ -20,20 +22,20 @@ import java.util.TreeMap; import java.util.regex.Pattern; -public class TextLogStructureFinder implements LogStructureFinder { +public class TextLogFileStructureFinder implements FileStructureFinder { private final List sampleMessages; - private final LogStructure structure; + private final FileStructure structure; - static TextLogStructureFinder makeTextLogStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker) { + static TextLogFileStructureFinder makeTextLogFileStructureFinder(List explanation, String sample, String charsetName, + Boolean hasByteOrderMarker) { String[] sampleLines = sample.split("\n"); Tuple> bestTimestamp = mostLikelyTimestamp(sampleLines); if (bestTimestamp == null) { // Is it appropriate to treat a file that is neither structured nor has // a regular pattern of timestamps as a log file? Probably not... 
- throw new IllegalArgumentException("Could not find a timestamp in the log sample provided"); + throw new IllegalArgumentException("Could not find a timestamp in the sample provided"); } explanation.add("Most likely timestamp format is [" + bestTimestamp.v1() + "]"); @@ -70,7 +72,7 @@ static TextLogStructureFinder makeTextLogStructureFinder(List explanatio } // Don't add the last message, as it might be partial and mess up subsequent pattern finding - LogStructure.Builder structureBuilder = new LogStructure.Builder(LogStructure.Format.SEMI_STRUCTURED_TEXT) + FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.SEMI_STRUCTURED_TEXT) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) .setSampleStart(preamble.toString()) @@ -79,8 +81,8 @@ static TextLogStructureFinder makeTextLogStructureFinder(List explanatio .setMultilineStartPattern(multiLineRegex); SortedMap mappings = new TreeMap<>(); - mappings.put("message", Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "text")); - mappings.put(LogStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date")); + mappings.put("message", Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "text")); + mappings.put(FileStructureUtils.DEFAULT_TIMESTAMP_FIELD, Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date")); SortedMap fieldStats = new TreeMap<>(); @@ -97,7 +99,7 @@ static TextLogStructureFinder makeTextLogStructureFinder(List explanatio grokPattern = grokPatternCreator.createGrokPatternFromExamples(bestTimestamp.v1().grokPatternName, interimTimestampField); } - LogStructure structure = structureBuilder + FileStructure structure = structureBuilder .setTimestampField(interimTimestampField) .setTimestampFormats(bestTimestamp.v1().dateFormats) .setNeedClientTimezone(bestTimestamp.v1().hasTimezoneDependentParsing()) @@ -107,10 +109,10 @@ static TextLogStructureFinder makeTextLogStructureFinder(List explanatio .setExplanation(explanation) .build(); - return new TextLogStructureFinder(sampleMessages, structure); + return new TextLogFileStructureFinder(sampleMessages, structure); } - private TextLogStructureFinder(List sampleMessages, LogStructure structure) { + private TextLogFileStructureFinder(List sampleMessages, FileStructure structure) { this.sampleMessages = Collections.unmodifiableList(sampleMessages); this.structure = structure; } @@ -121,7 +123,7 @@ public List getSampleMessages() { } @Override - public LogStructure getStructure() { + public FileStructure getStructure() { return structure; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java similarity index 73% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactory.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java index d129ba95bd87e..5f737eeb9b823 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactory.java @@ -3,12 +3,12 @@ * or more contributor license agreements. 
Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import java.util.List; import java.util.regex.Pattern; -public class TextLogStructureFinderFactory implements LogStructureFinderFactory { +public class TextLogFileStructureFinderFactory implements FileStructureFinderFactory { // This works because, by default, dot doesn't match newlines private static final Pattern TWO_NON_BLANK_LINES_PATTERN = Pattern.compile(".\n+."); @@ -33,7 +33,7 @@ public boolean canCreateFromSample(List explanation, String sample) { } @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) { - return TextLogStructureFinder.makeTextLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); + public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) { + return TextLogFileStructureFinder.makeTextLogFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java similarity index 98% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java index 30c94378f9e22..81e490878a007 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinder.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
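The TWO_NON_BLANK_LINES_PATTERN used by the renamed text factory above, Pattern.compile(".\n+."), rewards a quick demonstration: because '.' does not match newlines by default, the pattern can only match when a non-empty line is followed, possibly across blank lines, by another non-empty line. A standalone illustration, not part of the change:

import java.util.regex.Pattern;

public final class TwoNonBlankLinesDemo {

    // '.' cannot match '\n' by default, so both dots must land on
    // characters from two different, non-empty lines.
    private static final Pattern TWO_NON_BLANK_LINES = Pattern.compile(".\n+.");

    public static void main(String[] args) {
        System.out.println(TWO_NON_BLANK_LINES.matcher("only one line").find());          // false
        System.out.println(TWO_NON_BLANK_LINES.matcher("line one\nline two").find());     // true
        System.out.println(TWO_NON_BLANK_LINES.matcher("line one\n\n\nline two").find()); // true
        System.out.println(TWO_NON_BLANK_LINES.matcher("\n\n\n").find());                 // false: blank lines only
    }
}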
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.grok.Grok; @@ -334,10 +334,10 @@ public boolean hasTimezoneDependentParsing() { public Map getEsDateMappingTypeWithFormat() { if (dateFormats.contains("TAI64N")) { // There's no format for TAI64N in the date formats used in mappings - return Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"); + return Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"); } Map mapping = new LinkedHashMap<>(); - mapping.put(LogStructureUtils.MAPPING_TYPE_SETTING, "date"); + mapping.put(FileStructureUtils.MAPPING_TYPE_SETTING, "date"); String formats = dateFormats.stream().flatMap(format -> { switch (format) { case "ISO8601": @@ -351,7 +351,7 @@ public Map getEsDateMappingTypeWithFormat() { } }).collect(Collectors.joining("||")); if (formats.isEmpty() == false) { - mapping.put(LogStructureUtils.MAPPING_FORMAT_SETTING, formats); + mapping.put(FileStructureUtils.MAPPING_FORMAT_SETTING, formats); } return mapping; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java similarity index 79% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinder.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java index 6c81032c05baf..570f36f59c06e 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinder.java @@ -3,10 +3,12 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
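getEsDateMappingTypeWithFormat, updated in the hunk above, encodes two details worth calling out: TAI64N timestamps have no equivalent mapping date format, so such a field falls back to keyword, and multiple candidate date formats are combined into a single mapping format string using Elasticsearch's "||" separator. A reduced sketch of the same shape, with the per-format translation switch abbreviated away:

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

final class DateMappingSketch {

    static Map<String, String> dateMappingFor(List<String> dateFormats) {
        if (dateFormats.contains("TAI64N")) {
            // No mapping-level date format exists for TAI64N, so index it as-is
            return Collections.singletonMap("type", "keyword");
        }
        Map<String, String> mapping = new LinkedHashMap<>();
        mapping.put("type", "date");
        // "||" lets a date field accept any of several formats
        String formats = String.join("||", dateFormats);
        if (formats.isEmpty() == false) {
            mapping.put("format", formats);
        }
        return mapping;
    }
}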
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; +import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; @@ -30,13 +32,13 @@ import java.util.TreeMap; import java.util.regex.Pattern; -public class XmlLogStructureFinder implements LogStructureFinder { +public class XmlFileStructureFinder implements FileStructureFinder { private final List sampleMessages; - private final LogStructure structure; + private final FileStructure structure; - static XmlLogStructureFinder makeXmlLogStructureFinder(List explanation, String sample, String charsetName, - Boolean hasByteOrderMarker) + static XmlFileStructureFinder makeXmlFileStructureFinder(List explanation, String sample, String charsetName, + Boolean hasByteOrderMarker) throws IOException, ParserConfigurationException, SAXException { String messagePrefix; @@ -80,7 +82,7 @@ static XmlLogStructureFinder makeXmlLogStructureFinder(List explanation, assert messagePrefix.charAt(0) == '<'; String topLevelTag = messagePrefix.substring(1); - LogStructure.Builder structureBuilder = new LogStructure.Builder(LogStructure.Format.XML) + FileStructure.Builder structureBuilder = new FileStructure.Builder(FileStructure.Format.XML) .setCharset(charsetName) .setHasByteOrderMarker(hasByteOrderMarker) .setSampleStart(preamble.toString()) @@ -88,7 +90,7 @@ static XmlLogStructureFinder makeXmlLogStructureFinder(List explanation, .setNumMessagesAnalyzed(sampleRecords.size()) .setMultilineStartPattern("^\\s*<" + topLevelTag); - Tuple timeField = LogStructureUtils.guessTimestampField(explanation, sampleRecords); + Tuple timeField = FileStructureUtils.guessTimestampField(explanation, sampleRecords); if (timeField != null) { structureBuilder.setTimestampField(timeField.v1()) .setTimestampFormats(timeField.v2().dateFormats) @@ -96,7 +98,7 @@ static XmlLogStructureFinder makeXmlLogStructureFinder(List explanation, } Tuple, SortedMap> mappingsAndFieldStats = - LogStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); + FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, sampleRecords); if (mappingsAndFieldStats.v2() != null) { structureBuilder.setFieldStats(mappingsAndFieldStats.v2()); @@ -104,22 +106,22 @@ static XmlLogStructureFinder makeXmlLogStructureFinder(List explanation, SortedMap innerMappings = mappingsAndFieldStats.v1(); Map secondLevelProperties = new LinkedHashMap<>(); - secondLevelProperties.put(LogStructureUtils.MAPPING_TYPE_SETTING, "object"); - secondLevelProperties.put(LogStructureUtils.MAPPING_PROPERTIES_SETTING, innerMappings); + secondLevelProperties.put(FileStructureUtils.MAPPING_TYPE_SETTING, "object"); + secondLevelProperties.put(FileStructureUtils.MAPPING_PROPERTIES_SETTING, innerMappings); SortedMap outerMappings = new TreeMap<>(); outerMappings.put(topLevelTag, secondLevelProperties); - outerMappings.put(LogStructureUtils.DEFAULT_TIMESTAMP_FIELD, - Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date")); + outerMappings.put(FileStructureUtils.DEFAULT_TIMESTAMP_FIELD, + 
Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date")); - LogStructure structure = structureBuilder + FileStructure structure = structureBuilder .setMappings(outerMappings) .setExplanation(explanation) .build(); - return new XmlLogStructureFinder(sampleMessages, structure); + return new XmlFileStructureFinder(sampleMessages, structure); } - private XmlLogStructureFinder(List sampleMessages, LogStructure structure) { + private XmlFileStructureFinder(List sampleMessages, FileStructure structure) { this.sampleMessages = Collections.unmodifiableList(sampleMessages); this.structure = structure; } @@ -130,7 +132,7 @@ public List getSampleMessages() { } @Override - public LogStructure getStructure() { + public FileStructure getStructure() { return structure; } diff --git a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java similarity index 92% rename from x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactory.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java index c7577ff07de6d..f8536d1437594 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/main/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactory.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
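The XML finder above derives its message boundary from the document structure itself: messagePrefix is asserted to start with '<', the top-level tag is cut out of it, and the multiline start pattern becomes "^\s*<" plus that tag. A small illustration of applying such a pattern to find message starts; the helper is assumed, not shipped code:

import java.util.regex.Pattern;

final class XmlMessageBoundaries {

    // Any line whose first non-whitespace character opens the top-level tag
    // starts a new XML message.
    static int countMessageStarts(String sample, String topLevelTag) {
        Pattern startPattern = Pattern.compile("^\\s*<" + Pattern.quote(topLevelTag));
        int count = 0;
        for (String line : sample.split("\n")) {
            if (startPattern.matcher(line).find()) {
                ++count;
            }
        }
        return count;
    }
}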
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.xml.sax.SAXException; @@ -17,11 +17,11 @@ import java.io.StringReader; import java.util.List; -public class XmlLogStructureFinderFactory implements LogStructureFinderFactory { +public class XmlFileStructureFinderFactory implements FileStructureFinderFactory { private final XMLInputFactory xmlFactory; - public XmlLogStructureFinderFactory() { + public XmlFileStructureFinderFactory() { xmlFactory = XMLInputFactory.newInstance(); xmlFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.FALSE); xmlFactory.setProperty(XMLInputFactory.IS_VALIDATING, Boolean.FALSE); @@ -115,8 +115,8 @@ public boolean canCreateFromSample(List explanation, String sample) { } @Override - public LogStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) + public FileStructureFinder createFromSample(List explanation, String sample, String charsetName, Boolean hasByteOrderMarker) throws IOException, ParserConfigurationException, SAXException { - return XmlLogStructureFinder.makeXmlLogStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); + return XmlFileStructureFinder.makeXmlFileStructureFinder(explanation, sample, charsetName, hasByteOrderMarker); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactoryTests.java similarity index 83% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactoryTests.java index d9eadbc8f0fde..6bcb827be94d8 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderFactoryTests.java @@ -3,14 +3,14 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
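The renamed XML factory keeps its XMLInputFactory neither namespace-aware nor validating, so log-style XML fragments parse without schema or namespace resolution. A minimal standalone probe with the same setup, here used to pull out the top-level tag; the class and method names are illustrative:

import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;

import java.io.StringReader;

final class LenientXmlProbe {

    static String firstTopLevelTag(String sample) throws XMLStreamException {
        XMLInputFactory xmlFactory = XMLInputFactory.newInstance();
        // Same leniency as the factory: no namespace handling, no validation
        xmlFactory.setProperty(XMLInputFactory.IS_NAMESPACE_AWARE, Boolean.FALSE);
        xmlFactory.setProperty(XMLInputFactory.IS_VALIDATING, Boolean.FALSE);
        XMLStreamReader reader = xmlFactory.createXMLStreamReader(new StringReader(sample));
        try {
            while (reader.hasNext()) {
                if (reader.next() == XMLStreamConstants.START_ELEMENT) {
                    return reader.getLocalName(); // first element is the top-level tag
                }
            }
            return null;
        } finally {
            reader.close();
        }
    }
}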
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; -public class DelimitedLogStructureFinderFactoryTests extends LogStructureTestCase { +public class DelimitedFileStructureFinderFactoryTests extends FileStructureTestCase { - private LogStructureFinderFactory csvFactory = new DelimitedLogStructureFinderFactory(',', 2, false); - private LogStructureFinderFactory tsvFactory = new DelimitedLogStructureFinderFactory('\t', 2, false); - private LogStructureFinderFactory semiColonDelimitedfactory = new DelimitedLogStructureFinderFactory(';', 4, false); - private LogStructureFinderFactory pipeDelimitedFactory = new DelimitedLogStructureFinderFactory('|', 5, true); + private FileStructureFinderFactory csvFactory = new DelimitedFileStructureFinderFactory(',', 2, false); + private FileStructureFinderFactory tsvFactory = new DelimitedFileStructureFinderFactory('\t', 2, false); + private FileStructureFinderFactory semiColonDelimitedfactory = new DelimitedFileStructureFinderFactory(';', 4, false); + private FileStructureFinderFactory pipeDelimitedFactory = new DelimitedFileStructureFinderFactory('|', 5, true); // CSV - no need to check JSON or XML because they come earlier in the order we check formats diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java similarity index 67% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index 57c297cf8d571..6d1f039399eba 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/DelimitedLogStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -3,22 +3,23 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
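The test file that follows exercises the fieldwise Levenshtein comparison the delimited finder uses, roughly, to decide whether the first row looks like a header rather than data (the number of row pairs compared is capped by MAX_LEVENSHTEIN_COMPARISONS, visible earlier in this patch). An assumed sketch of that comparison, not the shipped code:

import java.util.List;

final class FieldwiseLevenshtein {

    // Sum the edit distance of corresponding fields; missing fields on
    // either side compare against the empty string.
    static int levenshteinFieldwiseCompareRows(List<String> firstRow, List<String> secondRow) {
        int total = 0;
        int fieldCount = Math.max(firstRow.size(), secondRow.size());
        for (int i = 0; i < fieldCount; i++) {
            String first = i < firstRow.size() ? firstRow.get(i) : "";
            String second = i < secondRow.size() ? secondRow.get(i) : "";
            total += levenshteinDistance(first, second);
        }
        return total;
    }

    // Standard two-row dynamic programming edit distance
    static int levenshteinDistance(String first, String second) {
        int[] previous = new int[second.length() + 1];
        int[] current = new int[second.length() + 1];
        for (int j = 0; j <= second.length(); j++) {
            previous[j] = j;
        }
        for (int i = 1; i <= first.length(); i++) {
            current[0] = i;
            for (int j = 1; j <= second.length(); j++) {
                int substitution = first.charAt(i - 1) == second.charAt(j - 1) ? 0 : 1;
                current[j] = Math.min(Math.min(current[j - 1] + 1, previous[j] + 1),
                    previous[j - 1] + substitution);
            }
            int[] swap = previous;
            previous = current;
            current = swap;
        }
        return previous[second.length()];
    }
}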
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; import org.supercsv.prefs.CsvPreference; import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import static org.elasticsearch.xpack.ml.logstructurefinder.DelimitedLogStructureFinder.levenshteinFieldwiseCompareRows; -import static org.elasticsearch.xpack.ml.logstructurefinder.DelimitedLogStructureFinder.levenshteinDistance; +import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinFieldwiseCompareRows; +import static org.elasticsearch.xpack.ml.filestructurefinder.DelimitedFileStructureFinder.levenshteinDistance; import static org.hamcrest.Matchers.arrayContaining; -public class DelimitedLogStructureFinderTests extends LogStructureTestCase { +public class DelimitedFileStructureFinderTests extends FileStructureTestCase { - private LogStructureFinderFactory csvFactory = new DelimitedLogStructureFinderFactory(',', 2, false); + private FileStructureFinderFactory csvFactory = new DelimitedFileStructureFinderFactory(',', 2, false); public void testCreateConfigsGivenCompleteCsv() throws Exception { String sample = "time,message\n" + @@ -28,11 +29,11 @@ public void testCreateConfigsGivenCompleteCsv() throws Exception { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -59,11 +60,11 @@ public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -92,11 +93,11 @@ public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = 
structureFinder.getStructure(); - assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -130,11 +131,11 @@ public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exce String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -165,11 +166,11 @@ public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception { String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = csvFactory.createFromSample(explanation, sample, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.DELIMITED, structure.getFormat()); + assertEquals(FileStructure.Format.DELIMITED, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); @@ -195,8 +196,8 @@ public void testFindHeaderFromSampleGivenHeaderInSample() throws IOException { "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" + "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; - Tuple header = DelimitedLogStructureFinder.findHeaderFromSample(explanation, - DelimitedLogStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE).v1()); + Tuple header = DelimitedFileStructureFinder.findHeaderFromSample(explanation, + DelimitedFileStructureFinder.readRows(withHeader, CsvPreference.EXCEL_PREFERENCE).v1()); assertTrue(header.v1()); assertThat(header.v2(), arrayContaining("time", "airline", "responsetime", "sourcetype")); @@ -208,8 +209,8 @@ public void testFindHeaderFromSampleGivenHeaderNotInSample() throws IOException "2014-06-23 00:00:01Z,JBU,877.5927,farequote\n" + "2014-06-23 00:00:01Z,KLM,1355.4812,farequote\n"; - Tuple header = DelimitedLogStructureFinder.findHeaderFromSample(explanation, - DelimitedLogStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1()); + Tuple header = DelimitedFileStructureFinder.findHeaderFromSample(explanation, + DelimitedFileStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1()); assertFalse(header.v1()); assertThat(header.v2(), arrayContaining("column1", "column2", "column3", "column4")); @@ -251,43 +252,43 @@ public void testLevenshteinCompareRows() { public void testLineHasUnescapedQuote() { - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE)); - 
assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE)); - assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE)); - assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE)); - - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertFalse(DelimitedLogStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE)); - assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE)); - assertTrue(DelimitedLogStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,b,c\"", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"", CsvPreference.EXCEL_PREFERENCE)); + 
assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,\"b\"\"\",c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a,b,\"c\"\"\"", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\",b,c", CsvPreference.EXCEL_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a,\"\"b\",c", CsvPreference.EXCEL_PREFERENCE)); + assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words,b,c", CsvPreference.EXCEL_PREFERENCE)); + assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\",b,c", CsvPreference.EXCEL_PREFERENCE)); + + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\tb\tc\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\t\"b\"\"\"\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("a\tb\t\"c\"\"\"", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"\"\"a\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\"\"\"\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertFalse(DelimitedFileStructureFinder.lineHasUnescapedQuote("\"a\t\"\"b\"\tc", CsvPreference.TAB_PREFERENCE)); + assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("between\"words\tb\tc", CsvPreference.TAB_PREFERENCE)); + assertTrue(DelimitedFileStructureFinder.lineHasUnescapedQuote("x and \"y\"\tb\tc", CsvPreference.TAB_PREFERENCE)); } public void testRowContainsDuplicateNonEmptyValues() { - assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList("a"))); - assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList(""))); - assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "c"))); - assertTrue(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "a"))); - assertTrue(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "b"))); - assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "", ""))); - assertFalse(DelimitedLogStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("", "a", ""))); + assertFalse(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList("a"))); + assertFalse(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Collections.singletonList(""))); + assertFalse(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "c"))); + assertTrue(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", 
"a"))); + assertTrue(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "b", "b"))); + assertFalse(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("a", "", ""))); + assertFalse(DelimitedFileStructureFinder.rowContainsDuplicateNonEmptyValues(Arrays.asList("", "a", ""))); } } diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java similarity index 97% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculatorTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java index 6d8927c1c2b3a..08035dc741d4d 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/FieldStatsCalculatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FieldStatsCalculatorTests.java @@ -3,7 +3,9 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; + +import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats; import java.util.Arrays; import java.util.Collections; @@ -11,7 +13,7 @@ import java.util.List; import java.util.Map; -public class FieldStatsCalculatorTests extends LogStructureTestCase { +public class FieldStatsCalculatorTests extends FileStructureTestCase { public void testMean() { diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java similarity index 86% rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java index 520a55510c7a4..10e780f1d34c1 100644 --- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureFinderManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManagerTests.java @@ -3,7 +3,7 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
 */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import com.ibm.icu.text.CharsetMatch;

@@ -15,9 +15,9 @@
 import static org.hamcrest.Matchers.startsWith;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;

-public class LogStructureFinderManagerTests extends LogStructureTestCase {
+public class FileStructureFinderManagerTests extends FileStructureTestCase {

-    private LogStructureFinderManager structureFinderManager = new LogStructureFinderManager();
+    private FileStructureFinderManager structureFinderManager = new FileStructureFinderManager();

     public void testFindCharsetGivenCharacterWidths() throws Exception {

@@ -49,24 +49,24 @@ public void testFindCharsetGivenBinary() throws Exception {

     public void testMakeBestStructureGivenJson() throws Exception {
         assertThat(structureFinderManager.makeBestStructureFinder(explanation,
             "{ \"time\": \"2018-05-17T13:41:23\", \"message\": \"hello\" }", StandardCharsets.UTF_8.name(), randomBoolean()),
-            instanceOf(JsonLogStructureFinder.class));
+            instanceOf(JsonFileStructureFinder.class));
     }

     public void testMakeBestStructureGivenXml() throws Exception {
         assertThat(structureFinderManager.makeBestStructureFinder(explanation,
             "hello", StandardCharsets.UTF_8.name(), randomBoolean()),
-            instanceOf(XmlLogStructureFinder.class));
+            instanceOf(XmlFileStructureFinder.class));
     }

     public void testMakeBestStructureGivenCsv() throws Exception {
         assertThat(structureFinderManager.makeBestStructureFinder(explanation, "time,message\n" +
             "2018-05-17T13:41:23,hello\n", StandardCharsets.UTF_8.name(), randomBoolean()),
-            instanceOf(DelimitedLogStructureFinder.class));
+            instanceOf(DelimitedFileStructureFinder.class));
     }

     public void testMakeBestStructureGivenText() throws Exception {
         assertThat(structureFinderManager.makeBestStructureFinder(explanation, "[2018-05-17T13:41:23] hello\n" +
             "[2018-05-17T13:41:24] hello again\n", StandardCharsets.UTF_8.name(), randomBoolean()),
-            instanceOf(TextLogStructureFinder.class));
+            instanceOf(TextLogFileStructureFinder.class));
     }
 }
diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java
similarity index 94%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java
index 6b718fef6c7ea..6246a7ad01e6a 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureTestCase.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureTestCase.java
@@ -3,7 +3,7 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.test.ESTestCase;

@@ -17,10 +17,10 @@
 import java.util.Locale;
 import java.util.stream.Collectors;

-public abstract class LogStructureTestCase extends ESTestCase {
+public abstract class FileStructureTestCase extends ESTestCase {

     protected static final List<String> POSSIBLE_CHARSETS = Collections.unmodifiableList(Charset.availableCharsets().keySet().stream()
-        .filter(name -> LogStructureFinderManager.FILEBEAT_SUPPORTED_ENCODINGS.contains(name.toLowerCase(Locale.ROOT)))
+        .filter(name -> FileStructureFinderManager.FILEBEAT_SUPPORTED_ENCODINGS.contains(name.toLowerCase(Locale.ROOT)))
         .collect(Collectors.toList()));

     protected static final String CSV_SAMPLE = "time,id,value\n" +

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java
similarity index 80%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtilsTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java
index 8ebfe520d6621..ac8f95670aba8 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/LogStructureUtilsTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureUtilsTests.java
@@ -3,10 +3,11 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch;
+import org.elasticsearch.xpack.core.ml.filestructurefinder.FieldStats;
+import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch;

 import java.util.Arrays;
 import java.util.Collections;
@@ -18,23 +19,23 @@

 import static org.hamcrest.Matchers.contains;

-public class LogStructureUtilsTests extends LogStructureTestCase {
+public class FileStructureUtilsTests extends FileStructureTestCase {

     public void testMoreLikelyGivenText() {
-        assertTrue(LogStructureUtils.isMoreLikelyTextThanKeyword("the quick brown fox jumped over the lazy dog"));
-        assertTrue(LogStructureUtils.isMoreLikelyTextThanKeyword(randomAlphaOfLengthBetween(257, 10000)));
+        assertTrue(FileStructureUtils.isMoreLikelyTextThanKeyword("the quick brown fox jumped over the lazy dog"));
+        assertTrue(FileStructureUtils.isMoreLikelyTextThanKeyword(randomAlphaOfLengthBetween(257, 10000)));
     }

     public void testMoreLikelyGivenKeyword() {
-        assertFalse(LogStructureUtils.isMoreLikelyTextThanKeyword("1"));
-        assertFalse(LogStructureUtils.isMoreLikelyTextThanKeyword("DEBUG"));
-        assertFalse(LogStructureUtils.isMoreLikelyTextThanKeyword(randomAlphaOfLengthBetween(1, 256)));
+        assertFalse(FileStructureUtils.isMoreLikelyTextThanKeyword("1"));
+        assertFalse(FileStructureUtils.isMoreLikelyTextThanKeyword("DEBUG"));
+        assertFalse(FileStructureUtils.isMoreLikelyTextThanKeyword(randomAlphaOfLengthBetween(1, 256)));
     }

     public void testSingleSampleSingleField() {
         Map<String, Object> sample = Collections.singletonMap("field1", "2018-05-24T17:28:31,735");
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample));
+            FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample));
         assertNotNull(match);
         assertEquals("field1", match.v1());
         assertThat(match.v2().dateFormats, contains("ISO8601"));
@@ -45,7 +46,7 @@ public void testSamplesWithSameSingleTimeField() {
         Map<String, Object> sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735");
         Map<String, Object> sample2 = Collections.singletonMap("field1", "2018-05-24T17:33:39,406");
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(match);
         assertEquals("field1", match.v1());
         assertThat(match.v2().dateFormats, contains("ISO8601"));
@@ -56,7 +57,7 @@ public void testSamplesWithOneSingleTimeFieldDifferentFormat() {
         Map<String, Object> sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735");
         Map<String, Object> sample2 = Collections.singletonMap("field1", "2018-05-24 17:33:39,406");
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNull(match);
     }

@@ -64,7 +65,7 @@ public void testSamplesWithDifferentSingleTimeField() {
         Map<String, Object> sample1 = Collections.singletonMap("field1", "2018-05-24T17:28:31,735");
         Map<String, Object> sample2 = Collections.singletonMap("another_field", "2018-05-24T17:33:39,406");
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNull(match);
     }

@@ -74,7 +75,7 @@ public void testSingleSampleManyFieldsOneTimeFormat() {
         sample.put("time", "2018-05-24 17:28:31,735");
         sample.put("bar", 42);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample));
+            FileStructureUtils.guessTimestampField(explanation, Collections.singletonList(sample));
         assertNotNull(match);
         assertEquals("time", match.v1());
         assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS"));
@@ -91,7 +92,7 @@ public void testSamplesWithManyFieldsSameSingleTimeFormat() {
         sample2.put("time", "2018-05-29 11:53:02,837");
         sample2.put("bar", 17);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(match);
         assertEquals("time", match.v1());
         assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS"));
@@ -108,7 +109,7 @@ public void testSamplesWithManyFieldsSameTimeFieldDifferentTimeFormat() {
         sample2.put("time", "May 29 2018 11:53:02");
         sample2.put("bar", 17);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNull(match);
     }

@@ -122,7 +123,7 @@ public void testSamplesWithManyFieldsSameSingleTimeFormatDistractionBefore() {
         sample2.put("time", "2018-05-29 11:53:02,837");
         sample2.put("bar", 17);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(match);
         assertEquals("time", match.v1());
         assertThat(match.v2().dateFormats, contains("YYYY-MM-dd HH:mm:ss,SSS"));
@@ -139,7 +140,7 @@ public void testSamplesWithManyFieldsSameSingleTimeFormatDistractionAfter() {
         sample2.put("time", "May 29 2018 11:53:02");
         sample2.put("red_herring", "17");
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(match);
         assertEquals("time", match.v1());
         assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"));
@@ -156,7 +157,7 @@ public void testSamplesWithManyFieldsInconsistentTimeFields() {
         sample2.put("time2", "May 29 2018 11:53:02");
         sample2.put("bar", 42);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNull(match);
     }

@@ -172,7 +173,7 @@ public void testSamplesWithManyFieldsInconsistentAndConsistentTimeFields() {
         sample2.put("time3", "Thu, May 10 2018 11:53:02");
         sample2.put("bar", 42);
         Tuple<String, TimestampMatch> match =
-            LogStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessTimestampField(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(match);
         assertEquals("time2", match.v1());
         assertThat(match.v2().dateFormats, contains("MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss"));
@@ -184,26 +185,26 @@ public void testGuessMappingGivenNothing() {
     }

     public void testGuessMappingGivenKeyword() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("ERROR", "INFO", "DEBUG")));
         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("2018-06-11T13:26:47Z", "not a date")));
     }

     public void testGuessMappingGivenText() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "text");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "text");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("a", "the quick brown fox jumped over the lazy dog")));
     }

     public void testGuessMappingGivenIp() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "ip");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "ip");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("10.0.0.1", "172.16.0.1", "192.168.0.1")));
     }

     public void testGuessMappingGivenDouble() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "double");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "double");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("3.14159265359", "0", "-8")));
         // 12345678901234567890 is too long for long
@@ -213,37 +214,37 @@ public void testGuessMappingGivenDouble() {
     }

     public void testGuessMappingGivenLong() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("500", "3", "-3")));
         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList(500, 6, 0)));
     }

     public void testGuessMappingGivenDate() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("2018-06-11T13:26:47Z", "2018-06-11T13:27:12Z")));
     }

     public void testGuessMappingGivenBoolean() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "boolean");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "boolean");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList("false", "true")));
         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList(true, false)));
     }

     public void testGuessMappingGivenArray() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList(42, Arrays.asList(1, -99))));

-        expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword");
+        expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList(new String[]{ "x", "y" }, "z")));
     }

     public void testGuessMappingGivenObject() {
-        Map<String, String> expected = Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "object");
+        Map<String, String> expected = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "object");

         assertEquals(expected, guessMapping(explanation, "foo", Arrays.asList(Collections.singletonMap("name", "value1"),
             Collections.singletonMap("name", "value2"))));
@@ -269,17 +270,17 @@ public void testGuessMappingsAndCalculateFieldStats() {
         sample2.put("nothing", null);

         Tuple<SortedMap<String, Object>, SortedMap<String, FieldStats>> mappingsAndFieldStats =
-            LogStructureUtils.guessMappingsAndCalculateFieldStats(explanation, Arrays.asList(sample1, sample2));
+            FileStructureUtils.guessMappingsAndCalculateFieldStats(explanation, Arrays.asList(sample1, sample2));
         assertNotNull(mappingsAndFieldStats);

         Map<String, Object> mappings = mappingsAndFieldStats.v1();
         assertNotNull(mappings);
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("foo"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("foo"));
         Map<String, String> expectedTimeMapping = new HashMap<>();
-        expectedTimeMapping.put(LogStructureUtils.MAPPING_TYPE_SETTING, "date");
-        expectedTimeMapping.put(LogStructureUtils.MAPPING_FORMAT_SETTING, "YYYY-MM-dd HH:mm:ss,SSS");
+        expectedTimeMapping.put(FileStructureUtils.MAPPING_TYPE_SETTING, "date");
+        expectedTimeMapping.put(FileStructureUtils.MAPPING_FORMAT_SETTING, "YYYY-MM-dd HH:mm:ss,SSS");
         assertEquals(expectedTimeMapping, mappings.get("time"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("bar"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("bar"));
         assertNull(mappings.get("nothing"));

         Map<String, FieldStats> fieldStats = mappingsAndFieldStats.v2();
@@ -293,7 +294,7 @@ public void testGuessMappingsAndCalculateFieldStats() {

     private Map<String, String> guessMapping(List<String> explanation, String fieldName, List<Object> fieldValues) {
         Tuple<Map<String, String>, FieldStats> mappingAndFieldStats =
-            LogStructureUtils.guessMappingAndCalculateFieldStats(explanation, fieldName, fieldValues);
+            FileStructureUtils.guessMappingAndCalculateFieldStats(explanation, fieldName, fieldValues);
         return (mappingAndFieldStats == null) ? null : mappingAndFieldStats.v1();
     }

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java
similarity index 85%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreatorTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java
index 9853efd41de84..858709e2764bb 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/GrokPatternCreatorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreatorTests.java
@@ -3,10 +3,10 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
 */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.xpack.ml.logstructurefinder.GrokPatternCreator.ValueOnlyGrokPatternCandidate;
+import org.elasticsearch.xpack.ml.filestructurefinder.GrokPatternCreator.ValueOnlyGrokPatternCandidate;

 import java.util.ArrayList;
 import java.util.Arrays;
@@ -17,7 +17,7 @@

 import static org.hamcrest.Matchers.containsInAnyOrder;

-public class GrokPatternCreatorTests extends LogStructureTestCase {
+public class GrokPatternCreatorTests extends FileStructureTestCase {

     public void testBuildFieldName() {
         Map<String, Integer> fieldNameCountStore = new HashMap<>();
@@ -195,11 +195,11 @@ public void testCreateGrokPatternFromExamplesGivenNamedLogs() {
             "%{QUOTEDSTRING:field2}: %{IP:ipaddress}#%{INT:field3}",
             grokPatternCreator.createGrokPatternFromExamples("SYSLOGTIMESTAMP", "timestamp"));
         assertEquals(5, mappings.size());
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("field2"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field3"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("field2"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field3"));
     }

     public void testCreateGrokPatternFromExamplesGivenCatalinaLogs() {
@@ -220,7 +220,7 @@ public void testCreateGrokPatternFromExamplesGivenCatalinaLogs() {
         assertEquals("%{CATALINA_DATESTAMP:timestamp} .*? .*?\\n%{LOGLEVEL:loglevel}: .*",
             grokPatternCreator.createGrokPatternFromExamples("CATALINA_DATESTAMP", "timestamp"));
         assertEquals(1, mappings.size());
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
     }

     public void testCreateGrokPatternFromExamplesGivenMultiTimestampLogs() {
@@ -243,12 +243,12 @@ public void testCreateGrokPatternFromExamplesGivenMultiTimestampLogs() {
             "%{IP:ipaddress}\\t.*?\\t%{LOGLEVEL:loglevel}\\t.*",
             grokPatternCreator.createGrokPatternFromExamples("TIMESTAMP_ISO8601", "timestamp"));
         assertEquals(5, mappings.size());
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "date"),
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "date"),
             mappings.get("extra_timestamp"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("field2"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("ipaddress"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("loglevel"));
     }

     public void testFindFullLineGrokPatternGivenApacheCombinedLogs() {
@@ -275,16 +275,16 @@ public void testFindFullLineGrokPatternGivenApacheCombinedLogs() {

         assertEquals(new Tuple<>("timestamp", "%{COMBINEDAPACHELOG}"), grokPatternCreator.findFullLineGrokPattern());
         assertEquals(10, mappings.size());
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "text"), mappings.get("agent"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("auth"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("bytes"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("clientip"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "double"), mappings.get("httpversion"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("ident"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("referrer"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("request"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("response"));
-        assertEquals(Collections.singletonMap(LogStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("verb"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "text"), mappings.get("agent"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("auth"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("bytes"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "ip"), mappings.get("clientip"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "double"), mappings.get("httpversion"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("ident"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("referrer"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("request"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "long"), mappings.get("response"));
+        assertEquals(Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, "keyword"), mappings.get("verb"));
     }

     public void testAdjustForPunctuationGivenCommonPrefix() {

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactoryTests.java
similarity index 84%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactoryTests.java
index cdbffa8259e0c..092f11676a877 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderFactoryTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderFactoryTests.java
@@ -3,11 +3,11 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

-public class JsonLogStructureFinderFactoryTests extends LogStructureTestCase {
+public class JsonFileStructureFinderFactoryTests extends FileStructureTestCase {

-    private LogStructureFinderFactory factory = new JsonLogStructureFinderFactory();
+    private FileStructureFinderFactory factory = new JsonFileStructureFinderFactory();

     public void testCanCreateFromSampleGivenJson() {

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java
similarity index 69%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java
index 917054919dd50..f41868be86286 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/JsonLogStructureFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/JsonFileStructureFinderTests.java
@@ -3,24 +3,26 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;
+
+import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure;

 import java.util.Collections;

-public class JsonLogStructureFinderTests extends LogStructureTestCase {
+public class JsonFileStructureFinderTests extends FileStructureTestCase {

-    private LogStructureFinderFactory factory = new JsonLogStructureFinderFactory();
+    private FileStructureFinderFactory factory = new JsonFileStructureFinderFactory();

     public void testCreateConfigsGivenGoodJson() throws Exception {
         assertTrue(factory.canCreateFromSample(explanation, JSON_SAMPLE));

         String charset = randomFrom(POSSIBLE_CHARSETS);
         Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
-        LogStructureFinder structureFinder = factory.createFromSample(explanation, JSON_SAMPLE, charset, hasByteOrderMarker);
+        FileStructureFinder structureFinder = factory.createFromSample(explanation, JSON_SAMPLE, charset, hasByteOrderMarker);

-        LogStructure structure = structureFinder.getStructure();
+        FileStructure structure = structureFinder.getStructure();

-        assertEquals(LogStructure.Format.JSON, structure.getFormat());
+        assertEquals(FileStructure.Format.JSON, structure.getFormat());
         assertEquals(charset, structure.getCharset());
         if (hasByteOrderMarker == null) {
             assertNull(structure.getHasByteOrderMarker());

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactoryTests.java
similarity index 70%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactoryTests.java
index c1b30cc749612..8234357fe3676 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderFactoryTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderFactoryTests.java
@@ -3,11 +3,11 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

-public class TextLogStructureFinderFactoryTests extends LogStructureTestCase {
+public class TextLogFileStructureFinderFactoryTests extends FileStructureTestCase {

-    private LogStructureFinderFactory factory = new TextLogStructureFinderFactory();
+    private FileStructureFinderFactory factory = new TextLogFileStructureFinderFactory();

     // No need to check JSON, XML, CSV, TSV, semi-colon delimited values or pipe
     // delimited values because they come earlier in the order we check formats

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java
similarity index 92%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java
index c9e153a82c437..a23080a827277 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TextLogStructureFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TextLogFileStructureFinderTests.java
@@ -3,29 +3,30 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.util.set.Sets;
-import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch;
+import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure;
+import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch;

 import java.util.Collections;
 import java.util.Set;

-public class TextLogStructureFinderTests extends LogStructureTestCase {
+public class TextLogFileStructureFinderTests extends FileStructureTestCase {

-    private LogStructureFinderFactory factory = new TextLogStructureFinderFactory();
+    private FileStructureFinderFactory factory = new TextLogFileStructureFinderFactory();

     public void testCreateConfigsGivenElasticsearchLog() throws Exception {
         assertTrue(factory.canCreateFromSample(explanation, TEXT_SAMPLE));

         String charset = randomFrom(POSSIBLE_CHARSETS);
         Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset);
-        LogStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker);
+        FileStructureFinder structureFinder = factory.createFromSample(explanation, TEXT_SAMPLE, charset, hasByteOrderMarker);

-        LogStructure structure = structureFinder.getStructure();
+        FileStructure structure = structureFinder.getStructure();

-        assertEquals(LogStructure.Format.SEMI_STRUCTURED_TEXT, structure.getFormat());
+        assertEquals(FileStructure.Format.SEMI_STRUCTURED_TEXT, structure.getFormat());
         assertEquals(charset, structure.getCharset());
         if (hasByteOrderMarker == null) {
             assertNull(structure.getHasByteOrderMarker());
@@ -46,7 +47,7 @@ public void testCreateMultiLineMessageStartRegexGivenNoPrefaces() {
         for (TimestampFormatFinder.CandidateTimestampFormat candidateTimestampFormat :
            TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS) {
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^" + simpleDateRegex.replaceFirst("^\\\\b", ""),
-                TextLogStructureFinder.createMultiLineMessageStartRegex(Collections.emptySet(), simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(Collections.emptySet(), simpleDateRegex));
         }
     }

@@ -54,7 +55,7 @@ public void testCreateMultiLineMessageStartRegexGivenOneEmptyPreface() {
         for (TimestampFormatFinder.CandidateTimestampFormat candidateTimestampFormat :
            TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS) {
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^" + simpleDateRegex.replaceFirst("^\\\\b", ""),
-                TextLogStructureFinder.createMultiLineMessageStartRegex(Collections.singleton(""), simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(Collections.singleton(""), simpleDateRegex));
         }
     }

@@ -62,7 +63,7 @@ public void testCreateMultiLineMessageStartRegexGivenOneLogLevelPreface() {
         for (TimestampFormatFinder.CandidateTimestampFormat candidateTimestampFormat :
            TimestampFormatFinder.ORDERED_CANDIDATE_FORMATS) {
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^\\[.*?\\] \\[" + simpleDateRegex,
-                TextLogStructureFinder.createMultiLineMessageStartRegex(Collections.singleton("[ERROR] ["), simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(Collections.singleton("[ERROR] ["), simpleDateRegex));
         }
     }

@@ -71,7 +72,7 @@ public void testCreateMultiLineMessageStartRegexGivenManyLogLevelPrefaces() {
             Set<String> prefaces = Sets.newHashSet("[ERROR] [", "[DEBUG] [");
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^\\[.*?\\] \\[" + simpleDateRegex,
-                TextLogStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
         }
     }

@@ -80,7 +81,7 @@ public void testCreateMultiLineMessageStartRegexGivenManyHostnamePrefaces() {
             Set<String> prefaces = Sets.newHashSet("host-1.acme.com|", "my_host.elastic.co|");
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^.*?\\|" + simpleDateRegex,
-                TextLogStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
         }
     }

@@ -89,7 +90,7 @@ public void testCreateMultiLineMessageStartRegexGivenManyPrefacesIncludingEmpty(
             Set<String> prefaces = Sets.newHashSet("", "[non-standard] ");
             String simpleDateRegex = candidateTimestampFormat.simplePattern.pattern();
             assertEquals("^.*?" + simpleDateRegex,
-                TextLogStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
+                TextLogFileStructureFinder.createMultiLineMessageStartRegex(prefaces, simpleDateRegex));
         }
     }

@@ -143,7 +144,7 @@ public void testMostLikelyTimestampGivenAllSame() {
             "[2018-06-27T11:59:23,588][INFO ][o.e.p.PluginsService     ] [node-0] loaded module [x-pack-watcher]\n" +
             "[2018-06-27T11:59:23,588][INFO ][o.e.p.PluginsService     ] [node-0] no plugins loaded\n";

-        Tuple<TimestampMatch, Set<String>> mostLikelyMatch = TextLogStructureFinder.mostLikelyTimestamp(sample.split("\n"));
+        Tuple<TimestampMatch, Set<String>> mostLikelyMatch = TextLogFileStructureFinder.mostLikelyTimestamp(sample.split("\n"));
         assertNotNull(mostLikelyMatch);
         assertEquals(new TimestampMatch(7, "", "ISO8601", "\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}", "TIMESTAMP_ISO8601", ""),
             mostLikelyMatch.v1());
@@ -233,7 +234,7 @@ public void testMostLikelyTimestampGivenExceptionTrace() {
             "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_144]\n" +
             "\tat java.lang.Thread.run(Thread.java:748) [?:1.8.0_144]\n";

-        Tuple<TimestampMatch, Set<String>> mostLikelyMatch = TextLogStructureFinder.mostLikelyTimestamp(sample.split("\n"));
+        Tuple<TimestampMatch, Set<String>> mostLikelyMatch = TextLogFileStructureFinder.mostLikelyTimestamp(sample.split("\n"));
         assertNotNull(mostLikelyMatch);

         // Even though many lines have a timestamp near the end (in the Lucene version information),

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
similarity index 98%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
index cf1b65d1be234..bf27912b9db26 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/TimestampFormatFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/TimestampFormatFinderTests.java
@@ -3,10 +3,10 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

 import org.elasticsearch.common.collect.Tuple;
-import org.elasticsearch.xpack.ml.logstructurefinder.TimestampFormatFinder.TimestampMatch;
+import org.elasticsearch.xpack.ml.filestructurefinder.TimestampFormatFinder.TimestampMatch;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -16,7 +16,7 @@
 import java.util.Arrays;
 import java.util.Locale;

-public class TimestampFormatFinderTests extends LogStructureTestCase {
+public class TimestampFormatFinderTests extends FileStructureTestCase {

     public void testFindFirstMatchGivenNoMatch() {

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactoryTests.java
similarity index 84%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactoryTests.java
index b6dc3a56f1dfb..e7c111818317f 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderFactoryTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderFactoryTests.java
@@ -3,11 +3,11 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
  */
-package org.elasticsearch.xpack.ml.logstructurefinder;
+package org.elasticsearch.xpack.ml.filestructurefinder;

-public class XmlLogStructureFinderFactoryTests extends LogStructureTestCase {
+public class XmlFileStructureFinderFactoryTests extends FileStructureTestCase {

-    private LogStructureFinderFactory factory = new XmlLogStructureFinderFactory();
+    private FileStructureFinderFactory factory = new XmlFileStructureFinderFactory();

     // No need to check JSON because it comes earlier in the order we check formats

diff --git a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
similarity index 70%
rename from x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
index de653d7bcd0cd..4bf65ba783572 100644
--- a/x-pack/plugin/ml/log-structure-finder/src/test/java/org/elasticsearch/xpack/ml/logstructurefinder/XmlLogStructureFinderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/XmlFileStructureFinderTests.java
@@ -3,24 +3,26 @@
  * or more contributor license agreements. Licensed under the Elastic License;
  * you may not use this file except in compliance with the Elastic License.
*/ -package org.elasticsearch.xpack.ml.logstructurefinder; +package org.elasticsearch.xpack.ml.filestructurefinder; + +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; import java.util.Collections; -public class XmlLogStructureFinderTests extends LogStructureTestCase { +public class XmlFileStructureFinderTests extends FileStructureTestCase { - private LogStructureFinderFactory factory = new XmlLogStructureFinderFactory(); + private FileStructureFinderFactory factory = new XmlFileStructureFinderFactory(); public void testCreateConfigsGivenGoodXml() throws Exception { assertTrue(factory.canCreateFromSample(explanation, XML_SAMPLE)); String charset = randomFrom(POSSIBLE_CHARSETS); Boolean hasByteOrderMarker = randomHasByteOrderMarker(charset); - LogStructureFinder structureFinder = factory.createFromSample(explanation, XML_SAMPLE, charset, hasByteOrderMarker); + FileStructureFinder structureFinder = factory.createFromSample(explanation, XML_SAMPLE, charset, hasByteOrderMarker); - LogStructure structure = structureFinder.getStructure(); + FileStructure structure = structureFinder.getStructure(); - assertEquals(LogStructure.Format.XML, structure.getFormat()); + assertEquals(FileStructure.Format.XML, structure.getFormat()); assertEquals(charset, structure.getCharset()); if (hasByteOrderMarker == null) { assertNull(structure.getHasByteOrderMarker()); From cdd82bb2034b4b34a3d7d7424b7a14baea232fd1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 6 Sep 2018 11:48:51 +0200 Subject: [PATCH 13/91] test: fetch `SeqNoStats` inside try-catch block Relates to #33457 --- .../xpack/ccr/action/ShardFollowTaskReplicationTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java index ec180943a3b5b..2cd024cb03cf7 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowTaskReplicationTests.java @@ -43,6 +43,7 @@ import java.util.function.LongConsumer; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTestCase { @@ -121,6 +122,7 @@ public void testFailLeaderReplicaShard() throws Exception { thread.join(); leaderGroup.assertAllEqual(docCount); + assertThat(shardFollowTask.getFailure(), nullValue()); assertBusy(() -> followerGroup.assertAllEqual(docCount)); shardFollowTask.markAsCompleted(); assertConsistentHistoryBetweenLeaderAndFollower(leaderGroup, followerGroup); @@ -205,8 +207,8 @@ protected void innerSendShardChangesRequest(long from, int maxOperationCount, Co Exception exception = null; for (IndexShard indexShard : indexShards) { - final SeqNoStats seqNoStats = indexShard.seqNoStats(); try { + final SeqNoStats seqNoStats = indexShard.seqNoStats(); Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from, maxOperationCount, params.getMaxBatchSizeInBytes()); // hard code mapping version; this is ok, as mapping updates are not tested here From ef207edbf0ce3edb408b37de733aedc816d778c1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 6 Sep 2018 14:14:24 +0200 Subject: [PATCH 14/91] test: do not schedule when test has 
stopped

---
 .../xpack/ccr/action/ShardFollowNodeTaskRandomTests.java | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java
index f5fe1215a0edc..9bfd6b9d6ef42 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/ShardFollowNodeTaskRandomTests.java
@@ -81,7 +81,9 @@ private ShardFollowNodeTask createShardFollowTask(int concurrency, TestRun testR
         ThreadPool threadPool = new TestThreadPool(getClass().getSimpleName());
         BiConsumer<TimeValue, Runnable> scheduler = (delay, task) -> {
             assert delay.millis() < 100 : "The delay should be kept to a minimum, so that this test does not take too long to run";
-            threadPool.schedule(delay, ThreadPool.Names.GENERIC, task);
+            if (stopped.get() == false) {
+                threadPool.schedule(delay, ThreadPool.Names.GENERIC, task);
+            }
         };
         List<Translog.Operation> receivedOperations = Collections.synchronizedList(new ArrayList<>());
         LocalCheckpointTracker tracker = new LocalCheckpointTracker(testRun.startSeqNo - 1, testRun.startSeqNo - 1);

From 9b6bbc0182229c4db32f4dff9d83bc878c2eb74e Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Thu, 6 Sep 2018 07:18:09 -0500
Subject: [PATCH 15/91] HLRC: ML Update Job (#33392)

* HLRC: ML Update Job
---
 .../client/MLRequestConverters.java | 14 +
 .../client/MachineLearningClient.java | 34 ++
 .../client/ml/UpdateJobRequest.java | 80 +++
 .../client/ml/job/config/JobUpdate.java | 454 ++++++++++++++++++
 .../client/MLRequestConvertersTests.java | 17 +
 .../client/MachineLearningIT.java | 19 +
 .../MlClientDocumentationIT.java | 97 ++++
 .../client/ml/UpdateJobRequestTests.java | 44 ++
 .../client/ml/job/config/JobUpdateTests.java | 120 +++++
 .../high-level/ml/update-job.asciidoc | 93 ++++
 .../high-level/supported-apis.asciidoc | 2 +
 11 files changed, 974 insertions(+)
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java
 create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/UpdateJobRequestTests.java
 create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobUpdateTests.java
 create mode 100644 docs/java-rest/high-level/ml/update-job.asciidoc

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
index dc4d550e7b9ad..b8d977d8eeb94 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.client.ml.GetRecordsRequest;
 import org.elasticsearch.client.ml.OpenJobRequest;
 import org.elasticsearch.client.ml.PutJobRequest;
+import org.elasticsearch.client.ml.UpdateJobRequest;
 import org.elasticsearch.common.Strings;

 import java.io.IOException;
@@ -146,6 +147,19 @@ static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
         return request;
     }

+    static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException {
+        String endpoint = new EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+
.addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(updateJobRequest.getJobUpdate().getJobId()) + .addPathPartAsIs("_update") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setEntity(createEntity(updateJobRequest.getJobUpdate(), REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index be5f81076ae90..bdfc34ad997d6 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -19,6 +19,7 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; import org.elasticsearch.client.ml.DeleteJobRequest; @@ -319,6 +320,7 @@ public void closeJobAsync(CloseJobRequest request, RequestOptions options, Actio * * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @throws IOException when there is a serialization issue sending the request or receiving the response */ public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(request, @@ -356,6 +358,38 @@ public void flushJobAsync(FlushJobRequest request, RequestOptions options, Actio Collections.emptySet()); } + /** + * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} + * + * @param request the {@link UpdateJobRequest} object enclosing the desired updates + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return a PutJobResponse object containing the updated job object + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::updateJob, + options, + PutJobResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously + * + * @param request the {@link UpdateJobRequest} object enclosing the desired updates + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::updateJob, + options, + PutJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Gets the buckets for a Machine Learning Job. *

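The two hunks above wire `UpdateJobRequest` through the request converters and the client. As a rough sketch of the HTTP call that `MLRequestConverters.updateJob` assembles (illustrative only; the job id `my-job` and the description are placeholder values, not part of this patch), the equivalent low-level request would look like:

["source","java"]
--------------------------------------------------
// Illustrative sketch of the low-level request the converter builds;
// "my-job" and the description are placeholders, not values from this patch.
Request request = new Request("POST", "/_xpack/ml/anomaly_detectors/my-job/_update");
// The entity is the JobUpdate serialized as JSON (job_id plus the changed fields).
request.setJsonEntity("{\"job_id\": \"my-job\", \"description\": \"new description\"}");
--------------------------------------------------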
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java new file mode 100644 index 0000000000000..6e050f8adcf90 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.job.config.JobUpdate; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate} + * settings + */ +public class UpdateJobRequest extends ActionRequest implements ToXContentObject { + + private final JobUpdate update; + + public UpdateJobRequest(JobUpdate update) { + this.update = update; + } + + public JobUpdate getJobUpdate() { + return update; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return update.toXContent(builder, params); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + + if (o == null || getClass() != o.getClass()) { + return false; + } + + UpdateJobRequest that = (UpdateJobRequest) o; + return Objects.equals(update, that.update); + } + + @Override + public int hashCode() { + return Objects.hash(update); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java new file mode 100644 index 0000000000000..15499a650439d --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java @@ -0,0 +1,454 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml.job.config; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * POJO for updating an existing Machine Learning {@link Job} + */ +public class JobUpdate implements ToXContentObject { + public static final ParseField DETECTORS = new ParseField("detectors"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "job_update", true, args -> new Builder((String) args[0])); + + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); + PARSER.declareStringArray(Builder::setGroups, Job.GROUPS); + PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); + PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); + PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG); + PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS); + PARSER.declareString((builder, val) -> builder.setBackgroundPersistInterval( + TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName())), Job.BACKGROUND_PERSIST_INTERVAL); + PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); + PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); + PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); + PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); + PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); + } + + private final String jobId; + private final List groups; + private final String description; + private final List detectorUpdates; + private final ModelPlotConfig modelPlotConfig; + private final AnalysisLimits analysisLimits; + private final Long renormalizationWindowDays; + private final TimeValue backgroundPersistInterval; + private final Long modelSnapshotRetentionDays; + private final Long resultsRetentionDays; + private final List categorizationFilters; + private final Map customSettings; + + private JobUpdate(String jobId, @Nullable List groups, @Nullable String description, + @Nullable List detectorUpdates, @Nullable ModelPlotConfig modelPlotConfig, + @Nullable AnalysisLimits analysisLimits, @Nullable TimeValue backgroundPersistInterval, + @Nullable Long renormalizationWindowDays, @Nullable Long resultsRetentionDays, + @Nullable Long modelSnapshotRetentionDays, @Nullable List categorisationFilters, + @Nullable Map customSettings) { + this.jobId = jobId; + this.groups = groups; + 
this.description = description; + this.detectorUpdates = detectorUpdates; + this.modelPlotConfig = modelPlotConfig; + this.analysisLimits = analysisLimits; + this.renormalizationWindowDays = renormalizationWindowDays; + this.backgroundPersistInterval = backgroundPersistInterval; + this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; + this.resultsRetentionDays = resultsRetentionDays; + this.categorizationFilters = categorisationFilters; + this.customSettings = customSettings; + } + + public String getJobId() { + return jobId; + } + + public List getGroups() { + return groups; + } + + public String getDescription() { + return description; + } + + public List getDetectorUpdates() { + return detectorUpdates; + } + + public ModelPlotConfig getModelPlotConfig() { + return modelPlotConfig; + } + + public AnalysisLimits getAnalysisLimits() { + return analysisLimits; + } + + public Long getRenormalizationWindowDays() { + return renormalizationWindowDays; + } + + public TimeValue getBackgroundPersistInterval() { + return backgroundPersistInterval; + } + + public Long getModelSnapshotRetentionDays() { + return modelSnapshotRetentionDays; + } + + public Long getResultsRetentionDays() { + return resultsRetentionDays; + } + + public List getCategorizationFilters() { + return categorizationFilters; + } + + public Map getCustomSettings() { + return customSettings; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (groups != null) { + builder.field(Job.GROUPS.getPreferredName(), groups); + } + if (description != null) { + builder.field(Job.DESCRIPTION.getPreferredName(), description); + } + if (detectorUpdates != null) { + builder.field(DETECTORS.getPreferredName(), detectorUpdates); + } + if (modelPlotConfig != null) { + builder.field(Job.MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig); + } + if (analysisLimits != null) { + builder.field(Job.ANALYSIS_LIMITS.getPreferredName(), analysisLimits); + } + if (renormalizationWindowDays != null) { + builder.field(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); + } + if (backgroundPersistInterval != null) { + builder.field(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval); + } + if (modelSnapshotRetentionDays != null) { + builder.field(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); + } + if (resultsRetentionDays != null) { + builder.field(Job.RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); + } + if (categorizationFilters != null) { + builder.field(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); + } + if (customSettings != null) { + builder.field(Job.CUSTOM_SETTINGS.getPreferredName(), customSettings); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + JobUpdate that = (JobUpdate) other; + + return Objects.equals(this.jobId, that.jobId) + && Objects.equals(this.groups, that.groups) + && Objects.equals(this.description, that.description) + && Objects.equals(this.detectorUpdates, that.detectorUpdates) + && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) + && Objects.equals(this.analysisLimits, that.analysisLimits) + && Objects.equals(this.renormalizationWindowDays, 
that.renormalizationWindowDays) + && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) + && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) + && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) + && Objects.equals(this.categorizationFilters, that.categorizationFilters) + && Objects.equals(this.customSettings, that.customSettings); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, renormalizationWindowDays, + backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, categorizationFilters, customSettings); + } + + public static class DetectorUpdate implements ToXContentObject { + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("detector_update", true, a -> new DetectorUpdate((int) a[0], (String) a[1], + (List) a[2])); + + static { + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); + PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); + PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), (parser, parseFieldMatcher) -> + DetectionRule.PARSER.apply(parser, parseFieldMatcher).build(), Detector.CUSTOM_RULES_FIELD); + } + + private final int detectorIndex; + private final String description; + private final List rules; + + /** + * A detector update to apply to the Machine Learning Job + * + * @param detectorIndex The identifier of the detector to update. + * @param description The new description for the detector. + * @param rules The new list of rules for the detector. + */ + public DetectorUpdate(int detectorIndex, String description, List rules) { + this.detectorIndex = detectorIndex; + this.description = description; + this.rules = rules; + } + + public int getDetectorIndex() { + return detectorIndex; + } + + public String getDescription() { + return description; + } + + public List getRules() { + return rules; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex); + if (description != null) { + builder.field(Job.DESCRIPTION.getPreferredName(), description); + } + if (rules != null) { + builder.field(Detector.CUSTOM_RULES_FIELD.getPreferredName(), rules); + } + builder.endObject(); + + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(detectorIndex, description, rules); + } + + @Override + public boolean equals(Object other) { + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + DetectorUpdate that = (DetectorUpdate) other; + return this.detectorIndex == that.detectorIndex && Objects.equals(this.description, that.description) + && Objects.equals(this.rules, that.rules); + } + } + + public static class Builder { + + private final String jobId; + private List groups; + private String description; + private List detectorUpdates; + private ModelPlotConfig modelPlotConfig; + private AnalysisLimits analysisLimits; + private Long renormalizationWindowDays; + private TimeValue backgroundPersistInterval; + private Long modelSnapshotRetentionDays; + private Long resultsRetentionDays; + private List categorizationFilters; + private Map customSettings; + + /** + * New 
{@link JobUpdate.Builder} object for the existing job
+         *
+         * @param jobId non-null `jobId` for referencing an existing {@link Job}
+         */
+        public Builder(String jobId) {
+            this.jobId = jobId;
+        }
+
+        /**
+         * Set the job groups
+         *
+         * Updates the {@link Job#groups} setting
+         *
+         * @param groups A list of group names
+         */
+        public Builder setGroups(List<String> groups) {
+            this.groups = groups;
+            return this;
+        }
+
+        /**
+         * Set the job description
+         *
+         * Updates the {@link Job#description} setting
+         *
+         * @param description the desired Machine Learning job description
+         */
+        public Builder setDescription(String description) {
+            this.description = description;
+            return this;
+        }
+
+        /**
+         * The detector updates to apply to the job
+         *
+         * Updates the {@link AnalysisConfig#detectors} setting
+         *
+         * @param detectorUpdates list of {@link JobUpdate.DetectorUpdate} objects
+         */
+        public Builder setDetectorUpdates(List<DetectorUpdate> detectorUpdates) {
+            this.detectorUpdates = detectorUpdates;
+            return this;
+        }
+
+        /**
+         * Enables/disables the model plot config setting through {@link ModelPlotConfig#enabled}
+         *
+         * Updates the {@link Job#modelPlotConfig} setting
+         *
+         * @param modelPlotConfig {@link ModelPlotConfig} object with updated fields
+         */
+        public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) {
+            this.modelPlotConfig = modelPlotConfig;
+            return this;
+        }
+
+        /**
+         * Sets new {@link AnalysisLimits} for the {@link Job}
+         *
+         * Updates the {@link Job#analysisLimits} setting
+         *
+         * @param analysisLimits Updates to {@link AnalysisLimits}
+         */
+        public Builder setAnalysisLimits(AnalysisLimits analysisLimits) {
+            this.analysisLimits = analysisLimits;
+            return this;
+        }
+
+        /**
+         * Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen
+         *
+         * Updates the {@link Job#renormalizationWindowDays} setting
+         *
+         * @param renormalizationWindowDays number of renormalization window days
+         */
+        public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) {
+            this.renormalizationWindowDays = renormalizationWindowDays;
+            return this;
+        }
+
+        /**
+         * Advanced configuration option. The time between each periodic persistence of the model
+         *
+         * Updates the {@link Job#backgroundPersistInterval} setting
+         *
+         * @param backgroundPersistInterval the time between background persistence
+         */
+        public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) {
+            this.backgroundPersistInterval = backgroundPersistInterval;
+            return this;
+        }
+
+        /**
+         * The time in days that model snapshots are retained for the job.
+         *
+         * Updates the {@link Job#modelSnapshotRetentionDays} setting
+         *
+         * @param modelSnapshotRetentionDays number of days to keep a model snapshot
+         */
+        public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) {
+            this.modelSnapshotRetentionDays = modelSnapshotRetentionDays;
+            return this;
+        }
+
+        /**
+         * Advanced configuration option. The number of days for which job results are retained
+         *
+         * Updates the {@link Job#resultsRetentionDays} setting
+         *
+         * @param resultsRetentionDays number of days to keep results.
+         */
+        public Builder setResultsRetentionDays(Long resultsRetentionDays) {
+            this.resultsRetentionDays = resultsRetentionDays;
+            return this;
+        }
+
+        /**
+         * Sets the categorization filters on the {@link Job}
+         *
+         * Updates the {@link AnalysisConfig#categorizationFilters} setting.
+         * Requires {@link AnalysisConfig#categorizationFieldName} to have been set on the existing Job.
+ * + * @param categorizationFilters list of categorization filters for the Job's {@link AnalysisConfig} + */ + public Builder setCategorizationFilters(List categorizationFilters) { + this.categorizationFilters = categorizationFilters; + return this; + } + + /** + * Contains custom meta data about the job. + * + * Updates the {@link Job#customSettings} setting + * + * @param customSettings custom settings map for the job + */ + public Builder setCustomSettings(Map customSettings) { + this.customSettings = customSettings; + return this; + } + + public JobUpdate build() { + return new JobUpdate(jobId, groups, description, detectorUpdates, modelPlotConfig, analysisLimits, backgroundPersistInterval, + renormalizationWindowDays, resultsRetentionDays, modelSnapshotRetentionDays, categorizationFilters, customSettings); + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 0822db33505c4..f1b035566aa4d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -34,9 +34,12 @@ import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.PutJobRequest; +import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.job.config.AnalysisConfig; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.config.JobUpdate; +import org.elasticsearch.client.ml.job.config.JobUpdateTests; import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; @@ -166,6 +169,20 @@ public void testFlushJob() throws Exception { requestEntityToString(request)); } + public void testUpdateJob() throws Exception { + String jobId = randomAlphaOfLength(10); + JobUpdate updates = JobUpdateTests.createRandom(jobId); + UpdateJobRequest updateJobRequest = new UpdateJobRequest(updates); + + Request request = MLRequestConverters.updateJob(updateJobRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_update", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + JobUpdate.Builder parsedRequest = JobUpdate.PARSER.apply(parser, null); + assertThat(parsedRequest.build(), equalTo(updates)); + } + } + public void testGetBuckets() throws IOException { String jobId = randomAlphaOfLength(10); GetBucketsRequest getBucketsRequest = new GetBucketsRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index cd4b6ffc7691f..bf25d9d1c0fb3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -20,6 +20,8 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.ml.UpdateJobRequest; +import 
org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.client.ml.GetJobStatsRequest; import org.elasticsearch.client.ml.GetJobStatsResponse; @@ -218,6 +220,23 @@ public void testGetJobStats() throws Exception { assertThat(exception.status().getStatus(), equalTo(404)); } + public void testUpdateJob() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + UpdateJobRequest request = new UpdateJobRequest(new JobUpdate.Builder(jobId).setDescription("Updated description").build()); + + PutJobResponse response = execute(request, machineLearningClient::updateJob, machineLearningClient::updateJobAsync); + + assertEquals("Updated description", response.getResponse().getDescription()); + + GetJobRequest getRequest = new GetJobRequest(jobId); + GetJobResponse getResponse = machineLearningClient.getJob(getRequest, RequestOptions.DEFAULT); + assertEquals("Updated description", getResponse.jobs().get(0).getDescription()); + } + public static String randomValidJobId() { CodepointSetGenerator generator = new CodepointSetGenerator("abcdefghijklmnopqrstuvwxyz0123456789".toCharArray()); return generator.ofCodePointsLength(random(), 10, 10); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 2e1fc6c2711d2..ac7835735fcf1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -51,10 +51,17 @@ import org.elasticsearch.client.ml.OpenJobResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; +import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.job.config.AnalysisConfig; +import org.elasticsearch.client.ml.job.config.AnalysisLimits; import org.elasticsearch.client.ml.job.config.DataDescription; +import org.elasticsearch.client.ml.job.config.DetectionRule; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.config.JobUpdate; +import org.elasticsearch.client.ml.job.config.ModelPlotConfig; +import org.elasticsearch.client.ml.job.config.Operator; +import org.elasticsearch.client.ml.job.config.RuleCondition; import org.elasticsearch.client.ml.job.results.AnomalyRecord; import org.elasticsearch.client.ml.job.results.Bucket; import org.elasticsearch.client.ml.job.results.Influencer; @@ -66,9 +73,12 @@ import org.junit.After; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -372,6 +382,93 @@ public void onFailure(Exception e) { } } + public void testUpdateJob() throws Exception { + RestHighLevelClient client = highLevelClient(); + String jobId = "test-update-job"; + Job tempJob = MachineLearningIT.buildJob(jobId); + Job job = new Job.Builder(tempJob) + .setAnalysisConfig(new 
AnalysisConfig.Builder(tempJob.getAnalysisConfig()) + .setCategorizationFieldName("categorization-field") + .setDetector(0, + new Detector.Builder().setFieldName("total") + .setFunction("sum") + .setPartitionFieldName("mlcategory") + .setDetectorDescription(randomAlphaOfLength(10)) + .build())) + .build(); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + { + + List detectionRules = Arrays.asList( + new DetectionRule.Builder(Arrays.asList(RuleCondition.createTime(Operator.GT, 100L))).build()); + Map customSettings = new HashMap<>(); + customSettings.put("custom-setting-1", "custom-value"); + + //tag::x-pack-ml-update-job-detector-options + JobUpdate.DetectorUpdate detectorUpdate = new JobUpdate.DetectorUpdate(0, //<1> + "detector description", //<2> + detectionRules); //<3> + //end::x-pack-ml-update-job-detector-options + + //tag::x-pack-ml-update-job-options + JobUpdate update = new JobUpdate.Builder(jobId) //<1> + .setDescription("My description") //<2> + .setAnalysisLimits(new AnalysisLimits(1000L, null)) //<3> + .setBackgroundPersistInterval(TimeValue.timeValueHours(3)) //<4> + .setCategorizationFilters(Arrays.asList("categorization-filter")) //<5> + .setDetectorUpdates(Arrays.asList(detectorUpdate)) //<6> + .setGroups(Arrays.asList("job-group-1")) //<7> + .setResultsRetentionDays(10L) //<8> + .setModelPlotConfig(new ModelPlotConfig(true, null)) //<9> + .setModelSnapshotRetentionDays(7L) //<10> + .setCustomSettings(customSettings) //<11> + .setRenormalizationWindowDays(3L) //<12> + .build(); + //end::x-pack-ml-update-job-options + + + //tag::x-pack-ml-update-job-request + UpdateJobRequest updateJobRequest = new UpdateJobRequest(update); //<1> + //end::x-pack-ml-update-job-request + + //tag::x-pack-ml-update-job-execute + PutJobResponse updateJobResponse = client.machineLearning().updateJob(updateJobRequest, RequestOptions.DEFAULT); + //end::x-pack-ml-update-job-execute + //tag::x-pack-ml-update-job-response + Job updatedJob = updateJobResponse.getResponse(); //<1> + //end::x-pack-ml-update-job-response + + assertEquals(update.getDescription(), updatedJob.getDescription()); + } + { + //tag::x-pack-ml-update-job-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(PutJobResponse updateJobResponse) { + //<1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::x-pack-ml-update-job-listener + UpdateJobRequest updateJobRequest = new UpdateJobRequest(new JobUpdate.Builder(jobId).build()); + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-update-job-execute-async + client.machineLearning().updateJobAsync(updateJobRequest, RequestOptions.DEFAULT, listener); //<1> + // end::x-pack-ml-update-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testGetBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/UpdateJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/UpdateJobRequestTests.java new file mode 100644 index 0000000000000..4d2bbb2e2006d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/UpdateJobRequestTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.job.config.JobTests; +import org.elasticsearch.client.ml.job.config.JobUpdate; +import org.elasticsearch.client.ml.job.config.JobUpdateTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + + +public class UpdateJobRequestTests extends AbstractXContentTestCase { + + @Override + protected UpdateJobRequest createTestInstance() { + return new UpdateJobRequest(JobUpdateTests.createRandom(JobTests.randomValidJobId())); + } + + @Override + protected UpdateJobRequest doParseInstance(XContentParser parser) { + return new UpdateJobRequest(JobUpdate.PARSER.apply(parser, null).build()); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobUpdateTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobUpdateTests.java new file mode 100644 index 0000000000000..b159fedb95d44 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/config/JobUpdateTests.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+package org.elasticsearch.client.ml.job.config;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.test.AbstractXContentTestCase;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Predicate;
+
+public class JobUpdateTests extends AbstractXContentTestCase<JobUpdate> {
+
+    @Override
+    protected JobUpdate createTestInstance() {
+        return createRandom(randomAlphaOfLength(4));
+    }
+
+    /**
+     * Creates a completely random update when the job is null
+     * or a random update that is valid for the given job
+     */
+    public static JobUpdate createRandom(String jobId) {
+        JobUpdate.Builder update = new JobUpdate.Builder(jobId);
+        if (randomBoolean()) {
+            int groupsNum = randomIntBetween(0, 10);
+            List<String> groups = new ArrayList<>(groupsNum);
+            for (int i = 0; i < groupsNum; i++) {
+                groups.add(JobTests.randomValidJobId());
+            }
+            update.setGroups(groups);
+        }
+        if (randomBoolean()) {
+            update.setDescription(randomAlphaOfLength(20));
+        }
+        if (randomBoolean()) {
+            update.setDetectorUpdates(createRandomDetectorUpdates());
+        }
+        if (randomBoolean()) {
+            update.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10)));
+        }
+        if (randomBoolean()) {
+            update.setAnalysisLimits(AnalysisLimitsTests.createRandomized());
+        }
+        if (randomBoolean()) {
+            update.setRenormalizationWindowDays(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            update.setBackgroundPersistInterval(TimeValue.timeValueHours(randomIntBetween(1, 24)));
+        }
+        if (randomBoolean()) {
+            update.setModelSnapshotRetentionDays(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            update.setResultsRetentionDays(randomNonNegativeLong());
+        }
+        if (randomBoolean()) {
+            update.setCategorizationFilters(Arrays.asList(generateRandomStringArray(10, 10, false)));
+        }
+        if (randomBoolean()) {
+            update.setCustomSettings(Collections.singletonMap(randomAlphaOfLength(10), randomAlphaOfLength(10)));
+        }
+
+        return update.build();
+    }
+
+
+    private static List<JobUpdate.DetectorUpdate> createRandomDetectorUpdates() {
+        int size = randomInt(10);
+        List<JobUpdate.DetectorUpdate> detectorUpdates = new ArrayList<>(size);
+        for (int i = 0; i < size; i++) {
+            String detectorDescription = null;
+            if (randomBoolean()) {
+                detectorDescription = randomAlphaOfLength(12);
+            }
+            List<DetectionRule> detectionRules = null;
+            if (randomBoolean()) {
+                detectionRules = new ArrayList<>();
+                detectionRules.add(new DetectionRule.Builder(
+                    Collections.singletonList(new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.GT, 5))).build());
+            }
+            detectorUpdates.add(new JobUpdate.DetectorUpdate(i, detectorDescription, detectionRules));
+        }
+        return detectorUpdates;
+    }
+
+    @Override
+    protected JobUpdate doParseInstance(XContentParser parser) {
+        return JobUpdate.PARSER.apply(parser, null).build();
+    }
+
+    @Override
+    protected boolean supportsUnknownFields() {
+        return true;
+    }
+
+    @Override
+    protected Predicate<String> getRandomFieldsExcludeFilter() {
+        return field -> !field.isEmpty();
+    }
+}

diff --git a/docs/java-rest/high-level/ml/update-job.asciidoc b/docs/java-rest/high-level/ml/update-job.asciidoc
new file mode 100644
index 0000000000000..3e1d1e2313b72
--- /dev/null
+++ b/docs/java-rest/high-level/ml/update-job.asciidoc
@@ -0,0 +1,93 @@
+[[java-rest-high-x-pack-ml-update-job]]
+=== Update Job API
+
+The Update Job API provides the ability to update a {ml} job.
+It accepts an `UpdateJobRequest` object and responds
+with a `PutJobResponse` object.
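+
+For orientation, here is a minimal, self-contained sketch of the flow (an
+illustrative example rather than generated doc-test output; `"my-job"` is a
+placeholder id for an existing job and `client` is an initialized
+`RestHighLevelClient`):
+
+["source","java"]
+--------------------------------------------------
+// Minimal sketch; "my-job" is a placeholder id for an existing job.
+JobUpdate update = new JobUpdate.Builder("my-job")
+    .setDescription("New description")
+    .build();
+UpdateJobRequest request = new UpdateJobRequest(update);
+PutJobResponse response = client.machineLearning().updateJob(request, RequestOptions.DEFAULT);
+Job updatedJob = response.getResponse(); // the job as persisted after the update
+--------------------------------------------------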
+
+[[java-rest-high-x-pack-ml-update-job-request]]
+==== Update Job Request
+
+An `UpdateJobRequest` object is created with a `JobUpdate` object.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-request]
+--------------------------------------------------
+<1> Constructing a new request referencing a `JobUpdate` object
+
+==== Optional Arguments
+
+The `JobUpdate` object has many optional arguments with which to update an existing {ml}
+job. An existing, non-null `jobId` must be referenced in its creation.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-options]
+--------------------------------------------------
+<1> Mandatory, non-null `jobId` referencing an existing {ml} job
+<2> Updated description
+<3> Updated analysis limits
+<4> Updated background persistence interval
+<5> Updated analysis config's categorization filters
+<6> Updated detectors through the `JobUpdate.DetectorUpdate` object
+<7> Updated group membership
+<8> Updated result retention
+<9> Updated model plot configuration
+<10> Updated model snapshot retention setting
+<11> Updated custom settings
+<12> Updated renormalization window
+
+Included with these options are specific optional `JobUpdate.DetectorUpdate` updates.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-detector-options]
+--------------------------------------------------
+<1> The index of the detector to update
+<2> The optional description of the detector
+<3> The `DetectionRule` rules that apply to this detector
+
+[[java-rest-high-x-pack-ml-update-job-execution]]
+==== Execution
+
+The request can be executed through the `MachineLearningClient` contained
+in the `RestHighLevelClient` object, accessed via the `machineLearning()` method.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute]
+--------------------------------------------------
+
+[[java-rest-high-x-pack-ml-update-job-execution-async]]
+==== Asynchronous Execution
+
+The request can also be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-execute-async]
+--------------------------------------------------
+<1> The `UpdateJobRequest` to execute and the `ActionListener` to use when
+the execution completes
+
+The method does not block and returns immediately. The passed `ActionListener` is used
+to notify the caller of completion.
A typical `ActionListener` for `PutJobResponse` may
+look like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-listener]
+--------------------------------------------------
+<1> `onResponse` is called back when the action is completed successfully
+<2> `onFailure` is called back when some unexpected error occurs
+
+[[java-rest-high-x-pack-ml-update-job-response]]
+==== Update Job Response
+
+A `PutJobResponse` contains the updated `Job` object
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-update-job-response]
+--------------------------------------------------
+<1> `getResponse()` returns the updated `Job` object

diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index f01e12c4c9812..c482c8bccff23 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -216,6 +216,7 @@ The Java High Level REST Client supports the following Machine Learning APIs:
 * <>
 * <>
 * <>
+* <<java-rest-high-x-pack-ml-update-job>>
 * <>
 * <>
 * <>
@@ -227,6 +228,7 @@ include::ml/get-job.asciidoc[]
 include::ml/delete-job.asciidoc[]
 include::ml/open-job.asciidoc[]
 include::ml/close-job.asciidoc[]
+include::ml/update-job.asciidoc[]
 include::ml/flush-job.asciidoc[]
 include::ml/get-job-stats.asciidoc[]
 include::ml/get-buckets.asciidoc[]

From 7ad71f906a9608e1c168358624f4b0a39b240296 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Thu, 6 Sep 2018 14:42:06 +0200
Subject: [PATCH 16/91] Upgrade to a Lucene 8 snapshot (#33310)

The main benefit of the upgrade for users is the search optimization for
top scored documents when the total hit count is not needed. However,
this optimization is not activated in this change; there is another
issue open to discuss how it should be integrated smoothly.
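For context, the optimization the commit message refers to surfaces in the
Lucene 8 collector API roughly as follows (a sketch against the Lucene 8 API,
not code from this change; `searcher` and `query` are assumed to exist):

["source","java"]
--------------------------------------------------
// Sketch of the Lucene 8 top-hits optimization; not part of this patch.
// With a finite totalHitsThreshold the collector may skip blocks of
// non-competitive documents, so the reported hit count can become a
// lower bound rather than an exact count.
TopScoreDocCollector collector = TopScoreDocCollector.create(10, 1000); // top 10 hits, count up to 1000
searcher.search(query, collector);
TopDocs topDocs = collector.topDocs();
// topDocs.totalHits.relation may be GREATER_THAN_OR_EQUAL_TO instead of EQUAL_TO
--------------------------------------------------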
Some comments about the change: * Tests that can produce negative scores have been adapted but we need to forbid them completely: #33309 Closes #32899 --- .../elasticsearch/gradle/BuildPlugin.groovy | 6 +- buildSrc/version.properties | 2 +- .../org/elasticsearch/client/SearchIT.java | 2 +- .../plugins/InstallPluginCommand.java | 4 +- docs/Versions.asciidoc | 4 +- docs/plugins/analysis-phonetic.asciidoc | 1 - .../metrics/tophits-aggregation.asciidoc | 4 +- .../analyzers/standard-analyzer.asciidoc | 2 - docs/reference/analysis/tokenfilters.asciidoc | 2 - .../asciifolding-tokenfilter.asciidoc | 4 +- .../tokenfilters/elision-tokenfilter.asciidoc | 2 +- .../keep-types-tokenfilter.asciidoc | 4 +- .../keep-words-tokenfilter.asciidoc | 4 +- .../snowball-tokenfilter.asciidoc | 2 +- .../standard-tokenfilter.asciidoc | 15 -- .../tokenfilters/stemmer-tokenfilter.asciidoc | 2 +- .../how-to/recipes/stemming.asciidoc | 8 +- .../index-modules/similarity.asciidoc | 24 ++-- .../mapping/types/percolator.asciidoc | 3 - .../migration/migrate_7_0/analysis.asciidoc | 4 + docs/reference/redirects.asciidoc | 5 + docs/reference/search/explain.asciidoc | 87 ++++++------ docs/reference/search/profile.asciidoc | 36 ++++- .../search/request/inner-hits.asciidoc | 8 +- .../search/suggesters/phrase-suggest.asciidoc | 4 +- .../matrix/stats/MatrixStatsAggregator.java | 5 +- .../common/ChineseAnalyzerProvider.java | 3 +- .../analysis/common/CommonAnalysisPlugin.java | 17 +-- .../common/EdgeNGramTokenFilterFactory.java | 8 +- .../common/NGramTokenFilterFactory.java | 7 +- .../analysis/common/SnowballAnalyzer.java | 6 +- .../common/SnowballAnalyzerProvider.java | 6 +- .../common/StandardHtmlStripAnalyzer.java | 8 +- .../analysis/common/PatternAnalyzerTests.java | 10 +- .../common/SnowballAnalyzerTests.java | 4 +- ...essions-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - ...essions-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 + .../painless/PainlessExecuteAction.java | 3 +- .../elasticsearch/painless/ScoreTests.java | 15 ++ .../ScriptedMetricAggContextsTests.java | 6 + .../painless/SimilarityScriptTests.java | 4 +- .../rest-api-spec/test/painless/30_search.yml | 2 +- .../ParentToChildrenAggregator.java | 5 +- .../ParentChildInnerHitContextBuilder.java | 37 +++-- .../percolator/PercolateQuery.java | 61 +++++--- .../percolator/PercolateQueryBuilder.java | 26 ++-- .../percolator/PercolatorFieldMapper.java | 9 +- .../PercolatorMatchedSlotSubFetchPhase.java | 6 +- .../percolator/CandidateQueryTests.java | 101 ++++++++------ .../PercolateQueryBuilderTests.java | 56 +------- .../percolator/PercolateQueryTests.java | 16 +-- .../PercolateWithNestedQueryBuilderTests.java | 57 ++++++++ ...PercolatorHighlightSubFetchPhaseTests.java | 6 +- ...rcolatorMatchedSlotSubFetchPhaseTests.java | 11 +- ...ers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - ...ers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 + .../test/analysis_icu/20_search.yml | 2 +- ...uromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - ...uromoji-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 + ...rs-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - ...rs-nori-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 + ...honetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - ...honetic-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 + .../test/analysis_phonetic/10_metaphone.yml | 2 +- .../analysis_phonetic/20_double_metaphone.yml | 2 +- .../analysis_phonetic/30_beider_morse.yml | 2 +- .../test/analysis_phonetic/40_search.yml | 2 +- .../analysis_phonetic/50_daitch_mokotoff.yml | 2 +- ...smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 - 
 ...smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...stempel-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...fologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...fologik-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 .../rescore/ExampleRescoreBuilderTests.java | 3 +-
 ...-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...-common-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...ne-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...ne-core-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...rouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...rouping-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...lighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...lighter-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...ne-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...ne-join-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...-memory-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...ne-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...ne-misc-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...queries-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...yparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...yparser-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...spatial-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...-extras-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...atial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...atial3d-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 ...suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...suggest-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 .../queries/BinaryDocValuesRangeQuery.java | 3 +-
 .../lucene/queries/BlendedTermQuery.java | 30 ++--
 .../apache/lucene/queries/MinDocQuery.java | 3 +-
 .../queries/SearchAfterSortedDocQuery.java | 9 +-
 .../search/grouping/CollapseTopFieldDocs.java | 24 ++--
 .../grouping/CollapsingTopDocsCollector.java | 48 +++----
 .../uhighlight/CustomUnifiedHighlighter.java | 8 +-
 .../vectorhighlight/CustomFieldQuery.java | 12 +-
 .../main/java/org/elasticsearch/Version.java | 2 +-
 .../action/search/MaxScoreCollector.java | 62 +++++++++
 .../action/search/SearchPhaseController.java | 63 +++++----
 .../action/termvectors/TermVectorsFields.java | 7 +
 .../action/termvectors/TermVectorsWriter.java | 13 +-
 .../elasticsearch/common/lucene/Lucene.java | 131 ++++++++++++++----
 .../common/lucene/MinimumScoreCollector.java | 5 +-
 .../lucene/index/FilterableTermsEnum.java | 9 +-
 .../lucene/search/FilteredCollector.java | 5 +-
 .../lucene/search/TopDocsAndMaxScore.java} | 27 ++--
 .../search/function/CombineFunction.java | 22 +--
 .../function/FieldValueFactorFunction.java | 2 +-
 .../search/function/FunctionScoreQuery.java | 35 +++--
 .../search/function/MinScoreScorer.java | 10 ++
 .../search/function/RandomScoreFunction.java | 2 +-
 .../search/function/ScriptScoreFunction.java | 9 +-
 .../search/function/WeightFactorFunction.java | 2 +-
 .../settings/AbstractScopedSettings.java | 4 +-
 .../common/util/CachedSupplier.java | 48 +++++++
 .../index/analysis/StopAnalyzerProvider.java | 3 +-
 .../analysis/StopTokenFilterFactory.java | 4 +-
 .../index/cache/bitset/BitsetFilterCache.java | 3 +-
 .../index/codec/CodecService.java | 6 +-
 .../PerFieldMappingPostingFormatCodec.java | 5 +-
 .../index/engine/LuceneChangesSnapshot.java | 2 +-
 .../RecoverySourcePruneMergePolicy.java | 3 +-
 .../index/fielddata/IndexFieldData.java | 3 +-
 .../index/mapper/DocumentMapper.java | 3 +-
 .../index/mapper/TextFieldMapper.java | 2 +-
 .../index/mapper/TypeFieldMapper.java | 4 +-
 .../index/query/BoostingQueryBuilder.java | 4 +-
 .../index/query/NestedQueryBuilder.java | 33 +++--
 .../index/query/ScriptQueryBuilder.java | 3 +-
 .../query/SpanMultiTermQueryBuilder.java | 4 +-
 .../functionscore/DecayFunctionBuilder.java | 2 +-
 .../search/ESToParentBlockJoinQuery.java | 4 +-
 .../index/shard/IndexSearcherWrapper.java | 2 +-
 .../index/shard/ShardSplittingQuery.java | 5 +-
 .../index/similarity/ScriptedSimilarity.java | 122 +++++-----------
 .../index/similarity/SimilarityProviders.java | 62 +++++++--
 .../indices/analysis/AnalysisModule.java | 44 +++++-
 .../indices/analysis/PreBuiltAnalyzers.java | 3 +-
 .../elasticsearch/rest/BaseRestHandler.java | 4 +-
 .../script/SimilarityScript.java | 4 +-
 .../script/SimilarityWeightScript.java | 4 +-
 .../org/elasticsearch/search/SearchHit.java | 2 +-
 .../elasticsearch/search/SearchModule.java | 2 +
 .../elasticsearch/search/SearchService.java | 4 +-
 .../search/aggregations/AggregatorBase.java | 13 +-
 .../aggregations/AggregatorFactory.java | 5 +-
 .../search/aggregations/BucketCollector.java | 5 +-
 .../aggregations/MultiBucketCollector.java | 29 ++--
 .../bucket/BestBucketsDeferringCollector.java | 9 +-
 .../bucket/DeferringBucketCollector.java | 5 +-
 .../MergingBucketsDeferringCollector.java | 11 +-
 .../AdjacencyMatrixAggregatorFactory.java | 3 +-
 .../bucket/composite/CompositeAggregator.java | 5 +-
 .../composite/PointsSortedDocsProducer.java | 8 +-
 .../filter/FilterAggregatorFactory.java | 3 +-
 .../filter/FiltersAggregatorFactory.java | 3 +-
 .../bucket/geogrid/GeoHashGridAggregator.java | 8 +-
 .../AutoDateHistogramAggregator.java | 8 +-
 .../histogram/DateHistogramAggregator.java | 8 +-
 .../bucket/histogram/HistogramAggregator.java | 8 +-
 .../bucket/nested/NestedAggregator.java | 3 +-
 .../bucket/range/BinaryRangeAggregator.java | 8 +-
 .../bucket/range/RangeAggregator.java | 8 +-
 .../sampler/BestDocsDeferringCollector.java | 12 +-
 .../bucket/sampler/SamplerAggregator.java | 5 +-
 .../bucket/terms/LongTermsAggregator.java | 8 +-
 .../bucket/terms/StringTermsAggregator.java | 8 +-
 .../bucket/terms/TermsAggregator.java | 2 +-
 .../metrics/avg/AvgAggregator.java | 5 +-
 .../cardinality/CardinalityAggregator.java | 5 +-
 .../metrics/max/MaxAggregator.java | 5 +-
 .../metrics/min/MinAggregator.java | 5 +-
 .../hdr/AbstractHDRPercentilesAggregator.java | 5 +-
 .../AbstractTDigestPercentilesAggregator.java | 5 +-
 .../scripted/ScriptedMetricAggregator.java | 5 +-
 .../metrics/stats/StatsAggregator.java | 5 +-
 .../extended/ExtendedStatsAggregator.java | 5 +-
 .../metrics/sum/SumAggregator.java | 5 +-
 .../metrics/tophits/InternalTopHits.java | 53 ++++---
 .../metrics/tophits/TopHitsAggregator.java | 124 ++++++++++-------
 .../weighted_avg/WeightedAvgAggregator.java | 5 +-
 .../search/collapse/CollapseContext.java | 6 +-
 .../elasticsearch/search/dfs/DfsPhase.java | 16 ++-
 .../search/dfs/DfsSearchResult.java | 44 ++++--
 .../search/fetch/FetchPhase.java | 10 +-
 .../fetch/subphase/InnerHitsContext.java | 7 +-
 .../subphase/InnerHitsFetchSubPhase.java | 14 +-
 .../subphase/MatchedQueriesFetchSubPhase.java | 3 +-
 .../fetch/subphase/ScoreFetchSubPhase.java | 77 ++++++++++
 .../search/internal/ContextIndexSearcher.java | 30 ++--
 .../search/internal/ScrollContext.java | 5 +-
 .../aggregation/ProfilingAggregator.java | 5 +-
 .../query/InternalProfileCollector.java | 5 +-
 .../profile/query/ProfileCollector.java | 5 +-
 .../search/profile/query/ProfileScorer.java | 24 +++-
 .../search/profile/query/QueryTimingType.java | 4 +-
 .../search/query/QueryCollectorContext.java | 3 +-
 .../search/query/QueryPhase.java | 13 +-
 .../search/query/QuerySearchResult.java | 49 +++----
 .../search/query/TopDocsCollectorContext.java | 95 ++++++++-----
 .../search/rescore/QueryRescorer.java | 19 ++-
 .../search/rescore/RescorePhase.java | 9 +-
 .../search/slice/DocValuesSliceQuery.java | 3 +-
 .../search/slice/TermsSliceQuery.java | 3 +-
 .../completion/CompletionSuggester.java | 8 +-
 .../DirectCandidateGeneratorBuilder.java | 4 +-
 .../suggest/term/TermSuggestionBuilder.java | 4 +-
 .../CollapsingTopDocsCollectorTests.java | 54 ++++----
 .../lucene/queries/BlendedTermQueryTests.java | 14 +-
 ...sRandomBinaryDocValuesRangeQueryTests.java | 20 +--
 .../CustomUnifiedHighlighterTests.java | 2 +-
 .../java/org/elasticsearch/VersionTests.java | 7 +-
 .../action/search/DfsQueryPhaseTests.java | 30 ++--
 .../action/search/FetchSearchPhaseTests.java | 32 +++--
 .../search/SearchPhaseControllerTests.java | 45 +++---
 .../common/lucene/LuceneTests.java | 11 +-
 .../search/function/MinScoreScorerTests.java | 5 +
 .../deps/lucene/VectorHighlighterTests.java | 8 +-
 .../elasticsearch/index/IndexModuleTests.java | 10 +-
 .../index/IndexServiceTests.java | 6 +-
 .../elasticsearch/index/codec/CodecTests.java | 7 +-
 .../index/engine/InternalEngineTests.java | 36 ++---
 .../index/engine/SegmentTests.java | 2 +-
 .../AbstractFieldDataImplTestCase.java | 12 +-
 .../AbstractStringFieldDataTestCase.java | 4 +-
 .../index/mapper/DoubleIndexingDocTests.java | 14 +-
 .../query/BoostingQueryBuilderTests.java | 4 +-
 .../index/query/DisMaxQueryBuilderTests.java | 2 +-
 .../query/QueryStringQueryBuilderTests.java | 2 +-
 .../query/TermsSetQueryBuilderTests.java | 4 +-
 .../functionscore/FunctionScoreTests.java | 35 ++---
 .../query/plugin/DummyQueryParserPlugin.java | 5 +-
 .../IndexLevelReplicationTests.java | 2 +-
 .../AbstractNumberNestedSortingTestCase.java | 14 +-
 .../nested/DoubleNestedSortingTests.java | 2 +-
 .../nested/FloatNestedSortingTests.java | 2 +-
 .../search/nested/NestedSortingTests.java | 42 +++---
 .../shard/IndexSearcherWrapperTests.java | 8 +-
 .../index/shard/IndexShardTests.java | 8 +-
 .../index/shard/ShardSplittingQueryTests.java | 6 +-
 .../similarity/ScriptedSimilarityTests.java | 55 +++++---
 .../indices/IndicesQueryCacheTests.java | 5 +-
 .../indices/IndicesRequestCacheTests.java | 2 +-
 .../indices/analysis/AnalysisModuleTests.java | 73 +++++++---
 .../elasticsearch/search/SearchHitsTests.java | 4 +-
 .../AggregationCollectorTests.java | 2 +-
 .../MultiBucketCollectorTests.java | 18 ++-
 .../BestBucketsDeferringCollectorTests.java | 9 +-
 .../BestDocsDeferringCollectorTests.java | 5 +-
 .../metrics/tophits/InternalTopHitsTests.java | 16 ++-
 .../basic/TransportTwoNodesSearchIT.java | 41 +++---
 .../functionscore/ExplainableScriptIT.java | 9 +-
 .../search/functionscore/FunctionScoreIT.java | 9 +-
 .../profile/query/QueryProfilerTests.java | 5 +-
 .../profile/query/RandomQueryGenerator.java | 3 +-
 .../search/query/QueryPhaseTests.java | 116 +++++++---------
 .../slice/DocValuesSliceQueryTests.java | 5 +-
 .../search/slice/TermsSliceQueryTests.java | 6 +-
 .../suggest/CompletionSuggestSearchIT.java | 4 +-
 .../search/suggest/SuggestSearchIT.java | 4 +-
 .../phrase/DirectCandidateGeneratorTests.java | 4 +-
 .../analysis/MyFilterTokenFilterFactory.java | 5 +-
 .../analysis/AnalysisFactoryTestCase.java | 5 +-
 .../script/MockScriptEngine.java | 4 +-
 .../aggregations/AggregatorTestCase.java | 3 +-
 .../test/engine/MockEngineSupport.java | 4 +-
 .../elasticsearch/analysis/common/test1.json | 2 +-
 .../elasticsearch/analysis/common/test1.yml | 2 +-
 .../accesscontrol/FieldSubsetReader.java | 10 +-
 .../SecurityIndexSearcherWrapper.java | 2 +-
 .../DocumentSubsetReaderTests.java | 8 +-
 ...SecurityIndexSearcherWrapperUnitTests.java | 9 +-
 .../accesscontrol/OptOutQueryCacheTests.java | 3 +-
 ...ne-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 | 1 -
 ...ne-core-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 +
 .../xpack/sql/util/StringUtils.java | 4 +-
 293 files changed, 2139 insertions(+), 1392 deletions(-)
 delete mode 100644 docs/reference/analysis/tokenfilters/standard-tokenfilter.asciidoc
 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1
 create mode 100644 modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateWithNestedQueryBuilderTests.java
 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1
 delete mode 100644 server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1
 create mode 100644 server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java
 rename server/src/main/java/org/elasticsearch/{index/analysis/StandardTokenFilterFactory.java => common/lucene/search/TopDocsAndMaxScore.java} (54%)
 create mode 100644 server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java
 delete mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
 create mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
index 6ed98217d34fe..110982e31e661 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy
@@ -539,9 +539,9 @@ class BuildPlugin implements Plugin {
                 from generatePOMTask.destination
                 into "${project.buildDir}/distributions"
                 rename {
-                    generatePOMTask.ext.pomFileName == null ?
-                        "${project.archivesBaseName}-${project.version}.pom" :
-                        generatePOMTask.ext.pomFileName
+                    generatePOMTask.ext.pomFileName == null ?
+                        "${project.archivesBaseName}-${project.version}.pom" :
+                        generatePOMTask.ext.pomFileName
                 }
             }
         }
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 34c266913d0a2..386457146685f 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
 elasticsearch = 7.0.0-alpha1
-lucene = 7.5.0-snapshot-13b9e28f9d
+lucene = 8.0.0-snapshot-4d78db26be
 
 # optional dependencies
 spatial4j = 0.7
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
index 739a590ba5f64..063fce9bcac5e 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java
@@ -1034,7 +1034,7 @@ public void testExplain() throws IOException {
             assertTrue(explainResponse.isExists());
             assertTrue(explainResponse.isMatch());
             assertTrue(explainResponse.hasExplanation());
-            assertThat(explainResponse.getExplanation().getValue(), greaterThan(0.0f));
+            assertThat(explainResponse.getExplanation().getValue().floatValue(), greaterThan(0.0f));
             assertNull(explainResponse.getGetResult());
         }
         {
diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
index 3c54afb92c7b7..dd19594d29b87 100644
--- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
+++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/InstallPluginCommand.java
@@ -21,7 +21,7 @@
 
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
-import org.apache.lucene.search.spell.LevensteinDistance;
+import org.apache.lucene.search.spell.LevenshteinDistance;
 import org.apache.lucene.util.CollectionUtil;
 import org.bouncycastle.bcpg.ArmoredInputStream;
 import org.bouncycastle.jce.provider.BouncyCastleProvider;
@@ -355,7 +355,7 @@ boolean urlExists(Terminal terminal, String urlString) throws IOException {
 
     /** Returns all the official plugin names that look similar to pluginId. **/
     private List checkMisspelledPlugin(String pluginId) {
-        LevensteinDistance ld = new LevensteinDistance();
+        LevenshteinDistance ld = new LevenshteinDistance();
         List> scoredKeys = new ArrayList<>();
         for (String officialPlugin : OFFICIAL_PLUGINS) {
             float distance = ld.getDistance(pluginId, officialPlugin);
diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc
index 6e127a6ccfc69..f0303323d855f 100644
--- a/docs/Versions.asciidoc
+++ b/docs/Versions.asciidoc
@@ -1,7 +1,7 @@
 :version: 7.0.0-alpha1
 :major-version: 7.x
-:lucene_version: 7.5.0
-:lucene_version_path: 7_5_0
+:lucene_version: 8.0.0
+:lucene_version_path: 8_0_0
 :branch: master
 :jdk: 1.8.0_131
 :jdk_major: 8
diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc
index a75c21fdac658..9d9df4827fd4e 100644
--- a/docs/plugins/analysis-phonetic.asciidoc
+++ b/docs/plugins/analysis-phonetic.asciidoc
@@ -38,7 +38,6 @@ PUT phonetic_sample
         "my_analyzer": {
           "tokenizer": "standard",
           "filter": [
-            "standard",
             "lowercase",
             "my_metaphone"
           ]
diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
index 5eeb3a4605a40..958f48d835cd9 100644
--- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
@@ -320,7 +320,7 @@ Top hits response snippet with a nested hit, which resides in the first slot of
    "by_nested": {
       "hits": {
          "total": 1,
-         "max_score": 0.2876821,
+         "max_score": 0.3616575,
          "hits": [
             {
                "_index": "sales",
@@ -330,7 +330,7 @@ Top hits response snippet with a nested hit, which resides in the first slot of
                   "field": "comments",  <1>
                   "offset": 0  <2>
                },
-               "_score": 0.2876821,
+               "_score": 0.3616575,
                "_source": {
                   "comment": "This car could have better brakes", <3>
                   "username": "baddriver007"
diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
index 20aa072066b5f..3097ece21db23 100644
--- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
+++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc
@@ -273,7 +273,6 @@ Tokenizer::
 * <>
 
 Token Filters::
-* <>
 * <>
 * <> (disabled by default)
 
@@ -292,7 +291,6 @@ PUT /standard_example
       "rebuilt_standard": {
         "tokenizer": "standard",
         "filter": [
-          "standard",
           "lowercase"       <1>
         ]
       }
diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc
index 5899744247899..f531bc5d0e9e3 100644
--- a/docs/reference/analysis/tokenfilters.asciidoc
+++ b/docs/reference/analysis/tokenfilters.asciidoc
@@ -9,8 +9,6 @@ or add tokens (eg synonyms).
 
 Elasticsearch has a number of built in token filters which can be used to build <>.
 
-include::tokenfilters/standard-tokenfilter.asciidoc[]
-
 include::tokenfilters/asciifolding-tokenfilter.asciidoc[]
 
 include::tokenfilters/flatten-graph-tokenfilter.asciidoc[]
diff --git a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc
index 73d35549da8b6..bd22b013334a9 100644
--- a/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/asciifolding-tokenfilter.asciidoc
@@ -15,7 +15,7 @@ PUT /asciifold_example
         "analyzer" : {
             "default" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "asciifolding"]
+                "filter" : ["asciifolding"]
             }
         }
     }
@@ -37,7 +37,7 @@ PUT /asciifold_example
         "analyzer" : {
             "default" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "my_ascii_folding"]
+                "filter" : ["my_ascii_folding"]
             }
         },
         "filter" : {
diff --git a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
index 956c5ad13d034..924903b9f65a8 100644
--- a/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/elision-tokenfilter.asciidoc
@@ -16,7 +16,7 @@ PUT /elision_example
         "analyzer" : {
             "default" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "elision"]
+                "filter" : ["elision"]
             }
         },
         "filter" : {
diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc
index 05687f8669155..33a927c4b98bf 100644
--- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc
@@ -26,7 +26,7 @@ PUT /keep_types_example
         "analyzer" : {
             "my_analyzer" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "lowercase", "extract_numbers"]
+                "filter" : ["lowercase", "extract_numbers"]
             }
         },
         "filter" : {
@@ -87,7 +87,7 @@ PUT /keep_types_exclude_example
         "analyzer" : {
             "my_analyzer" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "lowercase", "remove_numbers"]
+                "filter" : ["lowercase", "remove_numbers"]
             }
         },
         "filter" : {
diff --git a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc
index 50c74942a0101..b7385379be94b 100644
--- a/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/keep-words-tokenfilter.asciidoc
@@ -27,11 +27,11 @@ PUT /keep_words_example
         "analyzer" : {
             "example_1" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "lowercase", "words_till_three"]
+                "filter" : ["lowercase", "words_till_three"]
             },
             "example_2" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "lowercase", "words_in_file"]
+                "filter" : ["lowercase", "words_in_file"]
             }
         },
         "filter" : {
diff --git a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc
index 93e1eed26b4b2..99ed03649ff93 100644
--- a/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/snowball-tokenfilter.asciidoc
@@ -19,7 +19,7 @@ PUT /my_index
         "analyzer" : {
             "my_analyzer" : {
                 "tokenizer" : "standard",
-                "filter" : ["standard", "lowercase", "my_snow"]
+                "filter" : ["lowercase", "my_snow"]
             }
         },
         "filter" : {
diff --git a/docs/reference/analysis/tokenfilters/standard-tokenfilter.asciidoc
b/docs/reference/analysis/tokenfilters/standard-tokenfilter.asciidoc deleted file mode 100644 index 0270bf71b4b3e..0000000000000 --- a/docs/reference/analysis/tokenfilters/standard-tokenfilter.asciidoc +++ /dev/null @@ -1,15 +0,0 @@ -[[analysis-standard-tokenfilter]] -=== Standard Token Filter - -A token filter of type `standard` that normalizes tokens extracted with -the -<>. - -[TIP] -================================================== - -The `standard` token filter currently does nothing. It remains as a placeholder -in case some filtering function needs to be added in a future version. - -================================================== diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index a13c6746d74be..f59e2f3f2cf88 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -13,7 +13,7 @@ PUT /my_index "analyzer" : { "my_analyzer" : { "tokenizer" : "standard", - "filter" : ["standard", "lowercase", "my_stemmer"] + "filter" : ["lowercase", "my_stemmer"] } }, "filter" : { diff --git a/docs/reference/how-to/recipes/stemming.asciidoc b/docs/reference/how-to/recipes/stemming.asciidoc index 37901cb3abe62..c09922fe63fc3 100644 --- a/docs/reference/how-to/recipes/stemming.asciidoc +++ b/docs/reference/how-to/recipes/stemming.asciidoc @@ -143,13 +143,13 @@ GET index/_search }, "hits": { "total": 1, - "max_score": 0.80259144, + "max_score": 0.8025915, "hits": [ { "_index": "index", "_type": "_doc", "_id": "1", - "_score": 0.80259144, + "_score": 0.8025915, "_source": { "body": "Ski resort" } @@ -200,13 +200,13 @@ GET index/_search }, "hits": { "total": 1, - "max_score": 0.80259144, + "max_score": 0.8025915, "hits": [ { "_index": "index", "_type": "_doc", "_id": "1", - "_score": 0.80259144, + "_score": 0.8025915, "_source": { "body": "Ski resort" } diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index f5d5610ca1a2e..cf5cab106f891 100644 --- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -295,27 +295,27 @@ Which yields: "details": [] }, { - "value": 2.0, + "value": 2, "description": "field.docCount", "details": [] }, { - "value": 4.0, + "value": 4, "description": "field.sumDocFreq", "details": [] }, { - "value": 5.0, + "value": 5, "description": "field.sumTotalTermFreq", "details": [] }, { - "value": 1.0, + "value": 1, "description": "term.docFreq", "details": [] }, { - "value": 2.0, + "value": 2, "description": "term.totalTermFreq", "details": [] }, @@ -325,7 +325,7 @@ Which yields: "details": [] }, { - "value": 3.0, + "value": 3, "description": "doc.length", "details": [] } @@ -469,27 +469,27 @@ GET /index/_search?explain=true "details": [] }, { - "value": 2.0, + "value": 2, "description": "field.docCount", "details": [] }, { - "value": 4.0, + "value": 4, "description": "field.sumDocFreq", "details": [] }, { - "value": 5.0, + "value": 5, "description": "field.sumTotalTermFreq", "details": [] }, { - "value": 1.0, + "value": 1, "description": "term.docFreq", "details": [] }, { - "value": 2.0, + "value": 2, "description": "term.totalTermFreq", "details": [] }, @@ -499,7 +499,7 @@ GET /index/_search?explain=true "details": [] }, { - "value": 3.0, + "value": 3, "description": "doc.length", "details": [] } diff --git a/docs/reference/mapping/types/percolator.asciidoc 
b/docs/reference/mapping/types/percolator.asciidoc index 066d3ce1ac597..e4502d37360c9 100644 --- a/docs/reference/mapping/types/percolator.asciidoc +++ b/docs/reference/mapping/types/percolator.asciidoc @@ -446,7 +446,6 @@ PUT my_queries1 "type": "custom", "tokenizer": "standard", "filter": [ - "standard", "lowercase", "wildcard_edge_ngram" ] @@ -597,7 +596,6 @@ PUT my_queries2 "type": "custom", "tokenizer": "standard", "filter": [ - "standard", "lowercase", "reverse", "wildcard_edge_ngram" @@ -607,7 +605,6 @@ PUT my_queries2 "type": "custom", "tokenizer": "standard", "filter": [ - "standard", "lowercase", "reverse" ] diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc index db617d3301fd7..6e6cc5b078d61 100644 --- a/docs/reference/migration/migrate_7_0/analysis.asciidoc +++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc @@ -22,3 +22,7 @@ The `delimited_payload_filter` was deprecated and renamed to `delimited_payload` Using it in indices created before 7.0 will issue deprecation warnings. Using the old name in new indices created in 7.0 will throw an error. Use the new name `delimited_payload` instead. + +==== `standard` filter has been removed + +The `standard` token filter has been removed because it doesn't change anything in the stream. diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 1a932fdd41400..f07d1d09747e7 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -555,3 +555,8 @@ See <>. See <>. +[role="exclude",id="analysis-standard-tokenfilter"] +=== Standard filter removed + +The standard token filter has been removed. + diff --git a/docs/reference/search/explain.asciidoc b/docs/reference/search/explain.asciidoc index fd09984f1696f..341c8e4802b09 100644 --- a/docs/reference/search/explain.asciidoc +++ b/docs/reference/search/explain.asciidoc @@ -30,62 +30,67 @@ This will yield the following result: [source,js] -------------------------------------------------- { - "_index": "twitter", - "_type": "_doc", - "_id": "0", - "matched": true, - "explanation": { - "value": 1.6943599, - "description": "weight(message:elasticsearch in 0) [PerFieldSimilarity], result of:", - "details": [ + "_index":"twitter", + "_type":"_doc", + "_id":"0", + "matched":true, + "explanation":{ + "value":1.6943597, + "description":"weight(message:elasticsearch in 0) [PerFieldSimilarity], result of:", + "details":[ { - "value": 1.6943599, - "description": "score(doc=0,freq=1.0 = termFreq=1.0\n), product of:", - "details": [ + "value":1.6943597, + "description":"score(freq=1.0), product of:", + "details":[ { - "value": 1.3862944, - "description": "idf, computed as log(1 + (docCount - docFreq + 0.5) / (docFreq + 0.5)) from:", - "details": [ + "value":2.2, + "description":"scaling factor, k1 + 1", + "details":[] + }, + { + "value":1.3862944, + "description":"idf, computed as log(1 + (N - n + 0.5) / (n + 0.5)) from:", + "details":[ { - "value": 1.0, - "description": "docFreq", - "details": [] + "value":1, + "description":"n, number of documents containing term", + "details":[] }, { - "value": 5.0, - "description": "docCount", - "details": [] - } - ] + "value":5, + "description":"N, total number of documents with field", + "details":[] + } + ] }, - { - "value": 1.2222223, - "description": "tfNorm, computed as (freq * (k1 + 1)) / (freq + k1 * (1 - b + b * fieldLength / avgFieldLength)) from:", - "details": [ + { + "value":0.5555555, + "description":"tf, computed 
as freq / (freq + k1 * (1 - b + b * dl / avgdl)) from:", + "details":[ { - "value": 1.0, - "description": "termFreq=1.0", - "details": [] + "value":1.0, + "description":"freq, occurrences of term within document", + "details":[] }, { - "value": 1.2, - "description": "parameter k1", - "details": [] + "value":1.2, + "description":"k1, term saturation parameter", + "details":[] }, { - "value": 0.75, - "description": "parameter b", - "details": [] + "value":0.75, + "description":"b, length normalization parameter", + "details":[] }, { - "value": 5.4, - "description": "avgFieldLength", - "details": [] + "value":3.0, + "description":"dl, length of field", + "details":[] }, { - "value": 3.0, - "description": "fieldLength", - "details": [] + "value":5.4, + "description":"avgdl, average length of field", + "details":[] } ] } diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index e2df59ad3f4a3..bc7edcd3a88fa 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -72,7 +72,11 @@ This will yield the following result: "next_doc": 53876, "next_doc_count": 5, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 }, "children": [ { @@ -91,7 +95,11 @@ This will yield the following result: "next_doc": 10111, "next_doc_count": 5, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 } }, { @@ -110,7 +118,11 @@ This will yield the following result: "next_doc": 2852, "next_doc_count": 5, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 } } ] @@ -288,7 +300,11 @@ The `breakdown` component lists detailed timing statistics about low-level Lucen "next_doc": 53876, "next_doc_count": 5, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 } -------------------------------------------------- // TESTRESPONSE[s/^/{\n"took": $body.took,\n"timed_out": $body.timed_out,\n"_shards": $body._shards,\n"hits": $body.hits,\n"profile": {\n"shards": [ {\n"id": "$body.$_path",\n"searches": [{\n"query": [{\n"type": "BooleanQuery",\n"description": "message:some message:number",\n"time_in_nanos": $body.$_path,/] @@ -548,7 +564,11 @@ And the response: "score_count": 1, "build_scorer": 377872, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 } }, { @@ -567,7 +587,11 @@ And the response: "score_count": 1, "build_scorer": 112551, "advance": 0, - "advance_count": 0 + "advance_count": 0, + "compute_max_score": 0, + "compute_max_score_count": 0, + "shallow_advance": 0, + "shallow_advance_count": 0 } } ], diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index 887ae2bdf149e..8e719a02c759b 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -265,19 +265,19 @@ Response not included in text but tested for completeness sake. 
..., "hits": { "total": 1, - "max_score": 1.0444683, + "max_score": 1.0444684, "hits": [ { "_index": "test", "_type": "_doc", "_id": "1", - "_score": 1.0444683, + "_score": 1.0444684, "_source": ..., "inner_hits": { "comments": { <1> "hits": { "total": 1, - "max_score": 1.0444683, + "max_score": 1.0444684, "hits": [ { "_index": "test", @@ -287,7 +287,7 @@ Response not included in text but tested for completeness sake. "field": "comments", "offset": 1 }, - "_score": 1.0444683, + "_score": 1.0444684, "fields": { "comments.text.keyword": [ "words words words" diff --git a/docs/reference/search/suggesters/phrase-suggest.asciidoc b/docs/reference/search/suggesters/phrase-suggest.asciidoc index cba299e97cb8d..96d60467d1072 100644 --- a/docs/reference/search/suggesters/phrase-suggest.asciidoc +++ b/docs/reference/search/suggesters/phrase-suggest.asciidoc @@ -33,12 +33,12 @@ PUT test "trigram": { "type": "custom", "tokenizer": "standard", - "filter": ["standard", "shingle"] + "filter": ["shingle"] }, "reverse": { "type": "custom", "tokenizer": "standard", - "filter": ["standard", "reverse"] + "filter": ["reverse"] } }, "filter": { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java index aa19f62fedc4f..714e7759c54fb 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.matrix.stats; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; @@ -61,8 +62,8 @@ final class MatrixStatsAggregator extends MetricsAggregator { } @Override - public boolean needsScores() { - return (valuesSources == null) ? false : valuesSources.needsScores(); + public ScoreMode scoreMode() { + return (valuesSources != null && valuesSources.needsScores()) ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java index 01b529188c6f0..2259560bcbc7d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ChineseAnalyzerProvider.java @@ -19,6 +19,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -35,7 +36,7 @@ public class ChineseAnalyzerProvider extends AbstractIndexAnalyzerProvider getPreBuiltAnalyzerProviderFactorie () -> new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, - () -> new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET))); + () -> new SnowballAnalyzer("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET))); // Language analyzers: analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, ArabicAnalyzer::new)); @@ -336,7 +335,8 @@ public List getPreBuiltAnalyzerProviderFactorie analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, BulgarianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, CatalanAnalyzer::new)); // chinese analyzer: only for old indices, best effort - analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.ONE, StandardAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.ONE, + () -> new StandardAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, CJKAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, CzechAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, DanishAnalyzer::new)); @@ -408,14 +408,14 @@ public List getPreConfiguredTokenFilters() { DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER))); filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer()))); filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, input -> - new EdgeNGramTokenFilter(input, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE))); + new EdgeNGramTokenFilter(input, 1))); filters.add(PreConfiguredTokenFilter.singletonWithVersion("edgeNGram", false, (reader, version) -> { if (version.onOrAfter(org.elasticsearch.Version.V_6_4_0)) { DEPRECATION_LOGGER.deprecatedAndMaybeLog("edgeNGram_deprecation", "The [edgeNGram] token filter name is deprecated and will be removed in a future version. 
" + "Please change the filter name to [edge_ngram] instead."); } - return new EdgeNGramTokenFilter(reader, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE); + return new EdgeNGramTokenFilter(reader, 1); })); filters.add(PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES))); @@ -432,14 +432,14 @@ public List getPreConfiguredTokenFilters() { new LimitTokenCountFilter(input, LimitTokenCountFilterFactory.DEFAULT_MAX_TOKEN_COUNT, LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS))); - filters.add(PreConfiguredTokenFilter.singleton("ngram", false, NGramTokenFilter::new)); + filters.add(PreConfiguredTokenFilter.singleton("ngram", false, reader -> new NGramTokenFilter(reader, 1, 2, false))); filters.add(PreConfiguredTokenFilter.singletonWithVersion("nGram", false, (reader, version) -> { if (version.onOrAfter(org.elasticsearch.Version.V_6_4_0)) { DEPRECATION_LOGGER.deprecatedAndMaybeLog("nGram_deprecation", "The [nGram] token filter name is deprecated and will be removed in a future version. " + "Please change the filter name to [ngram] instead."); } - return new NGramTokenFilter(reader); + return new NGramTokenFilter(reader, 1, 2, false); })); filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new)); @@ -462,7 +462,8 @@ public List getPreConfiguredTokenFilters() { filters.add(PreConfiguredTokenFilter.singleton("sorani_normalization", true, SoraniNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("stemmer", false, PorterStemFilter::new)); // The stop filter is in lucene-core but the English stop words set is in lucene-analyzers-common - filters.add(PreConfiguredTokenFilter.singleton("stop", false, input -> new StopFilter(input, StopAnalyzer.ENGLISH_STOP_WORDS_SET))); + filters.add(PreConfiguredTokenFilter.singleton("stop", false, + input -> new StopFilter(input, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET))); filters.add(PreConfiguredTokenFilter.singleton("trim", true, TrimFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("truncate", false, input -> new TruncateTokenFilter(input, 10))); filters.add(PreConfiguredTokenFilter.singleton("type_as_payload", false, TypeAsPayloadTokenFilter::new)); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java index af6d30a035476..6bcd2b737feeb 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenFilterFactory.java @@ -21,7 +21,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.EdgeNGramTokenFilter; -import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.reverse.ReverseStringFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -41,8 +40,8 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory { EdgeNGramTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); - this.minGram = settings.getAsInt("min_gram", 
NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE); - this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); + this.minGram = settings.getAsInt("min_gram", 1); + this.maxGram = settings.getAsInt("max_gram", 2); this.side = parseSide(settings.get("side", "front")); } @@ -63,7 +62,8 @@ public TokenStream create(TokenStream tokenStream) { result = new ReverseStringFilter(result); } - result = new EdgeNGramTokenFilter(result, minGram, maxGram); + // TODO: Expose preserveOriginal + result = new EdgeNGramTokenFilter(result, minGram, maxGram, false); // side=BACK is not supported anymore but applying ReverseStringFilter up-front and after the token filter has the same effect if (side == SIDE_BACK) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java index 22b060613163c..8d99ec1d1a15d 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenFilterFactory.java @@ -39,8 +39,8 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { NGramTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { super(indexSettings, name, settings); int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff(); - this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE); - this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE); + this.minGram = settings.getAsInt("min_gram", 1); + this.maxGram = settings.getAsInt("max_gram", 2); int ngramDiff = maxGram - minGram; if (ngramDiff > maxAllowedNgramDiff) { if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) { @@ -57,6 +57,7 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { @Override public TokenStream create(TokenStream tokenStream) { - return new NGramTokenFilter(tokenStream, minGram, maxGram); + // TODO: Expose preserveOriginal + return new NGramTokenFilter(tokenStream, minGram, maxGram, false); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java index bc4b9a763fd68..74e6bbcc65c2a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzer.java @@ -27,11 +27,10 @@ import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.en.EnglishPossessiveFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; -import org.apache.lucene.analysis.standard.StandardFilter; import org.apache.lucene.analysis.standard.StandardTokenizer; import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter; -/** Filters {@link StandardTokenizer} with {@link StandardFilter}, {@link +/** Filters {@link StandardTokenizer} with {@link * LowerCaseFilter}, {@link StopFilter} and {@link SnowballFilter}. * * Available stemmers are listed in org.tartarus.snowball.ext. 
The name of a @@ -57,8 +56,7 @@ public final class SnowballAnalyzer extends Analyzer { stopSet = CharArraySet.unmodifiableSet(CharArraySet.copy(stopWords)); } - /** Constructs a {@link StandardTokenizer} filtered by a {@link - StandardFilter}, a {@link LowerCaseFilter}, a {@link StopFilter}, + /** Constructs a {@link StandardTokenizer} filtered by a {@link LowerCaseFilter}, a {@link StopFilter}, and a {@link SnowballFilter} */ @Override public TokenStreamComponents createComponents(String fieldName) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java index 0f213df9ad722..6eec01570a881 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SnowballAnalyzerProvider.java @@ -19,8 +19,8 @@ package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; -import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.de.GermanAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.fr.FrenchAnalyzer; import org.apache.lucene.analysis.nl.DutchAnalyzer; import org.elasticsearch.common.settings.Settings; @@ -42,7 +42,7 @@ * Configuration of language is done with the "language" attribute or the analyzer. * Also supports additional stopwords via "stopwords" attribute *

- * The SnowballAnalyzer comes with a StandardFilter, LowerCaseFilter, StopFilter + * The SnowballAnalyzer comes with a LowerCaseFilter, StopFilter * and the SnowballFilter. * * @@ -52,7 +52,7 @@ public class SnowballAnalyzerProvider extends AbstractIndexAnalyzerProvider defaultLanguageStopwords = new HashMap<>(); - defaultLanguageStopwords.put("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET); + defaultLanguageStopwords.put("English", EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); defaultLanguageStopwords.put("Dutch", DutchAnalyzer.getDefaultStopSet()); defaultLanguageStopwords.put("German", GermanAnalyzer.getDefaultStopSet()); defaultLanguageStopwords.put("German2", GermanAnalyzer.getDefaultStopSet()); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java index f0b2b7188e5ba..e2ee540fe3e70 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StandardHtmlStripAnalyzer.java @@ -25,8 +25,7 @@ import org.apache.lucene.analysis.StopwordAnalyzerBase; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; -import org.apache.lucene.analysis.core.StopAnalyzer; -import org.apache.lucene.analysis.standard.StandardFilter; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.standard.StandardTokenizer; public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { @@ -36,7 +35,7 @@ public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase { */ @Deprecated public StandardHtmlStripAnalyzer() { - super(StopAnalyzer.ENGLISH_STOP_WORDS_SET); + super(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); } StandardHtmlStripAnalyzer(CharArraySet stopwords) { @@ -46,8 +45,7 @@ public StandardHtmlStripAnalyzer() { @Override protected TokenStreamComponents createComponents(final String fieldName) { final Tokenizer src = new StandardTokenizer(); - TokenStream tok = new StandardFilter(src); - tok = new LowerCaseFilter(tok); + TokenStream tok = new LowerCaseFilter(src); if (!stopwords.isEmpty()) { tok = new StopFilter(tok, stopwords); } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java index d2d226d6250e8..29122d7292168 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PatternAnalyzerTests.java @@ -20,7 +20,7 @@ */ import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.StopAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.util.BytesRef; import org.elasticsearch.test.ESTokenStreamTestCase; @@ -44,7 +44,7 @@ public void testNonWordPattern() throws IOException { // split on non-letter pattern, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\W+"), true, - StopAnalyzer.ENGLISH_STOP_WORDS_SET); + EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox", "abcd1234", "56", "78", "dc" }); } @@ -61,7 +61,7 @@ public void testWhitespacePattern() throws IOException 
{ // Split on whitespace patterns, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile("\\s+"), true, - StopAnalyzer.ENGLISH_STOP_WORDS_SET); + EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(b, "The quick brown Fox,the abcd1234 (56.78) dc.", new String[] { "quick", "brown", "fox,the", "abcd1234", "(56.78)", "dc." }); } @@ -78,7 +78,7 @@ public void testCustomPattern() throws IOException { // split on comma, lowercase, english stopwords PatternAnalyzer b = new PatternAnalyzer(Pattern.compile(","), true, - StopAnalyzer.ENGLISH_STOP_WORDS_SET); + EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(b, "Here,Are,some,Comma,separated,words,", new String[] { "here", "some", "comma", "separated", "words" }); } @@ -109,7 +109,7 @@ public void testHugeDocument() throws IOException { /** blast some random strings through the analyzer */ public void testRandomStrings() throws Exception { - Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, StopAnalyzer.ENGLISH_STOP_WORDS_SET); + Analyzer a = new PatternAnalyzer(Pattern.compile(","), true, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); checkRandomData(random(), a, 10000*RANDOM_MULTIPLIER); } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java index 0b9998eda31c5..360d17ef0f4f3 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/SnowballAnalyzerTests.java @@ -20,7 +20,7 @@ */ import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.test.ESTokenStreamTestCase; public class SnowballAnalyzerTests extends ESTokenStreamTestCase { @@ -33,7 +33,7 @@ public void testEnglish() throws Exception { public void testStopwords() throws Exception { Analyzer a = new SnowballAnalyzer("English", - StandardAnalyzer.STOP_WORDS_SET); + EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); assertAnalyzesTo(a, "the quick brown fox jumped", new String[]{"quick", "brown", "fox", "jump"}); } diff --git a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 0ebdddcc5f1b5..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fded6bb485b8b01bb2a9280162fd14d4d3ce4510 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..bec50d36793d8 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +5f469e925dde5dff81b9d56f465a8babb56cd26b \ No newline at end of file diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 094a62d188baf..2c60136209ca7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -26,6 +26,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.store.RAMDirectory; @@ -550,7 +551,7 @@ static Response innerShardOperation(Request request, ScriptService scriptService Query luceneQuery = request.contextSetup.query.rewrite(context).toQuery(context); IndexSearcher indexSearcher = new IndexSearcher(leafReaderContext.reader()); luceneQuery = indexSearcher.rewrite(luceneQuery); - Weight weight = indexSearcher.createWeight(luceneQuery, true, 1f); + Weight weight = indexSearcher.createWeight(luceneQuery, ScoreMode.COMPLETE, 1f); Scorer scorer = weight.scorer(indexSearcher.getIndexReader().leaves().get(0)); // Consume the first (and only) match. int docID = scorer.iterator().nextDoc(); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java index 567f462046146..76bb6d14dcf61 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java @@ -49,6 +49,11 @@ public void testScoreWorks() { public float score() throws IOException { return 2.5f; } + + @Override + public float getMaxScore(int upTo) throws IOException { + return 2.5f; + } }, true)); } @@ -60,6 +65,11 @@ public void testScoreNotUsed() { public float score() throws IOException { throw new AssertionError("score() should not be called"); } + + @Override + public float getMaxScore(int upTo) throws IOException { + return Float.MAX_VALUE; + } }, true)); } @@ -75,6 +85,11 @@ public float score() throws IOException { } throw new AssertionError("score() should not be called twice"); } + + @Override + public float getMaxScore(int upTo) throws IOException { + return 4.5f; + } }, true)); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 6ee021c695f99..4820bc10cf24f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptedMetricAggContexts; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -74,6 +75,11 @@ public void testMapBasic() { @Override public DocIdSetIterator iterator() { return null; } + + @Override + public float getMaxScore(int upTo) throws IOException { + return 0.5f; + } }; ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 0795ab7777526..1b4c4eb0ff636 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java
index 0795ab7777526..1b4c4eb0ff636 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java
@@ -89,7 +89,7 @@ public void testBasics() throws IOException {
             .add(new TermQuery(new Term("match", "yes")), Occur.FILTER)
             .build(), 3.2f);
         TopDocs topDocs = searcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits);
+        assertEquals(1, topDocs.totalHits.value);
         assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0);
         w.close();
         dir.close();
@@ -128,7 +128,7 @@ public void testWeightScript() throws IOException {
             .add(new TermQuery(new Term("match", "yes")), Occur.FILTER)
             .build(), 3.2f);
         TopDocs topDocs = searcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits);
+        assertEquals(1, topDocs.totalHits.value);
         assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0);
         w.close();
         dir.close();
diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml
index a9aa00aa5e036..9a43e1f9aa445 100644
--- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml
+++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/30_search.yml
@@ -161,7 +161,7 @@
                     "script_score": {
                         "script": {
                             "lang": "painless",
-                            "source": "-doc['num1'].value"
+                            "source": "3 - doc['num1'].value"
                         }
                     }
                 }]
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
index b555afce67ae7..4469c9633dd87 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.Bits;
@@ -78,8 +79,8 @@ public ParentToChildrenAggregator(String name, AggregatorFactories factories,
             throws IOException {
         super(name, factories, context, parent, pipelineAggregators, metaData);
         // these two filters are cached in the parser
-        this.childFilter = context.searcher().createNormalizedWeight(childFilter, false);
-        this.parentFilter = context.searcher().createNormalizedWeight(parentFilter, false);
+        this.childFilter = context.searcher().createWeight(context.searcher().rewrite(childFilter), ScoreMode.COMPLETE_NO_SCORES, 1f);
+        this.parentFilter = context.searcher().createWeight(context.searcher().rewrite(parentFilter), ScoreMode.COMPLETE_NO_SCORES, 1f);
         this.parentOrdToBuckets = context.bigArrays().newLongArray(maxOrd, false);
         this.parentOrdToBuckets.fill(0, maxOrd, -1);
         this.parentOrdToOtherBuckets = new LongObjectPagedHashMap<>(context.bigArrays());
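Note: two more recurring Lucene 8 changes show up above. TopDocs.totalHits is now a TotalHits object (a count plus a relation) rather than a long, hence the .value accessors; and scores may no longer be negative, which is why the test script becomes "3 - doc['num1'].value" instead of plain negation. A small sketch of reading TotalHits, assuming any Lucene 8 TopDocs:

import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;

class TotalHitsExample {
    static String describe(TopDocs topDocs) {
        TotalHits totalHits = topDocs.totalHits; // was a plain long in Lucene 7
        String prefix = totalHits.relation == TotalHits.Relation.EQUAL_TO ? "" : ">=";
        return prefix + totalHits.value;
    }
}

Construction mirrors this: new TopDocs(new TotalHits(count, TotalHits.Relation.EQUAL_TO), scoreDocs), as the test hunks further down show.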
diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java
index 5e57a2774055d..9e9b55872cfca 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java
@@ -23,16 +23,21 @@
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.MultiCollector;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopDocsCollector;
 import org.apache.lucene.search.TopFieldCollector;
 import org.apache.lucene.search.TopScoreDocCollector;
 import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.action.search.MaxScoreCollector;
 import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.InnerHitContextBuilder;
@@ -92,14 +97,14 @@ static final class JoinFieldInnerHitSubContext extends InnerHitsContext.InnerHit
         }
 
         @Override
-        public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
+        public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException {
             Weight innerHitQueryWeight = createInnerHitQueryWeight();
-            TopDocs[] result = new TopDocs[hits.length];
+            TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length];
             for (int i = 0; i < hits.length; i++) {
                 SearchHit hit = hits[i];
                 String joinName = getSortedDocValue(joinFieldMapper.name(), context, hit.docId());
                 if (joinName == null) {
-                    result[i] = Lucene.EMPTY_TOP_DOCS;
+                    result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
                     continue;
                 }
 
@@ -107,7 +112,7 @@ public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
                 ParentIdFieldMapper parentIdFieldMapper =
                     joinFieldMapper.getParentIdFieldMapper(typeName, fetchChildInnerHits == false);
                 if (parentIdFieldMapper == null) {
-                    result[i] = Lucene.EMPTY_TOP_DOCS;
+                    result[i] = new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN);
                     continue;
                 }
 
@@ -125,29 +130,41 @@ public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
                     q = context.mapperService().fullName(IdFieldMapper.NAME).termQuery(parentId, qsc);
                 }
 
-                Weight weight = context.searcher().createNormalizedWeight(q, false);
+                Weight weight = context.searcher().createWeight(context.searcher().rewrite(q), ScoreMode.COMPLETE_NO_SCORES, 1f);
                 if (size() == 0) {
                     TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
                     for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
                         intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
                     }
-                    result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, Float.NaN);
+                    result[i] = new TopDocsAndMaxScore(
+                        new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO),
+                            Lucene.EMPTY_SCORE_DOCS), Float.NaN);
                 } else {
                     int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
                     TopDocsCollector topDocsCollector;
+                    MaxScoreCollector maxScoreCollector = null;
                     if (sort() != null) {
-                        topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores(), true);
+                        topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
+                        if (trackScores()) {
+                            maxScoreCollector = new MaxScoreCollector();
+                        }
                     } else {
-                        topDocsCollector = TopScoreDocCollector.create(topN);
+                        topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
+                        maxScoreCollector = new MaxScoreCollector();
                     }
                     try {
                         for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
-                            intersect(weight, innerHitQueryWeight, topDocsCollector, ctx);
+                            intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx);
                         }
                     } finally {
                         clearReleasables(Lifetime.COLLECTION);
                     }
-                    result[i] = topDocsCollector.topDocs(from(), size());
+                    TopDocs topDocs = topDocsCollector.topDocs(from(), size());
+                    float maxScore = Float.NaN;
+                    if (maxScoreCollector != null) {
+                        maxScore = maxScoreCollector.getMaxScore();
+                    }
+                    result[i] = new TopDocsAndMaxScore(topDocs, maxScore);
                 }
             }
             return result;
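Note: Lucene 8 top-docs collectors no longer track the maximum score themselves; TopFieldCollector.create and TopScoreDocCollector.create instead take a total-hit-count threshold (Integer.MAX_VALUE keeps counting exact, lower values let Lucene skip non-competitive docs). The hunk above therefore pairs the collector with Elasticsearch's new MaxScoreCollector via MultiCollector.wrap and carries both results in TopDocsAndMaxScore. A sketch of that pattern, with searcher, query and topN as placeholders:

import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopScoreDocCollector;
import org.elasticsearch.action.search.MaxScoreCollector;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;

class MaxScoreSearch {
    static TopDocsAndMaxScore topDocsWithMaxScore(IndexSearcher searcher, Query query, int topN) throws IOException {
        TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
        MaxScoreCollector maxScoreCollector = new MaxScoreCollector(); // max score is tracked separately now
        searcher.search(query, MultiCollector.wrap(topDocsCollector, maxScoreCollector));
        return new TopDocsAndMaxScore(topDocsCollector.topDocs(), maxScoreCollector.getMaxScore());
    }
}

The PercolateQuery changes that follow are related but separate: the query now threads an optional nonNestedDocsFilter through, so that when a percolated document carries nested children only the root document can produce a match.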
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java
index 5bbf998883eee..bf491727ff576 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java
@@ -26,11 +26,14 @@
 import org.apache.lucene.search.Explanation;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.common.CheckedFunction;
@@ -53,14 +56,17 @@ final class PercolateQuery extends Query implements Accountable {
     private final Query candidateMatchesQuery;
     private final Query verifiedMatchesQuery;
     private final IndexSearcher percolatorIndexSearcher;
+    private final Query nonNestedDocsFilter;
 
     PercolateQuery(String name, QueryStore queryStore, List documents,
-                   Query candidateMatchesQuery, IndexSearcher percolatorIndexSearcher, Query verifiedMatchesQuery) {
+                   Query candidateMatchesQuery, IndexSearcher percolatorIndexSearcher,
+                   Query nonNestedDocsFilter, Query verifiedMatchesQuery) {
         this.name = name;
         this.documents = Objects.requireNonNull(documents);
         this.candidateMatchesQuery = Objects.requireNonNull(candidateMatchesQuery);
         this.queryStore = Objects.requireNonNull(queryStore);
         this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher);
+        this.nonNestedDocsFilter = nonNestedDocsFilter;
         this.verifiedMatchesQuery = Objects.requireNonNull(verifiedMatchesQuery);
     }
 
@@ -68,16 +74,17 @@ final class PercolateQuery extends Query implements Accountable {
     public Query rewrite(IndexReader reader) throws IOException {
         Query rewritten = candidateMatchesQuery.rewrite(reader);
         if (rewritten != candidateMatchesQuery) {
-            return new PercolateQuery(name, queryStore, documents, rewritten, percolatorIndexSearcher, verifiedMatchesQuery);
+            return new PercolateQuery(name, queryStore, documents, rewritten, percolatorIndexSearcher,
+                nonNestedDocsFilter, verifiedMatchesQuery);
         } else {
             return this;
         }
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-        final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, false, boost);
-        final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, false, boost);
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+        final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost);
+        final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, boost);
         return new Weight(this) {
             @Override
             public void extractTerms(Set set) {
@@ -91,7 +98,7 @@ public Explanation explain(LeafReaderContext leafReaderContext, int docId) throw
                 int result = twoPhaseIterator.approximation().advance(docId);
                 if (result == docId) {
                     if (twoPhaseIterator.matches()) {
-                        if (needsScores) {
+                        if (scoreMode.needsScores()) {
                             CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext);
                             Query query = percolatorQueries.apply(docId);
                             Explanation detail = percolatorIndexSearcher.explain(query, 0);
@@ -112,9 +119,9 @@ public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException {
                     return null;
                 }
 
-                final CheckedFunction queries = queryStore.getQueries(leafReaderContext);
-                if (needsScores) {
-                    return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {
+                final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext);
+                if (scoreMode.needsScores()) {
+                    return new BaseScorer(this, approximation) {
 
                         float score;
 
@@ -122,8 +129,14 @@ public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException {
                         boolean matchDocId(int docId) throws IOException {
                             Query query = percolatorQueries.apply(docId);
                             if (query != null) {
+                                if (nonNestedDocsFilter != null) {
+                                    query = new BooleanQuery.Builder()
+                                        .add(query, Occur.MUST)
+                                        .add(nonNestedDocsFilter, Occur.FILTER)
+                                        .build();
+                                }
                                 TopDocs topDocs = percolatorIndexSearcher.search(query, 1);
-                                if (topDocs.totalHits > 0) {
+                                if (topDocs.scoreDocs.length > 0) {
                                     score = topDocs.scoreDocs[0].score;
                                     return true;
                                 } else {
@@ -142,7 +155,7 @@ public float score() throws IOException {
                 } else {
                     ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext);
                     Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer);
-                    return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) {
+                    return new BaseScorer(this, approximation) {
 
                         @Override
                         public float score() throws IOException {
@@ -159,7 +172,16 @@ boolean matchDocId(int docId) throws IOException {
                                 return true;
                             }
                             Query query = percolatorQueries.apply(docId);
-                            return query != null && Lucene.exists(percolatorIndexSearcher, query);
+                            if (query == null) {
+                                return false;
+                            }
+                            if (nonNestedDocsFilter != null) {
+                                query = new BooleanQuery.Builder()
+                                    .add(query, Occur.MUST)
+                                    .add(nonNestedDocsFilter, Occur.FILTER)
+                                    .build();
+                            }
+                            return Lucene.exists(percolatorIndexSearcher, query);
                         }
                     };
                 }
@@ -182,6 +204,10 @@ IndexSearcher getPercolatorIndexSearcher() {
         return percolatorIndexSearcher;
     }
 
+    boolean excludesNestedDocs() {
+        return nonNestedDocsFilter != null;
+    }
+
     List getDocuments() {
         return documents;
     }
@@ -241,15 +267,10 @@ interface QueryStore {
     abstract static class BaseScorer extends Scorer {
 
         final Scorer approximation;
-        final CheckedFunction percolatorQueries;
-        final IndexSearcher percolatorIndexSearcher;
 
-        BaseScorer(Weight weight, Scorer approximation, CheckedFunction percolatorQueries,
-                   IndexSearcher percolatorIndexSearcher) {
+        BaseScorer(Weight weight, Scorer approximation) {
             super(weight);
             this.approximation = approximation;
-            this.percolatorQueries = percolatorQueries;
-            this.percolatorIndexSearcher = percolatorIndexSearcher;
         }
 
         @Override
@@ -279,6 +300,10 @@ public final int docID() {
 
         abstract boolean matchDocId(int docId) throws IOException;
 
+        @Override
+        public float getMaxScore(int upTo) throws IOException {
+            return Float.MAX_VALUE;
+        }
     }
 }
diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
index 445076b8eba07..09cc04458ec70 100644
--- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
+++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java
@@ -29,10 +29,9 @@
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.memory.MemoryIndex;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.join.BitSetProducer;
@@ -56,7 +55,6 @@
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.XContent;
@@ -605,13 +603,19 @@ protected Analyzer getWrappedAnalyzer(String fieldName) {
             }
         };
         final IndexSearcher docSearcher;
+        final boolean excludeNestedDocuments;
         if (docs.size() > 1 || docs.get(0).docs().size() > 1) {
             assert docs.size() != 1 || docMapper.hasNestedObjects();
             docSearcher = createMultiDocumentSearcher(analyzer, docs);
+            excludeNestedDocuments = docMapper.hasNestedObjects() && docs.stream()
+                .map(ParsedDocument::docs)
+                .mapToInt(List::size)
+                .anyMatch(size -> size > 1);
         } else {
             MemoryIndex memoryIndex = MemoryIndex.fromDocument(docs.get(0).rootDoc(), analyzer, true, false);
             docSearcher = memoryIndex.createSearcher();
             docSearcher.setQueryCache(null);
+            excludeNestedDocuments = false;
         }
 
         PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType;
@@ -621,7 +625,7 @@ protected Analyzer getWrappedAnalyzer(String fieldName) {
             percolateShardContext,
             pft.mapUnmappedFieldsAsText);
 
-        return pft.percolateQuery(name, queryStore, documents, docSearcher, context.indexVersionCreated());
+        return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
     }
 
     public String getField() {
@@ -653,17 +657,7 @@ static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection

documents, - IndexSearcher searcher, Version indexVersion) throws IOException { + IndexSearcher searcher, boolean excludeNestedDocuments, Version indexVersion) throws IOException { IndexReader indexReader = searcher.getIndexReader(); Tuple t = createCandidateQuery(indexReader, indexVersion); Query candidateQuery = t.v1(); @@ -261,7 +262,11 @@ Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List createCandidateQuery(IndexReader indexReader, Version indexVersion) throws IOException { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index 4d5e3d2a988f9..fdcc9156b415e 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -74,7 +75,8 @@ static void innerHitsExecute(Query mainQuery, IndexSearcher indexSearcher, Searc // See https://issues.apache.org/jira/browse/LUCENE-8055 // for now we just use version 6.0 version to find nested parent final Version version = Version.V_6_0_0; //context.mapperService().getIndexSettings().getIndexVersionCreated(); - Weight weight = percolatorIndexSearcher.createNormalizedWeight(Queries.newNonNestedFilter(version), false); + Weight weight = percolatorIndexSearcher.createWeight(percolatorIndexSearcher.rewrite(Queries.newNonNestedFilter(version)), + ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(percolatorIndexSearcher.getIndexReader().leaves().get(0)); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); BitSet rootDocs = BitSet.of(s.iterator(), memoryIndexMaxDoc); @@ -96,7 +98,7 @@ static void innerHitsExecute(Query mainQuery, IndexSearcher indexSearcher, Searc } TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits == 0) { + if (topDocs.totalHits.value == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 9c8979601e8dc..3d9a8fb8ebb08 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -61,6 +61,7 @@ import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -595,51 +596,52 @@ public void testRangeQueries() throws Exception { Version v = Version.V_6_1_0; MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - Query 
query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), + percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits); + assertEquals(1L, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new LongPoint("long_field", 7L)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits); + assertEquals(1L, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits); + assertEquals(1L, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new FloatPoint("float_field", 17)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new DoublePoint("double_field", 21)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.0.4"))), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); 
topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -777,16 +779,16 @@ public void testPercolateMatchAll() throws Exception { memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); - TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(3L, topDocs.totalHits); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(3L, topDocs.totalHits.value); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits); + assertEquals(3L, topDocs.totalHits.value); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -810,9 +812,9 @@ public void testFunctionScoreQuery() throws Exception { memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); - TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(2L, topDocs.totalHits); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -860,17 +862,18 @@ public void testPercolateSmallAndLargeDocument() throws Exception { try (IndexReader ir = DirectoryReader.open(directory)){ IndexSearcher percolateSearcher = new IndexSearcher(ir); PercolateQuery query = (PercolateQuery) - fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), + percolateSearcher, false, v); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits); + assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits); + assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -890,18 +893,19 @@ public void testPercolateSmallAndLargeDocument() throws 
Exception { try (IndexReader ir = DirectoryReader.open(directory)){ IndexSearcher percolateSearcher = new IndexSearcher(ir); PercolateQuery query = (PercolateQuery) - fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), + percolateSearcher, false, v); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits); + assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits); + assertEquals(2L, topDocs.totalHits.value); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -951,9 +955,9 @@ public void testDuplicatedClauses() throws Exception { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1 value2 value3", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, v); - TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(2L, topDocs.totalHits); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(2L, topDocs.totalHits.value); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); } @@ -985,25 +989,25 @@ public void testDuplicatedClauses2() throws Exception { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1 value4 value5", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, v); - TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(1L, topDocs.totalHits); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(1L, topDocs.totalHits.value); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1 value2", new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, v); - topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(1L, topDocs.totalHits); + query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); + topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(1L, topDocs.totalHits.value); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value3", new 
WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, v); - topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(1L, topDocs.totalHits); + query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); + topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(1L, topDocs.totalHits.value); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1036,9 +1040,9 @@ public void testMsmAndRanges_disjunction() throws Exception { document.add(new IntPoint("int_field", 7)); MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, v); - TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC), true, true); - assertEquals(1L, topDocs.totalHits); + PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); + TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); + assertEquals(1L, topDocs.totalHits.value); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1046,7 +1050,7 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd boolean requireScore = randomBoolean(); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); Query query = requireScore ? 
percolateQuery : new ConstantScoreQuery(percolateQuery); TopDocs topDocs = shardSearcher.search(query, 100); @@ -1055,7 +1059,7 @@ private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryInd TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits)); + assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); @@ -1130,7 +1134,7 @@ private TopDocs executeQuery(PercolateQuery.QueryStore queryStore, IndexSearcher shardSearcher) throws IOException { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, Version.CURRENT); return shardSearcher.search(percolateQuery, 10); } @@ -1174,7 +1178,7 @@ private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore querySto } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { final IndexSearcher percolatorIndexSearcher = memoryIndex.createSearcher(); return new Weight(this) { @@ -1210,8 +1214,8 @@ protected boolean match(int doc) { try { Query query = leaf.apply(doc); TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.totalHits > 0) { - if (needsScores) { + if (topDocs.scoreDocs.length > 0) { + if (scoreMode.needsScores()) { _score[0] = topDocs.scoreDocs[0].score; } return true; @@ -1239,6 +1243,11 @@ public DocIdSetIterator iterator() { public float score() throws IOException { return _score[0]; } + + @Override + public float getMaxScore(int upTo) throws IOException { + return _score[0]; + } }; } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index eb7af5f30d061..be9c3f83f3f4b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -19,12 +19,6 @@ package org.elasticsearch.percolator; -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -40,8 +34,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.Rewriteable; @@ -63,7 +55,6 @@ import java.util.Set; import 
static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; public class PercolateQueryBuilderTests extends AbstractQueryTestCase { @@ -72,8 +63,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase parseQuery("{\"percolate\" : { \"document\": {}, \"documents\": [{}, {}], \"field\":\"" + queryField + "\"}}")); } - public void testCreateNestedDocumentSearcher() throws Exception { - int numNestedDocs = randomIntBetween(2, 8); - List docs = new ArrayList<>(numNestedDocs); - for (int i = 0; i < numNestedDocs; i++) { - docs.add(new ParseContext.Document()); - } - - Collection parsedDocument = Collections.singleton( - new ParsedDocument(null, null, "_id", "_type", null, docs, null, null, null)); - Analyzer analyzer = new WhitespaceAnalyzer(); - IndexSearcher indexSearcher = PercolateQueryBuilder.createMultiDocumentSearcher(analyzer, parsedDocument); - assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(numNestedDocs)); - - // ensure that any query get modified so that the nested docs are never included as hits: - Query query = new MatchAllDocsQuery(); - BooleanQuery result = (BooleanQuery) indexSearcher.createNormalizedWeight(query, true).getQuery(); - assertThat(result.clauses().size(), equalTo(2)); - assertThat(result.clauses().get(0).getQuery(), sameInstance(query)); - assertThat(result.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(result.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); - } - - public void testCreateMultiDocumentSearcher() throws Exception { - int numDocs = randomIntBetween(2, 8); - List docs = new ArrayList<>(); - for (int i = 0; i < numDocs; i++) { - docs.add(new ParsedDocument(null, null, "_id", "_type", null, - Collections.singletonList(new ParseContext.Document()), null, null, null)); - } - Analyzer analyzer = new WhitespaceAnalyzer(); - IndexSearcher indexSearcher = PercolateQueryBuilder.createMultiDocumentSearcher(analyzer, docs); - assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(numDocs)); - - // ensure that any query get modified so that the nested docs are never included as hits: - Query query = new MatchAllDocsQuery(); - BooleanQuery result = (BooleanQuery) indexSearcher.createNormalizedWeight(query, true).getQuery(); - assertThat(result.clauses().size(), equalTo(2)); - assertThat(result.clauses().get(0).getQuery(), sameInstance(query)); - assertThat(result.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST)); - assertThat(result.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); - } - private static BytesReference randomSource(Set usedFields) { try { // If we create two source that have the same field, but these fields have different kind of values (str vs. 
lng) then @@ -352,4 +301,5 @@ public void testFieldAlias() throws IOException { assertEquals(query.getCandidateMatchesQuery(), aliasQuery.getCandidateMatchesQuery()); assertEquals(query.getVerifiedMatchesQuery(), aliasQuery.getVerifiedMatchesQuery()); } + } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index ac9cc97499ce6..4c2c135554587 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -117,9 +117,9 @@ public void testPercolateQuery() throws Exception { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); // no scoring, wrapping it in a constant score query: Query query = new ConstantScoreQuery(new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("a")), - new TermQuery(new Term("select", "a")), percolateSearcher, new MatchNoDocsQuery(""))); + new TermQuery(new Term("select", "a")), percolateSearcher, null, new MatchNoDocsQuery(""))); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -127,9 +127,9 @@ public void testPercolateQuery() throws Exception { assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); query = new ConstantScoreQuery(new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("b")), - new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery(""))); + new TermQuery(new Term("select", "b")), percolateSearcher, null, new MatchNoDocsQuery(""))); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -147,14 +147,14 @@ public void testPercolateQuery() throws Exception { assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score)); query = new ConstantScoreQuery(new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("c")), - new MatchAllDocsQuery(), percolateSearcher, new MatchAllDocsQuery())); + new MatchAllDocsQuery(), percolateSearcher, null, new MatchAllDocsQuery())); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits, equalTo(4L)); + assertThat(topDocs.totalHits.value, equalTo(4L)); query = new PercolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), - new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery("")); + new TermQuery(new Term("select", "b")), percolateSearcher, null, new MatchNoDocsQuery("")); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateWithNestedQueryBuilderTests.java 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateWithNestedQueryBuilderTests.java new file mode 100644 index 0000000000000..e58b6c6ad6a70 --- /dev/null +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateWithNestedQueryBuilderTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.percolator; + +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; + +public class PercolateWithNestedQueryBuilderTests extends PercolateQueryBuilderTests { + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + super.initializeAdditionalMappings(mapperService); + mapperService.merge("_doc", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef( + "_doc", "some_nested_object", "type=nested"))), MapperService.MergeReason.MAPPING_UPDATE); + } + + public void testDetectsNestedDocuments() throws IOException { + QueryShardContext shardContext = createShardContext(); + + PercolateQueryBuilder builder = new PercolateQueryBuilder(queryField, + new BytesArray("{ \"foo\": \"bar\" }"), XContentType.JSON); + QueryBuilder rewrittenBuilder = rewriteAndFetch(builder, shardContext); + PercolateQuery query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext); + assertFalse(query.excludesNestedDocs()); + + builder = new PercolateQueryBuilder(queryField, + new BytesArray("{ \"foo\": \"bar\", \"some_nested_object\": [ { \"baz\": 42 } ] }"), XContentType.JSON); + rewrittenBuilder = rewriteAndFetch(builder, shardContext); + query = (PercolateQuery) rewrittenBuilder.toQuery(shardContext); + assertTrue(query.excludesNestedDocs()); + } +} diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index f1b89d92ab11e..e5f2160cfcaab 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -46,7 +46,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public void testHitsExecutionNeeded() { PercolateQuery percolateQuery = new 
PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()); + new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY, emptyMap()); SearchContext searchContext = Mockito.mock(SearchContext.class); @@ -60,7 +60,7 @@ public void testHitsExecutionNeeded() { public void testLocatePercolatorQuery() { PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()); + new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()).size(), equalTo(0)); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); @@ -94,7 +94,7 @@ public void testLocatePercolatorQuery() { assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery).get(0), sameInstance(percolateQuery)); PercolateQuery percolateQuery2 = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")), - new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery()); + new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery()); bq = new BooleanQuery.Builder(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(bq.build()).size(), equalTo(0)); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java index a428726225b95..89356bf274d8d 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhaseTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.Directory; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.search.SearchHit; @@ -58,7 +59,7 @@ public void testHitsExecute() throws Exception { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), - new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery()); + new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery()); PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits); assertNotNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX)); @@ -72,7 +73,7 @@ public void testHitsExecute() throws Exception { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "value1", new WhitespaceAnalyzer()); PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(), - new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery()); + 
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());
 
             PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
             assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
@@ -85,7 +86,7 @@ public void testHitsExecute() throws Exception {
             MemoryIndex memoryIndex = new MemoryIndex();
             memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
             PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
-                new MatchAllDocsQuery(), memoryIndex.createSearcher(), new MatchNoDocsQuery());
+                new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());
 
             PercolatorMatchedSlotSubFetchPhase.innerHitsExecute(percolateQuery, indexSearcher, hits);
             assertNull(hits[0].field(PercolatorMatchedSlotSubFetchPhase.FIELD_NAME_PREFIX));
@@ -100,7 +101,7 @@ public void testConvertTopDocsToSlots() {
             scoreDocs[i] = new ScoreDoc(i, 1f);
         }
 
-        TopDocs topDocs = new TopDocs(scoreDocs.length, scoreDocs, 1f);
+        TopDocs topDocs = new TopDocs(new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO), scoreDocs);
         IntStream stream = PercolatorMatchedSlotSubFetchPhase.convertTopDocsToSlots(topDocs, null);
 
         int[] result = stream.toArray();
@@ -117,7 +118,7 @@ public void testConvertTopDocsToSlots_nestedDocs() {
         scoreDocs[2] = new ScoreDoc(8, 1f);
         scoreDocs[3] = new ScoreDoc(11, 1f);
         scoreDocs[4] = new ScoreDoc(14, 1f);
-        TopDocs topDocs = new TopDocs(scoreDocs.length, scoreDocs, 1f);
+        TopDocs topDocs = new TopDocs(new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO), scoreDocs);
 
         FixedBitSet bitSet = new FixedBitSet(15);
         bitSet.set(2);
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1
deleted file mode 100644
index 1e79e1e70ef8f..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.5.0-snapshot-13b9e28f9d.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a010e852be8d56efe1906e6da5292e4541239724
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1
new file mode 100644
index 0000000000000..be2e7ec355ac5
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1
@@ -0,0 +1 @@
+97a3758487272ba4d15720b0ca15b0f980310c89
\ No newline at end of file
diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yml
index 67ff1dab98483..89ef510c72b02 100644
--- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yml
+++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yml
@@ -12,7 +12,7 @@
             analyzer:
               my_analyzer:
                 tokenizer: standard
-                filter: ["standard", "lowercase", "my_collator"]
+                filter: ["lowercase", "my_collator"]
             filter:
               my_collator:
                 type: icu_collation
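Note: the yml hunks in this region drop "standard" from filter chains because the standard token filter, a no-op for a long time, is gone in Lucene 8; a standard tokenizer plus lowercase needs no stand-in. A minimal sketch of the equivalent programmatic analyzer, purely illustrative:

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardTokenizer;

class NoStandardFilterAnalyzer extends Analyzer {
    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer source = new StandardTokenizer();
        TokenStream sink = new LowerCaseFilter(source); // StandardFilter used to sit here and did nothing
        return new TokenStreamComponents(source, sink);
    }
}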
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1
deleted file mode 100644
index 2d9669e436229..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.5.0-snapshot-13b9e28f9d.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-88e0ed90d433a9088528485cd4f59311735d92a4
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1
new file mode 100644
index 0000000000000..a7f63df28d7e5
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1
@@ -0,0 +1 @@
+12ed739794cd317754684308ddc5bdbdcc46cdde
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1
deleted file mode 100644
index f7b8fdd4bc187..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.5.0-snapshot-13b9e28f9d.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-0daec9ac3c4bba5f91b1bc413c651b7a98313982
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1
new file mode 100644
index 0000000000000..8fc57bbf7e46d
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1
@@ -0,0 +1 @@
+4da6e5c17a17f0a9a99b518ea9985ea06996b63b
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1
deleted file mode 100644
index 80cf627011b4e..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.5.0-snapshot-13b9e28f9d.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f5af81eec04c1da0d6969cff18f360ff379b1bf7
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1
new file mode 100644
index 0000000000000..d94b274bf13ff
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1
@@ -0,0 +1 @@
+a36b2db18a2a22966ab0bf9fced775f22dd7029d
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml
index 1f326fe3776d1..1be0d8525a1c6 100644
--- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml
+++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml
@@ -13,7 +13,7 @@
             analyzer:
               my_analyzer:
                 tokenizer: standard
- filter: ["standard", "lowercase", "my_metaphone"] + filter: ["lowercase", "my_metaphone"] filter: my_metaphone: type: phonetic diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml index 259b0adea745d..bdd1ddef388df 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml @@ -13,7 +13,7 @@ analyzer: my_analyzer: tokenizer: standard - filter: ["standard", "lowercase", "beider_morse"] + filter: ["lowercase", "beider_morse"] filter: beider_morse: type: phonetic diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yml index 75c672172391c..34a5bfa1da14c 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yml @@ -12,7 +12,7 @@ analyzer: my_analyzer: tokenizer: standard - filter: ["standard", "lowercase", "my_metaphone"] + filter: ["lowercase", "my_metaphone"] filter: my_metaphone: type: phonetic diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml index c67b6892bc993..bee4c8bf5f432 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml @@ -13,7 +13,7 @@ analyzer: my_analyzer: tokenizer: standard - filter: ["standard", "lowercase", "daitch_mokotoff"] + filter: ["lowercase", "daitch_mokotoff"] filter: daitch_mokotoff: type: phonetic diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 14be684b96f3d..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9e649088ee298293aa95a05391dff9cb0582648e \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..f75d7abd6a36b --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +5f1d360a47d2fd166e970d17c46b284830e64258 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index ea55c790537f4..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47fb370054ba7413d050f13c177edf01180c31ca \ No newline at end of file diff --git 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..2e3943cf79345 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +b07883b5e988d1d991503aa49d9b59059518825d \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 2d6f580c35a23..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bc0708acbac195772b67b5ad2e9c4683d27ff450 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..1d21c6e5b613c --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +1b46b3ee62932de7ba7b670820a13eb973ec5777 \ No newline at end of file diff --git a/plugins/examples/rescore/src/test/java/org/elasticsearch/example/rescore/ExampleRescoreBuilderTests.java b/plugins/examples/rescore/src/test/java/org/elasticsearch/example/rescore/ExampleRescoreBuilderTests.java index d9fc4521a3593..36b5bea411a93 100644 --- a/plugins/examples/rescore/src/test/java/org/elasticsearch/example/rescore/ExampleRescoreBuilderTests.java +++ b/plugins/examples/rescore/src/test/java/org/elasticsearch/example/rescore/ExampleRescoreBuilderTests.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -68,7 +69,7 @@ public void testRescore() throws IOException { String fieldFactor = null; ExampleRescoreBuilder builder = new ExampleRescoreBuilder(factor, fieldFactor).windowSize(2); RescoreContext context = builder.buildContext(null); - TopDocs docs = new TopDocs(10, new ScoreDoc[3], 0); + TopDocs docs = new TopDocs(new TotalHits(10, TotalHits.Relation.EQUAL_TO), new ScoreDoc[3]); docs.scoreDocs[0] = new ScoreDoc(0, 1.0f); docs.scoreDocs[1] = new ScoreDoc(1, 1.0f); docs.scoreDocs[2] = new ScoreDoc(2, 1.0f); diff --git a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 2cbf39687624c..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c547b30525ad80d0ceeaa40c2d3a901c7e76fd46 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..3a02e483d6808 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +fa8e0fbef3e3fcf49ace4a4153580070def770eb \ No newline at end of file diff --git 
a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 9e2473361f033..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c327295d54d5abd2684e00c3aefe58aa1caace7 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..8279b81d6cfc0 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +3d636541581e338a1be7e3e176aac73d7ae0b323 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index fdedaf3fc5756..0000000000000 --- a/server/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -73dd7703a94ec2357581f65ee7c1c4d618ff310f \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..683b585bb2f61 --- /dev/null +++ b/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +126faacb28d1b8cc1ab81d702973d057892120d1 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 4e555692b0f9a..0000000000000 --- a/server/licenses/lucene-grouping-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1c3802fa30990a1758f2df19d17fe2c95fc45870 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..483f470b5e015 --- /dev/null +++ b/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +abd514ec02837f48b8c478287fde7cc5d6439ada \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 73b6c15f332f9..0000000000000 --- a/server/licenses/lucene-highlighter-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8d7abdbb7900d7e6a76c391d8be07217c0d882ca \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..27dd042c06bf3 --- /dev/null +++ b/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +778e87a263184b8ddcbb4ef9d244467933f32993 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 23414b8e8e134..0000000000000 --- a/server/licenses/lucene-join-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -011f78ae9d9a386fcf20ceea29ba30e75fb512e8 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 
b/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..13d2db8d210dc --- /dev/null +++ b/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +96aff29ad966204c73f8dd98d8116f09e34b6ebd \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index d227ebaf46368..0000000000000 --- a/server/licenses/lucene-memory-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c3dd461a7cebdcacc77304660218513e10f89adb \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..6e014f20c97fd --- /dev/null +++ b/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +e72e2accebb1277c57dfe21bc011195eed91dbfd \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index a892f3a2272ba..0000000000000 --- a/server/licenses/lucene-misc-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d63101181708d78eccc441b0d1193dd91d1a0bf1 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..57081e7aa10ba --- /dev/null +++ b/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +bf25587ebf6823781f5d7acffd7d65c46c21cb27 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 5d0fead48cbc9..0000000000000 --- a/server/licenses/lucene-queries-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -22e56fbd44d6a47d7dddbdda3c17ce22ad0a6680 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..6855364592ea5 --- /dev/null +++ b/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +6cad42923bcb6e1c6060ae1cbab574646e8c808e \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 8be3d6447b0bb..0000000000000 --- a/server/licenses/lucene-queryparser-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -36b38a1d71045f5bee5dc40526f8d57084dbdc00 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..f9d037120a342 --- /dev/null +++ b/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +e5841d7e877e51bbd2d325709353f5ab7e94b49a \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 6d968f5400c52..0000000000000 --- 
a/server/licenses/lucene-sandbox-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -21eb8b111bcb94f4abb8c6402dfd10f51ecc0b38 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..45c8934a8d41b --- /dev/null +++ b/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +fefe17f6ac0c7d505c5051e96d0f4916fec2bf9e \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index b6aec2eae1dda..0000000000000 --- a/server/licenses/lucene-spatial-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d60081c5641ed21aea82d5d0976b40e1f184c8e5 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..b02408a7683b3 --- /dev/null +++ b/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +22b0a9d9fb675f7c82a7a2b18f593f3278b40f11 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 6999baccc89e9..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2d42b373546aa8923d25e4e9a673dd186064f9bd \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..d4e8b662ce465 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +bd6449cc67a36891f6b3201489c5ed44d795fab0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index b866b1985568b..0000000000000 --- a/server/licenses/lucene-spatial3d-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f31607959e5a2ed84ab2d9a007a3f76e9a2d38c \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..9743868e5c748 --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ +5e2a8b3e9e19ad61fcbd27a138cf55f2d6cbfb2d \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 deleted file mode 100644 index 55e1c5990de63..0000000000000 --- a/server/licenses/lucene-suggest-7.5.0-snapshot-13b9e28f9d.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f7619348f0619867c52f4801531c70358f49873a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 new file mode 100644 index 0000000000000..8b722955278cf --- /dev/null +++ b/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 @@ -0,0 +1 @@ 
diff --git a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
index 3cc16ce9320d5..63db15b2ee168 100644
--- a/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/BinaryDocValuesRangeQuery.java
@@ -26,6 +26,7 @@
 import org.apache.lucene.search.ConstantScoreWeight;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.Weight;
@@ -58,7 +59,7 @@ public BinaryDocValuesRangeQuery(String fieldName, QueryType queryType, LengthType
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
         return new ConstantScoreWeight(this, boost) {

             @Override
diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
index cd5da674b8e71..dd3ac992475b9 100644
--- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java
@@ -22,7 +22,7 @@
 import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.index.TermContext;
+import org.apache.lucene.index.TermStates;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
@@ -84,10 +84,10 @@ public Query rewrite(IndexReader reader) throws IOException {
             return rewritten;
         }
         IndexReaderContext context = reader.getContext();
-        TermContext[] ctx = new TermContext[terms.length];
+        TermStates[] ctx = new TermStates[terms.length];
         int[] docFreqs = new int[ctx.length];
         for (int i = 0; i < terms.length; i++) {
-            ctx[i] = TermContext.build(context, terms[i]);
+            ctx[i] = TermStates.build(context, terms[i], true);
             docFreqs[i] = ctx[i].docFreq();
         }

@@ -96,16 +96,16 @@ public Query rewrite(IndexReader reader) throws IOException {
         return topLevelQuery(terms, ctx, docFreqs, maxDoc);
     }

-    protected abstract Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc);
+    protected abstract Query topLevelQuery(Term[] terms, TermStates[] ctx, int[] docFreqs, int maxDoc);

-    protected void blend(final TermContext[] contexts, int maxDoc, IndexReader reader) throws IOException {
+    protected void blend(final TermStates[] contexts, int maxDoc, IndexReader reader) throws IOException {
         if (contexts.length <= 1) {
             return;
         }
         int max = 0;
         long minSumTTF = Long.MAX_VALUE;
         for (int i = 0; i < contexts.length; i++) {
-            TermContext ctx = contexts[i];
+            TermStates ctx = contexts[i];
             int df = ctx.docFreq();
             // we use the max here since it's the only "true" estimation we can make here
             // at least max(df) documents have that term. Sum or Averages don't seem
@@ -155,7 +155,7 @@ protected int compare(int i, int j) {
             // the more popular (more frequent) fields
             // that acts as a tie breaker
             for (int i : tieBreak) {
-                TermContext ctx = contexts[i];
+                TermStates ctx = contexts[i];
                 if (ctx.docFreq() == 0) {
                     break;
                 }
@@ -183,12 +183,12 @@ protected int compare(int i, int j) {
         }
     }

-    private TermContext adjustTTF(IndexReaderContext readerContext, TermContext termContext, long sumTTF) {
+    private TermStates adjustTTF(IndexReaderContext readerContext, TermStates termContext, long sumTTF) throws IOException {
         assert termContext.wasBuiltFor(readerContext);
         if (sumTTF == -1 && termContext.totalTermFreq() == -1) {
             return termContext;
         }
-        TermContext newTermContext = new TermContext(readerContext);
+        TermStates newTermContext = new TermStates(readerContext);
         List<LeafReaderContext> leaves = readerContext.leaves();
         final int len;
         if (leaves == null) {
@@ -199,7 +199,7 @@ private TermContext adjustTTF(IndexReaderContext readerContext, TermContext term
         int df = termContext.docFreq();
         long ttf = sumTTF;
         for (int i = 0; i < len; i++) {
-            TermState termState = termContext.get(i);
+            TermState termState = termContext.get(leaves.get(i));
             if (termState == null) {
                 continue;
             }
@@ -210,7 +210,7 @@ private TermContext adjustTTF(IndexReaderContext readerContext, TermContext term
         return newTermContext;
     }

-    private static TermContext adjustDF(IndexReaderContext readerContext, TermContext ctx, int newDocFreq) {
+    private static TermStates adjustDF(IndexReaderContext readerContext, TermStates ctx, int newDocFreq) throws IOException {
         assert ctx.wasBuiltFor(readerContext);
         // Use a value of ttf that is consistent with the doc freq (ie. gte)
         long newTTF;
@@ -226,9 +226,9 @@ private static TermContext adjustDF(IndexReaderContext readerContext, TermContex
         } else {
             len = leaves.size();
         }
-        TermContext newCtx = new TermContext(readerContext);
+        TermStates newCtx = new TermStates(readerContext);
         for (int i = 0; i < len; ++i) {
-            TermState termState = ctx.get(i);
+            TermState termState = ctx.get(leaves.get(i));
             if (termState == null) {
                 continue;
             }
@@ -299,7 +299,7 @@ public int hashCode() {
     public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) {
         return new BlendedTermQuery(terms, boosts) {
             @Override
-            protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
+            protected Query topLevelQuery(Term[] terms, TermStates[] ctx, int[] docFreqs, int maxDoc) {
                 BooleanQuery.Builder highBuilder = new BooleanQuery.Builder();
                 BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
                 for (int i = 0; i < terms.length; i++) {
@@ -342,7 +342,7 @@ public static BlendedTermQuery dismaxBlendedQuery(Term[] terms, final float tieB
     public static BlendedTermQuery dismaxBlendedQuery(Term[] terms, final float[] boosts, final float tieBreakerMultiplier) {
         return new BlendedTermQuery(terms, boosts) {
             @Override
-            protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
+            protected Query topLevelQuery(Term[] terms, TermStates[] ctx, int[] docFreqs, int maxDoc) {
                 List<Query> queries = new ArrayList<>(ctx.length);
                 for (int i = 0; i < terms.length; i++) {
                     Query query = new TermQuery(terms[i], ctx[i]);
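The two hunks above migrate custom queries from the Lucene 7 createWeight(IndexSearcher, boolean needsScores, float boost) signature to the Lucene 8 createWeight(IndexSearcher, ScoreMode, float boost) one, and from TermContext to TermStates. As a minimal, hypothetical illustration of the new contract (the ExampleMatchAllQuery name and its constant-score behaviour are assumptions for the sketch, not part of this patch, and the three-argument ConstantScoreScorer constructor is the one from this Lucene snapshot):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    public final class ExampleMatchAllQuery extends Query {
        @Override
        public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
            // ScoreMode replaces the old boolean needsScores; callers may now also pass
            // TOP_SCORES, which allows scorers to skip non-competitive documents.
            return new ConstantScoreWeight(this, boost) {
                @Override
                public Scorer scorer(LeafReaderContext context) throws IOException {
                    // matches every document of the segment with a constant score
                    return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc()));
                }

                @Override
                public boolean isCacheable(LeafReaderContext ctx) {
                    return true;
                }
            };
        }

        @Override
        public String toString(String field) {
            return "ExampleMatchAllQuery";
        }

        @Override
        public boolean equals(Object other) {
            return sameClassAs(other);
        }

        @Override
        public int hashCode() {
            return classHash();
        }
    }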
diff --git a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java
index 0fed8316a0564..b9a001b6e7370 100644
--- a/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/MinDocQuery.java
@@ -26,6 +26,7 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;

@@ -76,7 +77,7 @@ public Query rewrite(IndexReader reader) throws IOException {
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
         if (readerId == null) {
             throw new IllegalStateException("Rewrite first");
         } else if (Objects.equals(searcher.getIndexReader().getContext().id(), readerId) == false) {
diff --git a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java
index 5da0e618752e2..2c436f0227222 100644
--- a/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java
+++ b/server/src/main/java/org/apache/lucene/queries/SearchAfterSortedDocQuery.java
@@ -23,16 +23,17 @@
 import org.apache.lucene.search.ConstantScoreScorer;
 import org.apache.lucene.search.ConstantScoreWeight;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.EarlyTerminatingSortingCollector;
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.LeafFieldComparator;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.Weight;
+import org.elasticsearch.common.lucene.Lucene;

 import java.io.IOException;
 import java.util.Arrays;
@@ -53,7 +54,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) {
             throw new IllegalArgumentException("after doc  has " + after.fields.length + " value(s) but sort has "
                 + sort.getSort().length + ".");
         }
-        this.sort = sort;
+        this.sort = Objects.requireNonNull(sort);
         this.after = after;
         int numFields = sort.getSort().length;
         this.fieldComparators = new FieldComparator[numFields];
@@ -70,12 +71,12 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) {
     }

     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
         return new ConstantScoreWeight(this, 1.0f) {
             @Override
             public Scorer scorer(LeafReaderContext context) throws IOException {
                 Sort segmentSort = context.reader().getMetaData().getSort();
-                if (EarlyTerminatingSortingCollector.canEarlyTerminate(sort, segmentSort) == false) {
+                if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) {
                     throw new IOException("search sort :[" + sort.getSort() + "] does not match the index sort:[" + segmentSort + "]");
                 }
                 final int afterDoc = after.doc - context.docBase;
diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
index c5362cbf85812..4dba67abdeb9a 100644
--- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
+++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.util.PriorityQueue;

 import java.util.ArrayList;
@@ -40,9 +41,9 @@ public final class CollapseTopFieldDocs extends TopFieldDocs {
     /** The collapse value for each top doc */
     public final Object[] collapseValues;

-    public CollapseTopFieldDocs(String field, long totalHits, ScoreDoc[] scoreDocs,
-                                SortField[] sortFields, Object[] values, float maxScore) {
-        super(totalHits, scoreDocs, sortFields, maxScore);
+    public CollapseTopFieldDocs(String field, TotalHits totalHits, ScoreDoc[] scoreDocs,
+                                SortField[] sortFields, Object[] values) {
+        super(totalHits, scoreDocs, sortFields);
         this.field = field;
         this.collapseValues = values;
     }
@@ -172,23 +173,23 @@ public static CollapseTopFieldDocs merge(Sort sort, int start, int size,
         long totalHitCount = 0;
         int availHitCount = 0;
-        float maxScore = Float.MIN_VALUE;
+        TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
         for (int shardIDX = 0; shardIDX < shardHits.length; shardIDX++) {
             final CollapseTopFieldDocs shard = shardHits[shardIDX];
             if (shard.scoreDocs != null && shard.scoreDocs.length > 0) {
                 availHitCount += shard.scoreDocs.length;
                 queue.add(new ShardRef(shardIDX, setShardIndex == false));
-                maxScore = Math.max(maxScore, shard.getMaxScore());
             }
         }

-        if (availHitCount == 0) {
-            maxScore = Float.NaN;
-        }
-
         final ScoreDoc[] hits;
         final Object[] values;
         if (availHitCount <= start) {
@@ -237,6 +238,7 @@ public static CollapseTopFieldDocs merge(Sort sort, int start, int size,
             hits = hitList.toArray(new ScoreDoc[0]);
             values = collapseList.toArray(new Object[0]);
         }
-        return new CollapseTopFieldDocs(collapseField, totalHitCount, hits, sort.getSort(), values, maxScore);
+        TotalHits totalHits = new TotalHits(totalHitCount, totalHitsRelation);
+        return new CollapseTopFieldDocs(collapseField, totalHits, hits, sort.getSort(), values);
     }
 }
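The merge logic above stops tracking a per-merge max score and instead propagates an explicit TotalHits carrying a relation. As a rough illustration of the new accounting (the shardResults list below is hypothetical, not from this patch): once any shard reports a lower bound, the merged relation degrades from EQUAL_TO to GREATER_THAN_OR_EQUAL_TO.

    import java.util.List;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TotalHits;

    final class TotalHitsMergeExample {
        // Sketch of how a merged TotalHits could be accumulated across shards.
        static TotalHits merge(List<TopDocs> shardResults) {
            long value = 0;
            TotalHits.Relation relation = TotalHits.Relation.EQUAL_TO;
            for (TopDocs shard : shardResults) {
                value += shard.totalHits.value;
                if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
                    // a lower bound on any shard makes the merged count a lower bound
                    relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
                }
            }
            return new TotalHits(value, relation);
        }
    }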
diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java
index fedda3ead596b..7f36074d1459b 100644
--- a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java
+++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java
@@ -20,9 +20,11 @@

 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TotalHits;

 import java.io.IOException;
 import java.util.Collection;
@@ -34,6 +36,9 @@
  * A collector that groups documents based on field values and returns {@link CollapseTopFieldDocs}
  * output. The collapsing is done in a single pass by selecting only the top sorted document per collapse key.
  * The value used for the collapse key of each group can be found in {@link CollapseTopFieldDocs#collapseValues}.
+ *
+ * TODO: If the sort is based on score we should propagate the minimum competitive score when orderedGroups is full.
+ * This is safe for collapsing since the group sort is the same as the query sort.
  */
 public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> {
     protected final String collapseField;
@@ -42,32 +47,23 @@ public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> {
     protected Scorer scorer;

     private int totalHitCount;
-    private float maxScore;
-    private final boolean trackMaxScore;

-    CollapsingTopDocsCollector(GroupSelector<T> groupSelector, String collapseField, Sort sort,
-                               int topN, boolean trackMaxScore) {
+    CollapsingTopDocsCollector(GroupSelector<T> groupSelector, String collapseField, Sort sort, int topN) {
         super(groupSelector, sort, topN);
         this.collapseField = collapseField;
-        this.trackMaxScore = trackMaxScore;
-        if (trackMaxScore) {
-            maxScore = Float.NEGATIVE_INFINITY;
-        } else {
-            maxScore = Float.NaN;
-        }
         this.sort = sort;
     }

     /**
-     * Transform {@link FirstPassGroupingCollector#getTopGroups(int, boolean)} output in
+     * Transform {@link FirstPassGroupingCollector#getTopGroups(int)} output in
      * {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can get the final top docs at the end
      * of the first pass.
      */
     public CollapseTopFieldDocs getTopDocs() throws IOException {
-        Collection<SearchGroup<T>> groups = super.getTopGroups(0, true);
+        Collection<SearchGroup<T>> groups = super.getTopGroups(0);
         if (groups == null) {
-            return new CollapseTopFieldDocs(collapseField, totalHitCount, new ScoreDoc[0],
-                sort.getSort(), new Object[0], Float.NaN);
+            TotalHits totalHits = new TotalHits(0, TotalHits.Relation.EQUAL_TO);
+            return new CollapseTopFieldDocs(collapseField, totalHits, new ScoreDoc[0], sort.getSort(), new Object[0]);
         }
         FieldDoc[] docs = new FieldDoc[groups.size()];
         Object[] collapseValues = new Object[groups.size()];
@@ -92,16 +88,17 @@ public CollapseTopFieldDocs getTopDocs() throws IOException {
             collapseValues[pos] = group.groupValue;
             pos++;
         }
-        return new CollapseTopFieldDocs(collapseField, totalHitCount, docs, sort.getSort(),
-            collapseValues, maxScore);
+        TotalHits totalHits = new TotalHits(totalHitCount, TotalHits.Relation.EQUAL_TO);
+        return new CollapseTopFieldDocs(collapseField, totalHits, docs, sort.getSort(), collapseValues);
     }

     @Override
-    public boolean needsScores() {
-        if (super.needsScores() == false) {
-            return trackMaxScore;
+    public ScoreMode scoreMode() {
+        if (super.scoreMode().needsScores()) {
+            return ScoreMode.COMPLETE;
+        } else {
+            return ScoreMode.COMPLETE_NO_SCORES;
         }
-        return true;
     }

     @Override
@@ -113,9 +110,6 @@ public void setScorer(Scorer scorer) throws IOException {
     @Override
     public void collect(int doc) throws IOException {
         super.collect(doc);
-        if (trackMaxScore) {
-            maxScore = Math.max(maxScore, scorer.score());
-        }
         totalHitCount++;
     }

@@ -134,9 +128,9 @@ public void collect(int doc) throws IOException {
      * @param topN How many top groups to keep.
      */
     public static CollapsingTopDocsCollector<?> createNumeric(String collapseField, Sort sort,
-                                                              int topN, boolean trackMaxScore) {
+                                                              int topN) {
         return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Numeric(collapseField),
-            collapseField, sort, topN, trackMaxScore);
+            collapseField, sort, topN);
     }

     /**
@@ -153,8 +147,8 @@ public static CollapsingTopDocsCollector<?> createNumeric(String collapseField,
      * @param topN How many top groups to keep.
      */
     public static CollapsingTopDocsCollector<?> createKeyword(String collapseField, Sort sort,
-                                                              int topN, boolean trackMaxScore) {
+                                                              int topN) {
         return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Keyword(collapseField),
-            collapseField, sort, topN, trackMaxScore);
+            collapseField, sort, topN);
     }
 }
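With trackMaxScore removed, a collector now advertises its scoring needs through scoreMode() instead of needsScores(). A minimal hypothetical collector under the new contract (the HitCountingCollector name is illustrative, not part of this patch):

    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.SimpleCollector;

    public final class HitCountingCollector extends SimpleCollector {
        private int count;

        @Override
        public void collect(int doc) {
            count++; // counting hits does not require scores
        }

        @Override
        public ScoreMode scoreMode() {
            // COMPLETE_NO_SCORES tells Lucene it may skip score computation entirely
            return ScoreMode.COMPLETE_NO_SCORES;
        }

        public int getCount() {
            return count;
        }
    }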
diff --git a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
index 45ee7becc983e..d9bf9613cba07 100644
--- a/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
+++ b/server/src/main/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighter.java
@@ -48,6 +48,7 @@
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.Predicate;

 /**
  * Subclass of the {@link UnifiedHighlighter} that works for a single field in a single document.
@@ -136,15 +137,16 @@ protected PassageFormatter getFormatter(String field) {

     @Override
     protected FieldHighlighter getFieldHighlighter(String field, Query query, Set<Term> allTerms, int maxPassages) {
-        BytesRef[] terms = filterExtractedTerms(getFieldMatcher(field), allTerms);
+        Predicate<String> fieldMatcher = getFieldMatcher(field);
+        BytesRef[] terms = filterExtractedTerms(fieldMatcher, allTerms);
         Set<HighlightFlag> highlightFlags = getFlags(field);
         PhraseHelper phraseHelper = getPhraseHelper(field, query, highlightFlags);
         CharacterRunAutomaton[] automata = getAutomata(field, query, highlightFlags);
         OffsetSource offsetSource = getOptimizedOffsetSource(field, terms, phraseHelper, automata);
         BreakIterator breakIterator = new SplittingBreakIterator(getBreakIterator(field),
             UnifiedHighlighter.MULTIVAL_SEP_CHAR);
-        FieldOffsetStrategy strategy =
-            getOffsetStrategy(offsetSource, field, terms, phraseHelper, automata, highlightFlags);
+        UHComponents components = new UHComponents(field, fieldMatcher, query, terms, phraseHelper, automata, highlightFlags);
+        FieldOffsetStrategy strategy = getOffsetStrategy(offsetSource, components);
         return new CustomFieldHighlighter(field, strategy, breakIteratorLocale, breakIterator,
             getScorer(field), maxPassages, (noMatchSize > 0 ? 1 : 0), getFormatter(field), noMatchSize, fieldValue);
     }
diff --git a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
index 6b670953ecbf0..16073abfc0087 100644
--- a/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
+++ b/server/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java
@@ -22,7 +22,6 @@
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.BlendedTermQuery;
-import org.apache.lucene.queries.BoostingQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MultiPhraseQuery;
@@ -74,12 +73,11 @@ void flatten(Query sourceQuery, IndexReader reader, Collection<Query> flatQueries,
         } else if (sourceQuery instanceof BlendedTermQuery) {
             final BlendedTermQuery blendedTermQuery = (BlendedTermQuery) sourceQuery;
             flatten(blendedTermQuery.rewrite(reader), reader, flatQueries, boost);
-        } else if (sourceQuery instanceof BoostingQuery) {
-            BoostingQuery boostingQuery = (BoostingQuery) sourceQuery;
-            //flatten positive query with query boost
-            flatten(boostingQuery.getMatch(), reader, flatQueries, boost);
-            //flatten negative query with negative boost
-            flatten(boostingQuery.getContext(), reader, flatQueries, boostingQuery.getBoost());
+        } else if (sourceQuery instanceof org.apache.lucene.queries.function.FunctionScoreQuery) {
+            org.apache.lucene.queries.function.FunctionScoreQuery funcScoreQuery =
+                (org.apache.lucene.queries.function.FunctionScoreQuery) sourceQuery;
+            //flatten query with query boost
+            flatten(funcScoreQuery.getWrappedQuery(), reader, flatQueries, boost);
         } else if (sourceQuery instanceof SynonymQuery) {
             // SynonymQuery should be handled by the parent class directly.
             // This statement should be removed when https://issues.apache.org/jira/browse/LUCENE-7484 is merged.
diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java
index 65dc1e2d23131..01738930b4bcf 100644
--- a/server/src/main/java/org/elasticsearch/Version.java
+++ b/server/src/main/java/org/elasticsearch/Version.java
@@ -105,7 +105,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0);
     public static final int V_7_0_0_alpha1_ID = 7000001;
     public static final Version V_7_0_0_alpha1 =
-        new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_5_0);
+        new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_8_0_0);
     public static final Version CURRENT = V_7_0_0_alpha1;

     static {
diff --git a/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java b/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java
new file mode 100644
index 0000000000000..071cd92330496
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.SimpleCollector;
+
+import java.io.IOException;
+
+/**
+ * A collector that computes the maximum score.
+ */
+public class MaxScoreCollector extends SimpleCollector {
+
+    private Scorer scorer;
+    private float maxScore = Float.NEGATIVE_INFINITY;
+    private boolean hasHits = false;
+
+    @Override
+    public void setScorer(Scorer scorer) {
+        this.scorer = scorer;
+    }
+
+    @Override
+    public ScoreMode scoreMode() {
+        // Could be TOP_SCORES but it is always used in a MultiCollector anyway, so this saves some wrapping.
+        return ScoreMode.COMPLETE;
+    }
+
+    @Override
+    public void collect(int doc) throws IOException {
+        hasHits = true;
+        maxScore = Math.max(maxScore, scorer.score());
+    }
+
+    /**
+     * Get the maximum score. This returns {@link Float#NaN} if no hits were
+     * collected.
+     */
+    public float getMaxScore() {
+        return hasHits ? maxScore : Float.NaN;
+    }
+
+}
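Since a Lucene 8 TopDocs no longer carries a max score, MaxScoreCollector is meant to be combined with a top-hits collector whenever the maximum score is still needed. A hedged usage sketch, assuming an IndexSearcher searcher and Query query are in scope and the single-argument TopScoreDocCollector.create factory of this Lucene snapshot:

    // Collect the top 10 hits and the max score in a single pass.
    TopScoreDocCollector topDocsCollector = TopScoreDocCollector.create(10);
    MaxScoreCollector maxScoreCollector = new MaxScoreCollector();
    searcher.search(query, MultiCollector.wrap(topDocsCollector, maxScoreCollector));

    TopDocs topDocs = topDocsCollector.topDocs();
    float maxScore = maxScoreCollector.getMaxScore(); // NaN when there were no hits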
diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
index fb450b2ce8359..9b4d232f23ca6 100644
--- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
+++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java
@@ -31,9 +31,12 @@
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
 import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
 import org.elasticsearch.common.collect.HppcMaps;
 import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchHit;
@@ -94,14 +97,15 @@ public AggregatedDfs aggregateDfs(Collection<DfsSearchResult> results) {
             assert terms.length == stats.length;
             for (int i = 0; i < terms.length; i++) {
                 assert terms[i] != null;
+                if (stats[i] == null) {
+                    continue;
+                }
                 TermStatistics existing = termStatistics.get(terms[i]);
                 if (existing != null) {
                     assert terms[i].bytes().equals(existing.term());
-                    // totalTermFrequency is an optional statistic we need to check if either one or both
-                    // are set to -1 which means not present and then set it globally to -1
                     termStatistics.put(terms[i], new TermStatistics(existing.term(),
-                        existing.docFreq() + stats[i].docFreq(),
-                        optionalSum(existing.totalTermFreq(), stats[i].totalTermFreq())));
+                        existing.docFreq() + stats[i].docFreq(),
+                        existing.totalTermFreq() + stats[i].totalTermFreq()));
                 } else {
                     termStatistics.put(terms[i], stats[i]);
                 }
@@ -115,14 +119,17 @@ public AggregatedDfs aggregateDfs(Collection<DfsSearchResult> results) {
                 if (keys[i] != null) {
                     String key = (String) keys[i];
                     CollectionStatistics value = (CollectionStatistics) values[i];
+                    if (value == null) {
+                        continue;
+                    }
                     assert key != null;
                     CollectionStatistics existing = fieldStatistics.get(key);
                     if (existing != null) {
-                        CollectionStatistics merged = new CollectionStatistics(
-                            key, existing.maxDoc() + value.maxDoc(),
-                            optionalSum(existing.docCount(), value.docCount()),
-                            optionalSum(existing.sumTotalTermFreq(), value.sumTotalTermFreq()),
-                            optionalSum(existing.sumDocFreq(), value.sumDocFreq())
+                        CollectionStatistics merged = new CollectionStatistics(key,
+                            existing.maxDoc() + value.maxDoc(),
+                            existing.docCount() + value.docCount(),
+                            existing.sumTotalTermFreq() + value.sumTotalTermFreq(),
+                            existing.sumDocFreq() + value.sumDocFreq()
                         );
                         fieldStatistics.put(key, merged);
                     } else {
@@ -135,10 +142,6 @@ public AggregatedDfs aggregateDfs(Collection<DfsSearchResult> results) {
         return new AggregatedDfs(termStatistics, fieldStatistics, aggMaxDoc);
     }

-    private static long optionalSum(long left, long right) {
-        return Math.min(left, right) == -1 ? -1 : left + right;
-    }
-
     /**
      * Returns a score doc array of top N search docs across all shards, followed by top suggest docs for each
      * named completion suggestion across all shards. If more than one named completion suggestion is specified in the
@@ -156,7 +159,7 @@ private static long optionalSum(long left, long right) {
      * @param size the number of hits to return from the merged top docs
      */
     public SortedTopDocs sortDocs(boolean ignoreFrom, Collection<? extends SearchPhaseResult> results,
-                                  final Collection bufferedTopDocs, final TopDocsStats topDocsStats, int from, int size) {
+                                  final Collection bufferedTopDocs, final TopDocsStats topDocsStats, int from, int size) {
         if (results.isEmpty()) {
             return SortedTopDocs.EMPTY;
         }
@@ -169,12 +172,12 @@ public SortedTopDocs sortDocs(boolean ignoreFrom, Collection<? extends SearchPhaseResult> results,
-                if (td.scoreDocs.length > 0) { // make sure we set the shard index before we add it - the consumer didn't do that yet
-                    setShardIndex(td, queryResult.getShardIndex());
-                    topDocs.add(td);
+                if (td.topDocs.scoreDocs.length > 0) { // make sure we set the shard index before we add it - the consumer didn't do that yet
+                    setShardIndex(td.topDocs, queryResult.getShardIndex());
+                    topDocs.add(td.topDocs);
                 }
             }
             if (queryResult.hasSuggestHits()) {
@@ -387,7 +390,9 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr
                 assert index < fetchResult.hits().getHits().length : "not enough hits fetched. index [" + index + "] length: "
                     + fetchResult.hits().getHits().length;
                 SearchHit searchHit = fetchResult.hits().getHits()[index];
-                searchHit.score(shardDoc.score);
+                if (sorted == false) {
+                    searchHit.score(shardDoc.score);
+                }
                 searchHit.shard(fetchResult.getSearchShardTarget());
                 if (sorted) {
                     FieldDoc fieldDoc = (FieldDoc) shardDoc;
@@ -683,10 +688,10 @@ private synchronized void consumeInternal(QuerySearchResult querySearchResult) {
                 aggsBuffer[i] = (InternalAggregations) querySearchResult.consumeAggs();
             }
             if (hasTopDocs) {
-                final TopDocs topDocs = querySearchResult.consumeTopDocs(); // can't be null
+                final TopDocsAndMaxScore topDocs = querySearchResult.consumeTopDocs(); // can't be null
                 topDocsStats.add(topDocs);
-                SearchPhaseController.setShardIndex(topDocs, querySearchResult.getShardIndex());
-                topDocsBuffer[i] = topDocs;
+                SearchPhaseController.setShardIndex(topDocs.topDocs, querySearchResult.getShardIndex());
+                topDocsBuffer[i] = topDocs.topDocs;
             }
         }

@@ -743,6 +748,7 @@ public ReducedQueryPhase reduce() {
     static final class TopDocsStats {
         final boolean trackTotalHits;
         long totalHits;
+        TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
         long fetchHits;
         float maxScore = Float.NEGATIVE_INFINITY;

@@ -755,13 +761,16 @@ static final class TopDocsStats {
             this.totalHits = trackTotalHits ? 0 : -1;
         }

-        void add(TopDocs topDocs) {
+        void add(TopDocsAndMaxScore topDocs) {
             if (trackTotalHits) {
-                totalHits += topDocs.totalHits;
+                totalHits += topDocs.topDocs.totalHits.value;
+                if (topDocs.topDocs.totalHits.relation == Relation.GREATER_THAN_OR_EQUAL_TO) {
+                    totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
+                }
             }
-            fetchHits += topDocs.scoreDocs.length;
-            if (!Float.isNaN(topDocs.getMaxScore())) {
-                maxScore = Math.max(maxScore, topDocs.getMaxScore());
+            fetchHits += topDocs.topDocs.scoreDocs.length;
+            if (!Float.isNaN(topDocs.maxScore)) {
+                maxScore = Math.max(maxScore, topDocs.maxScore);
             }
         }
     }
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java
index 031a537c37b34..7d13cff2ebd09 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsFields.java
@@ -22,7 +22,9 @@
 import com.carrotsearch.hppc.ObjectLongHashMap;
 import com.carrotsearch.hppc.cursors.ObjectLongCursor;
 import org.apache.lucene.index.Fields;
+import org.apache.lucene.index.ImpactsEnum;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.SlowImpactsEnum;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.search.BoostAttribute;
@@ -348,6 +350,11 @@ public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException {
                     : null, hasPayloads ? payloads : null, freq);
             }

+            @Override
+            public ImpactsEnum impacts(int flags) throws IOException {
+                return new SlowImpactsEnum(postings(null, flags));
+            }
+
         };
     }
diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java
index 8a54406c1f9cb..9aca80b533f66 100644
--- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java
+++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsWriter.java
@@ -112,13 +112,17 @@ void setFields(Fields termVectorsByField, Set selectedFields, EnumSet= -1);
diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
index 1c1e56878932d..6016c7cb4c45f 100644
--- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
+++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -50,13 +50,16 @@
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.ScorerSupplier;
+import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.SortedSetSortField;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopFieldDocs;
+import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.TwoPhaseIterator;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
@@ -73,6 +76,7 @@
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
 import org.elasticsearch.common.util.iterable.Iterables;
 import org.elasticsearch.index.analysis.AnalyzerScope;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
@@ -81,6 +85,7 @@
 import java.io.IOException;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
@@ -89,7 +94,7 @@ public class Lucene {
     public static final String LATEST_DOC_VALUES_FORMAT = "Lucene70";
     public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
-    public static final String LATEST_CODEC = "Lucene70";
+    public static final String LATEST_CODEC = "Lucene80";

     static {
         Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);
@@ -105,7 +110,7 @@ public class Lucene {

     public static final ScoreDoc[] EMPTY_SCORE_DOCS = new ScoreDoc[0];

-    public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(0, EMPTY_SCORE_DOCS, Float.NaN);
+    public static final TopDocs EMPTY_TOP_DOCS = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), EMPTY_SCORE_DOCS);

     public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) {
         if (version == null) {
@@ -251,7 +256,7 @@ protected Object doBody(String segmentFileName) throws IOException {
      * Check whether there is one or more documents matching the provided query.
      */
     public static boolean exists(IndexSearcher searcher, Query query) throws IOException {
-        final Weight weight = searcher.createNormalizedWeight(query, false);
+        final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
         // the scorer API should be more efficient at stopping after the first
         // match than the bulk scorer API
         for (LeafReaderContext context : searcher.getIndexReader().leaves()) {
@@ -270,19 +275,28 @@ public static boolean exists(IndexSearcher searcher, Query query) throws IOExcep
         return false;
     }

-    public static TopDocs readTopDocs(StreamInput in) throws IOException {
+    private static TotalHits readTotalHits(StreamInput in) throws IOException {
+        long totalHits = in.readVLong();
+        TotalHits.Relation totalHitsRelation = TotalHits.Relation.EQUAL_TO;
+        if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+            totalHitsRelation = in.readEnum(TotalHits.Relation.class);
+        }
+        return new TotalHits(totalHits, totalHitsRelation);
+    }
+
+    public static TopDocsAndMaxScore readTopDocs(StreamInput in) throws IOException {
         byte type = in.readByte();
         if (type == 0) {
-            long totalHits = in.readVLong();
+            TotalHits totalHits = readTotalHits(in);
             float maxScore = in.readFloat();

             ScoreDoc[] scoreDocs = new ScoreDoc[in.readVInt()];
             for (int i = 0; i < scoreDocs.length; i++) {
                 scoreDocs[i] = new ScoreDoc(in.readVInt(), in.readFloat());
             }
-            return new TopDocs(totalHits, scoreDocs, maxScore);
+            return new TopDocsAndMaxScore(new TopDocs(totalHits, scoreDocs), maxScore);
         } else if (type == 1) {
-            long totalHits = in.readVLong();
+            TotalHits totalHits = readTotalHits(in);
             float maxScore = in.readFloat();

             SortField[] fields = new SortField[in.readVInt()];
@@ -294,9 +308,9 @@ public static TopDocs readTopDocs(StreamInput in) throws IOException {
             for (int i = 0; i < fieldDocs.length; i++) {
                 fieldDocs[i] = readFieldDoc(in);
             }
-            return new TopFieldDocs(totalHits, fieldDocs, fields, maxScore);
+            return new TopDocsAndMaxScore(new TopFieldDocs(totalHits, fieldDocs, fields), maxScore);
         } else if (type == 2) {
-            long totalHits = in.readVLong();
+            TotalHits totalHits = readTotalHits(in);
             float maxScore = in.readFloat();

             String field = in.readString();
@@ -311,7 +325,7 @@ public static TopDocs readTopDocs(StreamInput in) throws IOException {
                 fieldDocs[i] = readFieldDoc(in);
                 collapseValues[i] = readSortValue(in);
             }
-            return new CollapseTopFieldDocs(field, totalHits, fieldDocs, fields, collapseValues, maxScore);
+            return new TopDocsAndMaxScore(new CollapseTopFieldDocs(field, totalHits, fieldDocs, fields, collapseValues), maxScore);
         } else {
             throw new IllegalStateException("Unknown type " + type);
         }
@@ -381,13 +395,22 @@ public static ScoreDoc readScoreDoc(StreamInput in) throws IOException {
     private static final Class<?> GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass();

-    public static void writeTopDocs(StreamOutput out, TopDocs topDocs) throws IOException {
-        if (topDocs instanceof CollapseTopFieldDocs) {
+    private static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException {
+        out.writeVLong(totalHits.value);
+        if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+            out.writeEnum(totalHits.relation);
+        } else if (totalHits.relation != TotalHits.Relation.EQUAL_TO) {
+            throw new IllegalArgumentException("Cannot serialize approximate total hit counts to nodes that are on a version < 7.0.0");
+        }
+    }
+
+    public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException {
+        if (topDocs.topDocs instanceof CollapseTopFieldDocs) {
             out.writeByte((byte) 2);
-            CollapseTopFieldDocs collapseDocs = (CollapseTopFieldDocs) topDocs;
+            CollapseTopFieldDocs collapseDocs = (CollapseTopFieldDocs) topDocs.topDocs;

-            out.writeVLong(topDocs.totalHits);
-            out.writeFloat(topDocs.getMaxScore());
+            writeTotalHits(out, topDocs.topDocs.totalHits);
+            out.writeFloat(topDocs.maxScore);

             out.writeString(collapseDocs.field);

@@ -396,35 +419,35 @@ public static void writeTopDocs(StreamOutput out, TopDocs topDocs) throws IOExce
                 writeSortField(out, sortField);
             }

-            out.writeVInt(topDocs.scoreDocs.length);
-            for (int i = 0; i < topDocs.scoreDocs.length; i++) {
+            out.writeVInt(topDocs.topDocs.scoreDocs.length);
+            for (int i = 0; i < topDocs.topDocs.scoreDocs.length; i++) {
                 ScoreDoc doc = collapseDocs.scoreDocs[i];
                 writeFieldDoc(out, (FieldDoc) doc);
                 writeSortValue(out, collapseDocs.collapseValues[i]);
             }
-        } else if (topDocs instanceof TopFieldDocs) {
+        } else if (topDocs.topDocs instanceof TopFieldDocs) {
             out.writeByte((byte) 1);
-            TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs;
+            TopFieldDocs topFieldDocs = (TopFieldDocs) topDocs.topDocs;

-            out.writeVLong(topDocs.totalHits);
-            out.writeFloat(topDocs.getMaxScore());
+            writeTotalHits(out, topDocs.topDocs.totalHits);
+            out.writeFloat(topDocs.maxScore);

             out.writeVInt(topFieldDocs.fields.length);
             for (SortField sortField : topFieldDocs.fields) {
                 writeSortField(out, sortField);
             }

-            out.writeVInt(topDocs.scoreDocs.length);
+            out.writeVInt(topDocs.topDocs.scoreDocs.length);
             for (ScoreDoc doc : topFieldDocs.scoreDocs) {
                 writeFieldDoc(out, (FieldDoc) doc);
             }
         } else {
             out.writeByte((byte) 0);
-            out.writeVLong(topDocs.totalHits);
-            out.writeFloat(topDocs.getMaxScore());
+            writeTotalHits(out, topDocs.topDocs.totalHits);
+            out.writeFloat(topDocs.maxScore);

-            out.writeVInt(topDocs.scoreDocs.length);
-            for (ScoreDoc doc : topDocs.scoreDocs) {
+            out.writeVInt(topDocs.topDocs.scoreDocs.length);
+            for (ScoreDoc doc : topDocs.topDocs.scoreDocs) {
                 writeScoreDoc(out, doc);
             }
         }
@@ -578,6 +601,24 @@ public static void writeSortField(StreamOutput out, SortField sortField) throws
         out.writeBoolean(sortField.getReverse());
     }

+    private static Number readExplanationValue(StreamInput in) throws IOException {
+        if (in.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+            final int numberType = in.readByte();
+            switch (numberType) {
+            case 0:
+                return in.readFloat();
+            case 1:
+                return in.readDouble();
+            case 2:
+                return in.readZLong();
+            default:
+                throw new IOException("Unexpected number type: " + numberType);
+            }
+        } else {
+            return in.readFloat();
+        }
+    }
+
     public static Explanation readExplanation(StreamInput in) throws IOException {
         boolean match = in.readBoolean();
         String description = in.readString();
@@ -586,12 +627,29 @@ public static Explanation readExplanation(StreamInput in) throws IOException {
             subExplanations[i] = readExplanation(in);
         }
         if (match) {
-            return Explanation.match(in.readFloat(), description, subExplanations);
+            return Explanation.match(readExplanationValue(in), description, subExplanations);
         } else {
             return Explanation.noMatch(description, subExplanations);
         }
     }

+    private static void writeExplanationValue(StreamOutput out, Number value) throws IOException {
+        if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) {
+            if (value instanceof Float) {
+                out.writeByte((byte) 0);
+                out.writeFloat(value.floatValue());
+            } else if (value instanceof Double) {
+                out.writeByte((byte) 1);
+                out.writeDouble(value.doubleValue());
+            } else {
+                out.writeByte((byte) 2);
+                out.writeZLong(value.longValue());
+            }
+        } else {
+            out.writeFloat(value.floatValue());
+        }
+    }
+
     public static void writeExplanation(StreamOutput out, Explanation explanation) throws IOException {
         out.writeBoolean(explanation.isMatch());
         out.writeString(explanation.getDescription());
@@ -601,7 +659,7 @@ public static void writeExplanation(StreamOutput out, Explanation explanation) t
             writeExplanation(out, subExp);
         }
         if (explanation.isMatch()) {
-            out.writeFloat(explanation.getValue());
+            writeExplanationValue(out, explanation.getValue());
         }
     }

@@ -705,6 +763,10 @@ public int docID() {
             public DocIdSetIterator iterator() {
                 throw new IllegalStateException(message);
             }
+            @Override
+            public float getMaxScore(int upTo) throws IOException {
+                throw new IllegalStateException(message);
+            }
         };
     }

@@ -836,6 +898,19 @@ public int length() {
         };
     }

+    /**
+     * Whether a query sorted by {@code searchSort} can be early-terminated if the index is sorted by {@code indexSort}.
+     */
+    public static boolean canEarlyTerminate(Sort searchSort, Sort indexSort) {
+        final SortField[] fields1 = searchSort.getSort();
+        final SortField[] fields2 = indexSort.getSort();
+        // early termination is possible if fields1 is a prefix of fields2
+        if (fields1.length > fields2.length) {
+            return false;
+        }
+        return Arrays.asList(fields1).equals(Arrays.asList(fields2).subList(0, fields1.length));
+    }
+
     /**
      * Wraps a directory reader to make all documents live except those were rolled back
      * or hard-deleted due to non-aborting exceptions during indexing.
FilterableTermsEnum(IndexReader reader, String field, int docsEnumFlag, @ } else { final IndexSearcher searcher = new IndexSearcher(reader); searcher.setQueryCache(null); - weight = searcher.createNormalizedWeight(filter, false); + weight = searcher.createWeight(searcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } for (LeafReaderContext context : leaves) { Terms terms = context.reader().terms(field); @@ -207,6 +209,11 @@ public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); } + @Override + public ImpactsEnum impacts(int flags) throws IOException { + throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); + } + @Override public BytesRef next() throws IOException { throw new UnsupportedOperationException(UNSUPPORTED_MESSAGE); diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java b/server/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java index e9db2928ca724..f1e55d76296ca 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.FilterLeafCollector; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -56,7 +57,7 @@ public void collect(int doc) throws IOException { } @Override - public boolean needsScores() { - return collector.needsScores(); + public ScoreMode scoreMode() { + return collector.scoreMode(); } } \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/common/lucene/search/TopDocsAndMaxScore.java similarity index 54% rename from server/src/main/java/org/elasticsearch/index/analysis/StandardTokenFilterFactory.java rename to server/src/main/java/org/elasticsearch/common/lucene/search/TopDocsAndMaxScore.java index 2339815b5582e..7cc1f9142de4f 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/TopDocsAndMaxScore.java @@ -17,23 +17,22 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.common.lucene.search; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.standard.StandardFilter; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; +import org.apache.lucene.search.TopDocs; +/** + * Wrapper around a {@link TopDocs} instance and the maximum score. + */ +// TODO: Remove this class when https://github.com/elastic/elasticsearch/issues/32981 is addressed. 
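+// Lucene no longer stores a maximum score on TopDocs itself; when callers
+// still need one it is computed separately (see MaxScoreCollector) and
+// carried next to its TopDocs in this wrapper.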
+public final class TopDocsAndMaxScore { -public class StandardTokenFilterFactory extends AbstractTokenFilterFactory { + public final TopDocs topDocs; + public float maxScore; - public StandardTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + public TopDocsAndMaxScore(TopDocs topDocs, float maxScore) { + this.topDocs = topDocs; + this.maxScore = maxScore; } - @Override - public TokenStream create(TokenStream tokenStream) { - return new StandardFilter(tokenStream); - } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java index 399f3d7a2e613..6d8a436c0b202 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/CombineFunction.java @@ -38,10 +38,10 @@ public float combine(double queryScore, double funcScore, double maxBoost) { public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = Explanation.match(maxBoost, "maxBoost"); Explanation minExpl = Explanation.match( - Math.min(funcExpl.getValue(), maxBoost), + Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, boostExpl); - return Explanation.match(queryExpl.getValue() * minExpl.getValue(), + return Explanation.match(queryExpl.getValue().floatValue() * minExpl.getValue().floatValue(), "function score, product of:", queryExpl, minExpl); } }, @@ -55,7 +55,7 @@ public float combine(double queryScore, double funcScore, double maxBoost) { public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation boostExpl = Explanation.match(maxBoost, "maxBoost"); return Explanation.match( - Math.min(funcExpl.getValue(), maxBoost), + Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, boostExpl); } @@ -69,9 +69,9 @@ public float combine(double queryScore, double funcScore, double maxBoost) { @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { - Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:", + Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); - return Explanation.match(Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue(), "sum of", + return Explanation.match(Math.min(funcExpl.getValue().floatValue(), maxBoost) + queryExpl.getValue().floatValue(), "sum of", queryExpl, minExpl); } @@ -84,10 +84,10 @@ public float combine(double queryScore, double funcScore, double maxBoost) { @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { - Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue(), maxBoost), "min of:", + Explanation minExpl = Explanation.match(Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); return Explanation.match( - (float) ((Math.min(funcExpl.getValue(), maxBoost) + queryExpl.getValue()) / 2.0), "avg of", + (float) ((Math.min(funcExpl.getValue().floatValue(), maxBoost) + queryExpl.getValue().floatValue()) / 2.0), "avg of", queryExpl, minExpl); } @@ -101,10 +101,10 @@ public float combine(double 
queryScore, double funcScore, double maxBoost) { @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( - Math.min(funcExpl.getValue(), maxBoost), "min of:", + Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); return Explanation.match( - Math.min(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "min of", + Math.min(Math.min(funcExpl.getValue().floatValue(), maxBoost), queryExpl.getValue().floatValue()), "min of", queryExpl, innerMinExpl); } @@ -118,10 +118,10 @@ public float combine(double queryScore, double funcScore, double maxBoost) { @Override public Explanation explain(Explanation queryExpl, Explanation funcExpl, float maxBoost) { Explanation innerMinExpl = Explanation.match( - Math.min(funcExpl.getValue(), maxBoost), "min of:", + Math.min(funcExpl.getValue().floatValue(), maxBoost), "min of:", funcExpl, Explanation.match(maxBoost, "maxBoost")); return Explanation.match( - Math.max(Math.min(funcExpl.getValue(), maxBoost), queryExpl.getValue()), "max of:", + Math.max(Math.min(funcExpl.getValue().floatValue(), maxBoost), queryExpl.getValue().floatValue()), "max of:", queryExpl, innerMinExpl); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java index c49487cfb7eb4..fb5a82bc098e2 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java @@ -90,7 +90,7 @@ public double score(int docId, float subQueryScore) throws IOException { public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { String modifierStr = modifier != null ? modifier.toString() : ""; String defaultStr = missing != null ? "?:" + missing : ""; - double score = score(docId, subQueryScore.getValue()); + double score = score(docId, subQueryScore.getValue().floatValue()); return Explanation.match( (float) score, String.format(Locale.ROOT, diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index c2263fc201e18..05b74a8b7fe3c 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -212,22 +212,27 @@ public Query rewrite(IndexReader reader) throws IOException { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { - if (needsScores == false && minScore == null) { - return subQuery.createWeight(searcher, needsScores, boost); + public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException { + if (scoreMode == org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES && minScore == null) { + return subQuery.createWeight(searcher, scoreMode, boost); } - boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE; + org.apache.lucene.search.ScoreMode subQueryScoreMode = combineFunction != CombineFunction.REPLACE + ? 
org.apache.lucene.search.ScoreMode.COMPLETE + : org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES; Weight[] filterWeights = new Weight[functions.length]; for (int i = 0; i < functions.length; ++i) { - subQueryNeedsScores |= functions[i].needsScores(); + if (functions[i].needsScores()) { + subQueryScoreMode = org.apache.lucene.search.ScoreMode.COMPLETE; + } if (functions[i] instanceof FilterScoreFunction) { Query filter = ((FilterScoreFunction) functions[i]).filter; - filterWeights[i] = searcher.createNormalizedWeight(filter, false); + filterWeights[i] = searcher.createWeight(searcher.rewrite(filter), + org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f); } } - Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost); - return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores); + Weight subQueryWeight = subQuery.createWeight(searcher, subQueryScoreMode, boost); + return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryScoreMode.needsScores()); } class CustomBoostFactorWeight extends Weight { @@ -299,10 +304,9 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio ScoreFunction function = functions[i]; Explanation functionExplanation = function.getLeafScoreFunction(context).explainScore(doc, expl); if (function instanceof FilterScoreFunction) { - double factor = functionExplanation.getValue(); - float sc = (float) factor; + float factor = functionExplanation.getValue().floatValue(); Query filterQuery = ((FilterScoreFunction) function).filter; - Explanation filterExplanation = Explanation.match(sc, "function score, product of:", + Explanation filterExplanation = Explanation.match(factor, "function score, product of:", Explanation.match(1.0f, "match filter: " + filterQuery.toString()), functionExplanation); functionsExplanations.add(filterExplanation); } else { @@ -319,14 +323,14 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio FunctionFactorScorer scorer = functionScorer(context); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); - double score = scorer.computeScore(doc, expl.getValue()); + double score = scorer.computeScore(doc, expl.getValue().floatValue()); factorExplanation = Explanation.match( (float) score, "function score, score mode [" + scoreMode.toString().toLowerCase(Locale.ROOT) + "]", functionsExplanations); } expl = combineFunction.explain(expl, factorExplanation, maxBoost); } - if (minScore != null && minScore > expl.getValue()) { + if (minScore != null && minScore > expl.getValue().floatValue()) { expl = Explanation.noMatch("Score value is too low, expected at least " + minScore + " but got " + expl.getValue(), expl); } return expl; @@ -442,6 +446,11 @@ protected double computeScore(int docId, float subQueryScore) throws IOException } return factor; } + + @Override + public float getMaxScore(int upTo) throws IOException { + return Float.MAX_VALUE; // TODO: what would be a good upper bound? 
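+            // Float.MAX_VALUE is always a safe answer here: it tells the new
+            // max-score API that no tighter bound is known, which disables
+            // score-based skipping for this scorer rather than breaking it.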
+ } } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 8e21c1af41aef..5296926e9869d 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -59,6 +59,16 @@ public float score() throws IOException { return in.score(); } + @Override + public int advanceShallow(int target) throws IOException { + return in.advanceShallow(target); + } + + @Override + public float getMaxScore(int upTo) throws IOException { + return in.getMaxScore(upTo); + } + @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java index a104a416cc6bf..8694b6fa019f1 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java @@ -84,7 +84,7 @@ public double score(int docId, float subQueryScore) throws IOException { public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { String field = fieldData == null ? null : fieldData.getFieldName(); return Explanation.match( - (float) score(docId, subQueryScore.getValue()), + (float) score(docId, subQueryScore.getValue().floatValue()), "random score function (seed: " + originalSeed + ", field: " + field + ")"); } }; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 7f8b10349bc7d..bf1ea637a9671 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -54,6 +54,11 @@ public float score() throws IOException { public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); } + + @Override + public float getMaxScore(int upTo) throws IOException { + throw new UnsupportedOperationException(); + } } private final Script sScript; @@ -88,10 +93,10 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE if (leafScript instanceof ExplainableSearchScript) { leafScript.setDocument(docId); scorer.docid = docId; - scorer.score = subQueryScore.getValue(); + scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableSearchScript) leafScript).explain(subQueryScore); } else { - double score = score(docId, subQueryScore.getValue()); + double score = score(docId, subQueryScore.getValue().floatValue()); String explanation = "script score function, computed with script:\"" + sScript + "\""; if (sScript.getParams() != null) { explanation += " and parameters: \n" + sScript.getParams().toString(); diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java index 7d96426e8695e..87f6b21e9da2b 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java @@ -60,7 +60,7 @@ public double score(int docId, float subQueryScore) throws IOException { public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { Explanation functionExplanation = leafFunction.explainScore(docId, subQueryScore); return Explanation.match( - functionExplanation.getValue() * (float) getWeight(), "product of:", + functionExplanation.getValue().floatValue() * (float) getWeight(), "product of:", functionExplanation, explainWeight()); } }; diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index d8af8d3ead1f4..52439f7c89d14 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.settings; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.collect.Tuple; @@ -437,7 +437,7 @@ void validate( final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalOrPrivateIndex) { Setting setting = getRaw(key); if (setting == null) { - LevensteinDistance ld = new LevensteinDistance(); + LevenshteinDistance ld = new LevenshteinDistance(); List> scoredKeys = new ArrayList<>(); for (String k : this.keySettings.keySet()) { float distance = ld.getDistance(key, k); diff --git a/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java new file mode 100644 index 0000000000000..eb15ee130521e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/CachedSupplier.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.util; + +import java.util.function.Supplier; + +/** + * A {@link Supplier} that caches its return value. This may be useful to make + * a {@link Supplier} idempotent or for performance reasons if always returning + * the same instance is acceptable. 
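+ * Note that {@link #get()} is synchronized, so the wrapped supplier is
+ * invoked at most once even under concurrent access. For example (with
+ * {@code expensiveResult} standing in for any costly call),
+ * {@code new CachedSupplier<>(() -> expensiveResult())} computes the value
+ * on the first {@code get()} and returns the cached instance afterwards.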
+ */ +public final class CachedSupplier implements Supplier { + + private Supplier supplier; + private T result; + private boolean resultSet; + + public CachedSupplier(Supplier supplier) { + this.supplier = supplier; + } + + @Override + public synchronized T get() { + if (resultSet == false) { + result = supplier.get(); + resultSet = true; + } + return result; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/StopAnalyzerProvider.java b/server/src/main/java/org/elasticsearch/index/analysis/StopAnalyzerProvider.java index f3559a650704f..d78d914a5eca6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/StopAnalyzerProvider.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/StopAnalyzerProvider.java @@ -21,6 +21,7 @@ import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.core.StopAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -32,7 +33,7 @@ public class StopAnalyzerProvider extends AbstractIndexAnalyzerProvider codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene70Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene70Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene80Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene80Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index bf1e48e7a6b27..dfbbf350dcb47 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -23,7 +23,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene70.Lucene70Codec; +import org.apache.lucene.codecs.lucene80.Lucene80Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -37,8 +37,7 @@ * per index in real time via the mapping API. If no specific postings format is * configured for a specific field the default postings format is used. 
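 * (The lookup goes through the {@code MapperService} held by this codec,
 * which is why {@code CodecService} above only registers it when a mapper
 * service is available and falls back to the plain codec otherwise.)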
*/ -// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version -public class PerFieldMappingPostingFormatCodec extends Lucene70Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene80Codec { private final Logger logger; private final MapperService mapperService; diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 2bca31f3bc88f..a44f8a0f8357b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -104,7 +104,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.indexSearcher.setQueryCache(null); this.parallelArray = new ParallelArray(searchBatchSize); final TopDocs topDocs = searchOperations(null); - this.totalHits = Math.toIntExact(topDocs.totalHits); + this.totalHits = Math.toIntExact(topDocs.totalHits.value); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index fde97562de8f8..7faed37b2fd36 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -40,6 +40,7 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; @@ -72,7 +73,7 @@ static CodecReader wrapReader(String recoverySourceField, CodecReader reader, Su builder.add(retainSourceQuerySupplier.get(), BooleanClause.Occur.FILTER); IndexSearcher s = new IndexSearcher(reader); s.setQueryCache(null); - Weight weight = s.createWeight(s.rewrite(builder.build()), false, 1.0f); + Weight weight = s.createWeight(s.rewrite(builder.build()), ScoreMode.COMPLETE_NO_SCORES, 1.0f); Scorer scorer = weight.scorer(reader.getContext()); if (scorer != null) { return new SourcePruningFilterCodecReader(recoverySourceField, reader, BitSet.of(scorer.iterator(), reader.maxDoc())); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index da3dc75f4ef52..6896432bcdd55 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -28,6 +28,7 @@ import org.apache.lucene.search.FieldComparatorSource; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -155,7 +156,7 @@ public BitSet rootDocs(LeafReaderContext ctx) throws IOException { public DocIdSetIterator innerDocs(LeafReaderContext ctx) throws IOException { final IndexReaderContext topLevelCtx = ReaderUtil.getTopLevelContext(ctx); IndexSearcher indexSearcher = new IndexSearcher(topLevelCtx); - Weight weight = indexSearcher.createNormalizedWeight(innerQuery, false); + Weight 
weight = indexSearcher.createWeight(indexSearcher.rewrite(innerQuery), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(ctx); return s == null ? null : s.iterator(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 663aa7e6f9e10..fa1abe4293948 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -22,6 +22,7 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -293,7 +294,7 @@ public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, Le } // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and // therefor is guaranteed to be a live doc. - final Weight nestedWeight = filter.createWeight(sc.searcher(), false, 1f); + final Weight nestedWeight = filter.createWeight(sc.searcher(), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer scorer = nestedWeight.scorer(context); if (scorer == null) { continue; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 29f1cbb721feb..f7bcab21d723d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -296,7 +296,7 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { @Override protected TokenStreamComponents wrapComponents(String fieldName, TokenStreamComponents components) { - TokenFilter filter = new EdgeNGramTokenFilter(components.getTokenStream(), minChars, maxChars); + TokenFilter filter = new EdgeNGramTokenFilter(components.getTokenStream(), minChars, maxChars, false); return new TokenStreamComponents(components.getTokenizer(), filter); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index a5a675e96dc93..162ce2a3fde61 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -25,7 +25,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermContext; +import org.apache.lucene.index.TermStates; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -216,7 +216,7 @@ public Query rewrite(IndexReader reader) throws IOException { for (BytesRef type : types) { if (uniqueTypes.add(type)) { Term term = new Term(CONTENT_TYPE, type); - TermContext context = TermContext.build(reader.getContext(), term); + TermStates context = TermStates.build(reader.getContext(), term, true); if (context.docFreq() == 0) { // this _type is not present in the reader continue; diff --git a/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java index 35b0d18b1e88c..f3e6f6c8061e6 100644 --- 
a/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BoostingQueryBuilder.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.queries.BoostingQuery; +import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -201,7 +201,7 @@ public String getWriteableName() { protected Query doToQuery(QueryShardContext context) throws IOException { Query positive = positiveQuery.toQuery(context); Query negative = negativeQuery.toQuery(context); - return new BoostingQuery(positive, negative, negativeBoost); + return FunctionScoreQuery.boostByQuery(positive, negative, negativeBoost); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 8d7c0190eb210..d2b432e7c7ca1 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -22,22 +22,26 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ParentChildrenBlockJoinQuery; import org.apache.lucene.search.join.ScoreMode; +import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.ObjectMapper; @@ -365,9 +369,9 @@ static final class NestedInnerHitSubContext extends InnerHitsContext.InnerHitSub } @Override - public TopDocs[] topDocs(SearchHit[] hits) throws IOException { + public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException { Weight innerHitQueryWeight = createInnerHitQueryWeight(); - TopDocs[] result = new TopDocs[hits.length]; + TopDocsAndMaxScore[] result = new TopDocsAndMaxScore[hits.length]; for (int i = 0; i < hits.length; i++) { SearchHit hit = hits[i]; Query rawParentFilter; @@ -385,25 +389,38 @@ public TopDocs[] topDocs(SearchHit[] hits) throws IOException { Query childFilter = childObjectMapper.nestedTypeFilter(); BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter); Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId); - Weight weight = context.searcher().createNormalizedWeight(q, false); + Weight weight = 
context.searcher().createWeight(context.searcher().rewrite(q), + org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f); if (size() == 0) { TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector(); intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx); - result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, Float.NaN); + result[i] = new TopDocsAndMaxScore(new TopDocs(new TotalHits(totalHitCountCollector.getTotalHits(), + TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN); } else { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); TopDocsCollector topDocsCollector; + MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores(), true); + topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + if (trackScores()) { + maxScoreCollector = new MaxScoreCollector(); + } } else { - topDocsCollector = TopScoreDocCollector.create(topN); + topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + maxScoreCollector = new MaxScoreCollector(); } try { - intersect(weight, innerHitQueryWeight, topDocsCollector, ctx); + intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); } finally { clearReleasables(Lifetime.COLLECTION); } - result[i] = topDocsCollector.topDocs(from(), size()); + + TopDocs td = topDocsCollector.topDocs(from(), size()); + float maxScore = Float.NaN; + if (maxScoreCollector != null) { + maxScore = maxScoreCollector.getMaxScore(); + } + result[i] = new TopDocsAndMaxScore(td, maxScore); } } return result; diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index c08f342d50846..50586aa2522ad 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -169,7 +170,7 @@ public int hashCode() { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java index 637d93212912f..6ea068176b41e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SpanMultiTermQueryBuilder.java @@ -20,7 +20,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.TermContext; +import org.apache.lucene.index.TermStates; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -165,7 +165,7 @@ protected void 
checkMaxClauseCount(int count) { } @Override - protected void addClause(List topLevel, Term term, int docCount, float boost, TermContext states) { + protected void addClause(List topLevel, Term term, int docCount, float boost, TermStates states) { SpanTermQuery q = new SpanTermQuery(term, states); topLevel.add(q); } diff --git a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index 54c25b40501d2..7d6dd4a59cb19 100644 --- a/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -554,7 +554,7 @@ public Explanation explainScore(int docId, Explanation subQueryScore) throws IOE } double value = distance.doubleValue(); return Explanation.match( - (float) score(docId, subQueryScore.getValue()), + (float) score(docId, subQueryScore.getValue().floatValue()), "Function for field " + getFieldName() + ":", func.explainFunction(getDistanceString(ctx, docId), value, scale)); } diff --git a/server/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java b/server/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java index 3762b1fffc067..5e6aa3bb7c456 100644 --- a/server/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java +++ b/server/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java @@ -75,8 +75,8 @@ public Query rewrite(IndexReader reader) throws IOException { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { - return query.createWeight(searcher, needsScores, boost); + public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException { + return query.createWeight(searcher, scoreMode, boost); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index a2e738128e3eb..a6949c0559722 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -89,7 +89,7 @@ public final Engine.Searcher wrap(Engine.Searcher engineSearcher) throws IOExcep final IndexSearcher innerIndexSearcher = new IndexSearcher(reader); innerIndexSearcher.setQueryCache(origIndexSearcher.getQueryCache()); innerIndexSearcher.setQueryCachingPolicy(origIndexSearcher.getQueryCachingPolicy()); - innerIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity(true)); + innerIndexSearcher.setSimilarity(origIndexSearcher.getSimilarity()); // TODO: Right now IndexSearcher isn't wrapper friendly, when it becomes wrapper friendly we should revise this extension point // For example if IndexSearcher#rewrite() is overwritten than also IndexSearcher#createNormalizedWeight needs to be overwritten // This needs to be fixed before we can allow the IndexSearcher from Engine to be wrapped multiple times diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index e27c68c7570a7..a22193974272c 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ 
b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -73,7 +74,7 @@ final class ShardSplittingQuery extends Query { this.nestedParentBitSetProducer = hasNested ? newParentDocBitSetProducer(indexMetaData.getCreationVersion()) : null; } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { @Override public String toString() { @@ -348,7 +349,7 @@ private static BitSetProducer newParentDocBitSetProducer(Version indexVersionCre final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context); final IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); - final Weight weight = searcher.createNormalizedWeight(query, false); + final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer s = weight.scorer(context); return s == null ? null : BitSet.of(s.iterator(), context.reader().maxDoc()); }; diff --git a/server/src/main/java/org/elasticsearch/index/similarity/ScriptedSimilarity.java b/server/src/main/java/org/elasticsearch/index/similarity/ScriptedSimilarity.java index aea18c30a6907..7e3efacfa20be 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/ScriptedSimilarity.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/ScriptedSimilarity.java @@ -20,19 +20,14 @@ package org.elasticsearch.index.similarity; import org.apache.lucene.index.FieldInvertState; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.similarities.Similarity; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.SmallFloat; import org.elasticsearch.script.SimilarityScript; import org.elasticsearch.script.SimilarityWeightScript; -import java.io.IOException; - /** * A {@link Similarity} implementation that allows scores to be scripted. */ @@ -65,8 +60,18 @@ public long computeNorm(FieldInvertState state) { return SmallFloat.intToByte4(numTerms); } + /** Compute the part of the score that does not depend on the current document using the init_script. */ + private double computeWeight(Query query, Field field, Term term) { + if (weightScriptFactory == null) { + return 1d; + } + SimilarityWeightScript weightScript = weightScriptFactory.newInstance(); + return weightScript.execute(query, field, term); + } + @Override - public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) { + public SimScorer scorer(float boost, + CollectionStatistics collectionStats, TermStatistics... 
termStats) { Query query = new Query(boost); long docCount = collectionStats.docCount(); if (docCount == -1) { @@ -77,58 +82,32 @@ public SimWeight computeWeight(float boost, CollectionStatistics collectionStats for (int i = 0; i < termStats.length; ++i) { terms[i] = new Term(termStats[i].docFreq(), termStats[i].totalTermFreq()); } - return new Weight(collectionStats.field(), query, field, terms); - } - - /** Compute the part of the score that does not depend on the current document using the init_script. */ - private double computeWeight(Query query, Field field, Term term) throws IOException { - if (weightScriptFactory == null) { - return 1d; - } - SimilarityWeightScript weightScript = weightScriptFactory.newInstance(); - return weightScript.execute(query, field, term); - } - @Override - public SimScorer simScorer(SimWeight w, LeafReaderContext context) throws IOException { - Weight weight = (Weight) w; - SimScorer[] scorers = new SimScorer[weight.terms.length]; - for (int i = 0; i < weight.terms.length; ++i) { - final Term term = weight.terms[i]; + SimScorer[] scorers = new SimScorer[terms.length]; + for (int i = 0; i < terms.length; ++i) { + final Term term = terms[i]; final SimilarityScript script = scriptFactory.newInstance(); - final NumericDocValues norms = context.reader().getNormValues(weight.fieldName); - final Doc doc = new Doc(norms); - final double scoreWeight = computeWeight(weight.query, weight.field, term); + final Doc doc = new Doc(); + final double scoreWeight = computeWeight(query, field, term); scorers[i] = new SimScorer() { @Override - public float score(int docID, float freq) throws IOException { - doc.docID = docID; + public float score(float freq, long norm) { doc.freq = freq; - return (float) script.execute(scoreWeight, weight.query, weight.field, term, doc); + doc.norm = norm; + return (float) script.execute(scoreWeight, query, field, term, doc); } @Override - public float computeSlopFactor(int distance) { - return 1.0f / (distance + 1); - } - - @Override - public float computePayloadFactor(int doc, int start, int end, BytesRef payload) { - return 1f; - } - - @Override - public Explanation explain(int docID, Explanation freq) throws IOException { - doc.docID = docID; - float score = score(docID, freq.getValue()); + public Explanation explain(Explanation freq, long norm) { + float score = score(freq.getValue().floatValue(), norm); return Explanation.match(score, "score from " + ScriptedSimilarity.this.toString() + " computed from:", Explanation.match((float) scoreWeight, "weight"), - Explanation.match(weight.query.boost, "query.boost"), - Explanation.match(weight.field.docCount, "field.docCount"), - Explanation.match(weight.field.sumDocFreq, "field.sumDocFreq"), - Explanation.match(weight.field.sumTotalTermFreq, "field.sumTotalTermFreq"), + Explanation.match(query.boost, "query.boost"), + Explanation.match(field.docCount, "field.docCount"), + Explanation.match(field.sumDocFreq, "field.sumDocFreq"), + Explanation.match(field.sumTotalTermFreq, "field.sumTotalTermFreq"), Explanation.match(term.docFreq, "term.docFreq"), Explanation.match(term.totalTermFreq, "term.totalTermFreq"), Explanation.match(freq.getValue(), "doc.freq", freq.getDetails()), @@ -143,50 +122,26 @@ public Explanation explain(int docID, Explanation freq) throws IOException { return new SimScorer() { @Override - public float score(int doc, float freq) throws IOException { + public float score(float freq, long norm) { double sum = 0; for (SimScorer scorer : scorers) { - sum += scorer.score(doc, freq); 
+ sum += scorer.score(freq, norm); } return (float) sum; } @Override - public float computeSlopFactor(int distance) { - return 1.0f / (distance + 1); - } - - @Override - public float computePayloadFactor(int doc, int start, int end, BytesRef payload) { - return 1f; - } - - @Override - public Explanation explain(int doc, Explanation freq) throws IOException { + public Explanation explain(Explanation freq, long norm) { Explanation[] subs = new Explanation[scorers.length]; for (int i = 0; i < subs.length; ++i) { - subs[i] = scorers[i].explain(doc, freq); + subs[i] = scorers[i].explain(freq, norm); } - return Explanation.match(score(doc, freq.getValue()), "Sum of:", subs); + return Explanation.match(score(freq.getValue().floatValue(), norm), "Sum of:", subs); } }; } } - private static class Weight extends SimWeight { - private final String fieldName; - private final Query query; - private final Field field; - private final Term[] terms; - - Weight(String fieldName, Query query, Field field, Term[] terms) { - this.fieldName = fieldName; - this.query = query; - this.field = field; - this.terms = terms; - } - } - /** Scoring factors that come from the query. */ public static class Query { private final float boost; @@ -254,25 +209,16 @@ public long getTotalTermFreq() { /** Statistics that are specific to a document. */ public static class Doc { - private final NumericDocValues norms; - private int docID; private float freq; + private long norm; - private Doc(NumericDocValues norms) { - this.norms = norms; - } + private Doc() {} /** Return the number of tokens that the current document has in the considered field. */ - public int getLength() throws IOException { + public int getLength() { // the length is computed lazily so that similarities that do not use the length are // not penalized - if (norms == null) { - return 1; - } else if (norms.advanceExact(docID)) { - return SmallFloat.byte4ToInt((byte) norms.longValue()); - } else { - return 0; - } + return SmallFloat.byte4ToInt((byte) norm); } /** Return the number of occurrences of the term in the current document for the considered field. 
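 * (Under the new API this is simply the {@code freq} argument that Lucene
 * passes to {@code SimScorer#score(float, long)}, stored on the {@code Doc}
 * just before the script is invoked.)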
*/
diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java
index 18c6d6a3fc063..9aab1260b6b48 100644
--- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java
+++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java
@@ -24,13 +24,10 @@
 import org.apache.lucene.search.similarities.AfterEffectL;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.BasicModel;
-import org.apache.lucene.search.similarities.BasicModelBE;
-import org.apache.lucene.search.similarities.BasicModelD;
 import org.apache.lucene.search.similarities.BasicModelG;
 import org.apache.lucene.search.similarities.BasicModelIF;
 import org.apache.lucene.search.similarities.BasicModelIn;
 import org.apache.lucene.search.similarities.BasicModelIne;
-import org.apache.lucene.search.similarities.BasicModelP;
 import org.apache.lucene.search.similarities.BooleanSimilarity;
 import org.apache.lucene.search.similarities.ClassicSimilarity;
 import org.apache.lucene.search.similarities.DFISimilarity;
@@ -74,24 +71,35 @@ private SimilarityProviders() {} // no instantiation
     static final String DISCOUNT_OVERLAPS = "discount_overlaps";

     private static final Map<String, BasicModel> BASIC_MODELS;
+    private static final Map<String, String> LEGACY_BASIC_MODELS;
     private static final Map<String, AfterEffect> AFTER_EFFECTS;
+    private static final Map<String, String> LEGACY_AFTER_EFFECTS;

     static {
         Map<String, BasicModel> models = new HashMap<>();
-        models.put("be", new BasicModelBE());
-        models.put("d", new BasicModelD());
         models.put("g", new BasicModelG());
         models.put("if", new BasicModelIF());
         models.put("in", new BasicModelIn());
         models.put("ine", new BasicModelIne());
-        models.put("p", new BasicModelP());
         BASIC_MODELS = unmodifiableMap(models);

+        Map<String, String> legacyModels = new HashMap<>();
+        // TODO: be and g are both based on the Bose-Einstein model.
+        // Is there a better replacement for d and p, which use the binomial model?
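+        // These fallbacks are honoured only for indices created before 7.0:
+        // parseBasicModel below substitutes the replacement and logs a
+        // deprecation warning, while 7.0+ indices reject the legacy names.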
+ legacyModels.put("be", "g"); + legacyModels.put("d", "ine"); + legacyModels.put("p", "ine"); + LEGACY_BASIC_MODELS = unmodifiableMap(legacyModels); + Map effects = new HashMap<>(); - effects.put("no", new AfterEffect.NoAfterEffect()); effects.put("b", new AfterEffectB()); effects.put("l", new AfterEffectL()); AFTER_EFFECTS = unmodifiableMap(effects); + + Map legacyEffects = new HashMap<>(); + // l is simpler than b, so this should be a better replacement for "no" + legacyEffects.put("no", "l"); + LEGACY_AFTER_EFFECTS = unmodifiableMap(legacyEffects); } private static final Map INDEPENDENCE_MEASURES; @@ -124,9 +132,25 @@ private SimilarityProviders() {} // no instantiation * @param settings Settings to parse * @return {@link BasicModel} referred to in the Settings */ - private static BasicModel parseBasicModel(Settings settings) { + private static BasicModel parseBasicModel(Version indexCreatedVersion, Settings settings) { String basicModel = settings.get("basic_model"); BasicModel model = BASIC_MODELS.get(basicModel); + + if (model == null) { + String replacement = LEGACY_BASIC_MODELS.get(basicModel); + if (replacement != null) { + if (indexCreatedVersion.onOrAfter(Version.V_7_0_0_alpha1)) { + throw new IllegalArgumentException("Basic model [" + basicModel + "] isn't supported anymore, " + + "please use another model."); + } else { + DEPRECATION_LOGGER.deprecated("Basic model [" + basicModel + + "] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "]."); + model = BASIC_MODELS.get(replacement); + assert model != null; + } + } + } + if (model == null) { throw new IllegalArgumentException("Unsupported BasicModel [" + basicModel + "], expected one of " + BASIC_MODELS.keySet()); } @@ -139,9 +163,25 @@ private static BasicModel parseBasicModel(Settings settings) { * @param settings Settings to parse * @return {@link AfterEffect} referred to in the Settings */ - private static AfterEffect parseAfterEffect(Settings settings) { + private static AfterEffect parseAfterEffect(Version indexCreatedVersion, Settings settings) { String afterEffect = settings.get("after_effect"); AfterEffect effect = AFTER_EFFECTS.get(afterEffect); + + if (effect == null) { + String replacement = LEGACY_AFTER_EFFECTS.get(afterEffect); + if (replacement != null) { + if (indexCreatedVersion.onOrAfter(Version.V_7_0_0_alpha1)) { + throw new IllegalArgumentException("After effect [" + afterEffect + + "] isn't supported anymore, please use another effect."); + } else { + DEPRECATION_LOGGER.deprecated("After effect [" + afterEffect + + "] isn't supported anymore and has arbitrarily been replaced with [" + replacement + "]."); + effect = AFTER_EFFECTS.get(replacement); + assert effect != null; + } + } + } + if (effect == null) { throw new IllegalArgumentException("Unsupported AfterEffect [" + afterEffect + "], expected one of " + AFTER_EFFECTS.keySet()); } @@ -263,8 +303,8 @@ public static DFRSimilarity createDfrSimilarity(Settings settings, Version index return new DFRSimilarity( - parseBasicModel(settings), - parseAfterEffect(settings), + parseBasicModel(indexCreatedVersion, settings), + parseAfterEffect(indexCreatedVersion, settings), parseNormalization(settings)); } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 1ecdc797073cf..a22ada87d772c 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ 
b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -19,14 +19,17 @@ package org.elasticsearch.indices.analysis; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.LowerCaseFilter; -import org.apache.lucene.analysis.standard.StandardFilter; +import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.NamedRegistry; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -39,7 +42,6 @@ import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; import org.elasticsearch.index.analysis.StandardAnalyzerProvider; -import org.elasticsearch.index.analysis.StandardTokenFilterFactory; import org.elasticsearch.index.analysis.StandardTokenizerFactory; import org.elasticsearch.index.analysis.StopAnalyzerProvider; import org.elasticsearch.index.analysis.StopTokenFilterFactory; @@ -69,6 +71,8 @@ public final class AnalysisModule { private static final IndexSettings NA_INDEX_SETTINGS; + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(AnalysisModule.class)); + private final HunspellService hunspellService; private final AnalysisRegistry analysisRegistry; @@ -116,7 +120,29 @@ private NamedRegistry> setupTokenFilters(Li hunspellService) { NamedRegistry> tokenFilters = new NamedRegistry<>("token_filter"); tokenFilters.register("stop", StopTokenFilterFactory::new); - tokenFilters.register("standard", StandardTokenFilterFactory::new); + // Add "standard" for old indices (bwc) + tokenFilters.register("standard", new AnalysisProvider() { + @Override + public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) { + if (indexSettings.getIndexVersionCreated().before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_deprecation", + "The [standard] token filter name is deprecated and will be removed in a future version."); + } else { + throw new IllegalArgumentException("The [standard] token filter has been removed."); + } + return new AbstractTokenFilterFactory(indexSettings, name, settings) { + @Override + public TokenStream create(TokenStream tokenStream) { + return tokenStream; + } + }; + } + + @Override + public boolean requiresAnalysisSettings() { + return false; + } + }); tokenFilters.register("shingle", ShingleTokenFilterFactory::new); tokenFilters.register("hunspell", requiresAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory (indexSettings, name, settings, hunspellService))); @@ -153,7 +179,17 @@ static Map setupPreConfiguredTokenFilters(List // Add filters available in lucene-core preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new)); - preConfiguredTokenFilters.register("standard", PreConfiguredTokenFilter.singleton("standard", false, StandardFilter::new)); + // Add "standard" for old indices (bwc) + preConfiguredTokenFilters.register( 
"standard", + PreConfiguredTokenFilter.singletonWithVersion("standard", true, (reader, version) -> { + if (version.before(Version.V_7_0_0_alpha1)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("standard_deprecation", + "The [standard] token filter is deprecated and will be removed in a future version."); + } else { + throw new IllegalArgumentException("The [standard] token filter has been removed."); + } + return reader; + })); /* Note that "stop" is available in lucene-core but it's pre-built * version uses a set of English stop words that are in * lucene-analyzers-common so "stop" is defined in the analysis-common diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 0f31a8a46f1db..1b4772b3e51ef 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.standard.ClassicAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.elasticsearch.Version; @@ -61,7 +62,7 @@ protected Analyzer create(Version version) { STOP { @Override protected Analyzer create(Version version) { - Analyzer a = new StopAnalyzer(); + Analyzer a = new StopAnalyzer(EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); a.setVersion(version.luceneVersion); return a; } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index 585713b641f5e..6b9432483f304 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -19,7 +19,7 @@ package org.elasticsearch.rest; -import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.CheckedConsumer; @@ -110,7 +110,7 @@ protected final String unrecognized( invalids.size() > 1 ? "s" : "")); boolean first = true; for (final String invalid : invalids) { - final LevensteinDistance ld = new LevensteinDistance(); + final LevenshteinDistance ld = new LevenshteinDistance(); final List> scoredParams = new ArrayList<>(); for (final String candidate : candidates) { final float distance = ld.getDistance(invalid, candidate); diff --git a/server/src/main/java/org/elasticsearch/script/SimilarityScript.java b/server/src/main/java/org/elasticsearch/script/SimilarityScript.java index c410a0bd6eba4..4aeb4063959b3 100644 --- a/server/src/main/java/org/elasticsearch/script/SimilarityScript.java +++ b/server/src/main/java/org/elasticsearch/script/SimilarityScript.java @@ -21,8 +21,6 @@ import org.elasticsearch.index.similarity.ScriptedSimilarity; -import java.io.IOException; - /** A script that is used to build {@link ScriptedSimilarity} instances. 
*/ public abstract class SimilarityScript { @@ -34,7 +32,7 @@ public abstract class SimilarityScript { * @param doc per-document statistics */ public abstract double execute(double weight, ScriptedSimilarity.Query query, - ScriptedSimilarity.Field field, ScriptedSimilarity.Term term, ScriptedSimilarity.Doc doc) throws IOException; + ScriptedSimilarity.Field field, ScriptedSimilarity.Term term, ScriptedSimilarity.Doc doc); public interface Factory { SimilarityScript newInstance(); diff --git a/server/src/main/java/org/elasticsearch/script/SimilarityWeightScript.java b/server/src/main/java/org/elasticsearch/script/SimilarityWeightScript.java index f48a9c93e023b..04bbc3cccf40a 100644 --- a/server/src/main/java/org/elasticsearch/script/SimilarityWeightScript.java +++ b/server/src/main/java/org/elasticsearch/script/SimilarityWeightScript.java @@ -21,8 +21,6 @@ import org.elasticsearch.index.similarity.ScriptedSimilarity; -import java.io.IOException; - /** A script that is used to compute scoring factors that are the same for all documents. */ public abstract class SimilarityWeightScript { @@ -32,7 +30,7 @@ public abstract class SimilarityWeightScript { * @param term term-level statistics */ public abstract double execute(ScriptedSimilarity.Query query, ScriptedSimilarity.Field field, - ScriptedSimilarity.Term term) throws IOException; + ScriptedSimilarity.Term term); public interface Factory { SimilarityWeightScript newInstance(); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 28a600c0d21ef..71ea55e97a762 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -82,7 +82,7 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable plugins) { registerFetchSubPhase(new VersionFetchSubPhase()); registerFetchSubPhase(new MatchedQueriesFetchSubPhase()); registerFetchSubPhase(new HighlightPhase(settings, highlighters)); + registerFetchSubPhase(new ScoreFetchSubPhase()); FetchPhaseConstructionContext context = new FetchPhaseConstructionContext(highlighters); registerFromPlugin(plugins, p -> p.getFetchSubPhases(context), this::registerFetchSubPhase); diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index a7db2c55fe149..5cb9f81626c94 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -895,13 +895,13 @@ private void shortcutDocIdsToLoad(SearchContext context) { completionSuggestions = Collections.emptyList(); } if (context.request().scroll() != null) { - TopDocs topDocs = context.queryResult().topDocs(); + TopDocs topDocs = context.queryResult().topDocs().topDocs; docIdsToLoad = new int[topDocs.scoreDocs.length + numSuggestDocs]; for (int i = 0; i < topDocs.scoreDocs.length; i++) { docIdsToLoad[docsOffset++] = topDocs.scoreDocs[i].doc; } } else { - TopDocs topDocs = context.queryResult().topDocs(); + TopDocs topDocs = context.queryResult().topDocs().topDocs; if (topDocs.scoreDocs.length < context.from()) { // no more docs... 
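The repeated `.topDocs().topDocs` unwrapping in the SearchService hunk above reflects a wrapper type introduced by this change: the shard query result now carries the maximum score alongside Lucene's TopDocs, since Lucene 8 stopped computing maxScore as part of every search. A sketch of the shape implied by the call sites (the real class is org.elasticsearch.common.lucene.search.TopDocsAndMaxScore):

import org.apache.lucene.search.TopDocs;

final class TopDocsAndMaxScoreSketch {
    final TopDocs topDocs; // the hits, as before
    final float maxScore;  // tracked explicitly now; Float.NaN when not computed

    TopDocsAndMaxScoreSketch(TopDocs topDocs, float maxScore) {
        this.topDocs = topDocs;
        this.maxScore = maxScore;
    }
}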
docIdsToLoad = new int[numSuggestDocs]; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index 2ad76d8a2b49c..568a692ba61c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -98,9 +99,9 @@ public void postCollection() throws IOException { badState(); } @Override - public boolean needsScores() { + public ScoreMode scoreMode() { badState(); - return false; // unreachable + return ScoreMode.COMPLETE; // unreachable } }; addRequestCircuitBreakerBytes(DEFAULT_WEIGHT); @@ -137,13 +138,13 @@ protected long addRequestCircuitBreakerBytes(long bytes) { * your aggregator needs them. */ @Override - public boolean needsScores() { + public ScoreMode scoreMode() { for (Aggregator agg : subAggregators) { - if (agg.needsScores()) { - return true; + if (agg.scoreMode().needsScores()) { + return ScoreMode.COMPLETE; } } - return false; + return ScoreMode.COMPLETE_NO_SCORES; } public Map metaData() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java index 88cc7319948bd..59b63520a1bd3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; @@ -74,8 +75,8 @@ public Aggregator parent() { } @Override - public boolean needsScores() { - return first.needsScores(); + public ScoreMode scoreMode() { + return first.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java index f2c8bf5e16e44..c50dd615c7b34 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/BucketCollector.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -45,8 +46,8 @@ public void postCollection() throws IOException { // no-op } @Override - public boolean needsScores() { - return false; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE_NO_SCORES; } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index a8a015ab5453b..624c8d5409a56 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ 
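The AggregatorBase and BucketCollector hunks above establish the mechanical translation used throughout the rest of this patch: Lucene 8 replaced the boolean Collector.needsScores() with Collector.scoreMode(). For bucket collectors, which must visit every matching document, only two of the enum's values come into play, and they map onto the old boolean as sketched here:

import org.apache.lucene.search.ScoreMode;

final class ScoreModeMappingSketch {
    // old style: boolean needsScores()  ->  new style: ScoreMode scoreMode()
    static ScoreMode fromNeedsScores(boolean needsScores) {
        return needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
    }

    // reverse direction, as used by call sites such as subAggsNeedScore()
    static boolean toNeedsScores(ScoreMode mode) {
        return mode.needsScores();
    }
}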
b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -21,9 +21,11 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.Collector; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.ScoreCachingWrappingScorer; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import java.io.IOException; @@ -96,8 +98,8 @@ public static BucketCollector wrap(Iterable collector private MultiBucketCollector(BucketCollector... collectors) { this.collectors = collectors; int numNeedsScores = 0; - for (BucketCollector collector : collectors) { - if (collector.needsScores()) { + for (Collector collector : collectors) { + if (collector.scoreMode().needsScores()) { numNeedsScores += 1; } } @@ -105,27 +107,30 @@ private MultiBucketCollector(BucketCollector... collectors) { } @Override - public void preCollection() throws IOException { - for (BucketCollector collector : collectors) { - collector.preCollection(); + public ScoreMode scoreMode() { + ScoreMode scoreMode = null; + for (Collector collector : collectors) { + if (scoreMode == null) { + scoreMode = collector.scoreMode(); + } else if (scoreMode != collector.scoreMode()) { + return ScoreMode.COMPLETE; + } } + return scoreMode; } @Override - public void postCollection() throws IOException { + public void preCollection() throws IOException { for (BucketCollector collector : collectors) { - collector.postCollection(); + collector.preCollection(); } } @Override - public boolean needsScores() { + public void postCollection() throws IOException { for (BucketCollector collector : collectors) { - if (collector.needsScores()) { - return true; - } + collector.postCollection(); } - return false; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 6ebf9e3c41c40..32695ac69a88e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.packed.PackedInts; @@ -81,11 +82,11 @@ public BestBucketsDeferringCollector(SearchContext context, boolean isGlobal) { } @Override - public boolean needsScores() { + public ScoreMode scoreMode() { if (collector == null) { throw new IllegalStateException(); } - return collector.needsScores(); + return collector.scoreMode(); } /** Set the deferred collectors. */ @@ -153,11 +154,11 @@ public void prepareSelectedBuckets(long... selectedBuckets) throws IOException { } this.selectedBuckets = hash; - boolean needsScores = needsScores(); + boolean needsScores = scoreMode().needsScores(); Weight weight = null; if (needsScores) { Query query = isGlobal ? 
new MatchAllDocsQuery() : searchContext.query(); - weight = searchContext.searcher().createNormalizedWeight(query, true); + weight = searchContext.searcher().createWeight(searchContext.searcher().rewrite(query), ScoreMode.COMPLETE, 1f); } for (Entry entry : entries) { final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java index 3c63df2c06a76..7151a6f33d9fe 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/DeferringBucketCollector.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -62,8 +63,8 @@ protected class WrappedAggregator extends Aggregator { } @Override - public boolean needsScores() { - return in.needsScores(); + public ScoreMode scoreMode() { + return in.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java index 5653bc58f2a6c..53049d0301c2d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java @@ -21,6 +21,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.packed.PackedInts; @@ -66,11 +67,11 @@ public void setDeferredCollector(Iterable deferredCollectors) { } @Override - public boolean needsScores() { + public ScoreMode scoreMode() { if (collector == null) { throw new IllegalStateException(); } - return collector.needsScores(); + return collector.scoreMode(); } @Override @@ -158,10 +159,12 @@ public void prepareSelectedBuckets(long... 
selectedBuckets) throws IOException { } this.selectedBuckets = hash; - boolean needsScores = collector.needsScores(); + boolean needsScores = collector.scoreMode().needsScores(); Weight weight = null; if (needsScores) { - weight = searchContext.searcher().createNormalizedWeight(searchContext.query(), true); + weight = searchContext.searcher().createWeight( + searchContext.searcher().rewrite(searchContext.query()), + ScoreMode.COMPLETE, 1f); } for (Entry entry : entries) { final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java index 6df88379d4eb0..69bc2de39dca9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregatorFactory.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -51,7 +52,7 @@ public AdjacencyMatrixAggregatorFactory(String name, List filters, KeyedFilter keyedFilter = filters.get(i); this.keys[i] = keyedFilter.key(); Query filter = keyedFilter.filter().toFilter(context.getQueryShardContext()); - this.weights[i] = contextSearcher.createNormalizedWeight(filter, false); + this.weights[i] = contextSearcher.createWeight(contextSearcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 97c535f56c694..3c43cf3ec1d2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.RoaringDocIdSet; @@ -204,11 +205,11 @@ public void collect(int doc, long bucket) throws IOException { * the {@link #deferredCollectors}. 
*/ private void runDeferredCollections() throws IOException { - final boolean needsScores = needsScores(); + final boolean needsScores = scoreMode().needsScores(); Weight weight = null; if (needsScores) { Query query = context.query(); - weight = context.searcher().createNormalizedWeight(query, true); + weight = context.searcher().createWeight(context.searcher().rewrite(query), ScoreMode.COMPLETE, 1f); } deferredCollectors.preCollection(); for (Entry entry : entries) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index d0f2d6ef9461a..9bf51e57df06d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -25,7 +25,7 @@ import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.Query; import org.apache.lucene.util.DocIdSetBuilder; -import org.apache.lucene.util.StringHelper; +import org.apache.lucene.util.FutureArrays; import java.io.IOException; import java.util.function.ToLongFunction; @@ -147,8 +147,10 @@ public void visit(int docID, byte[] packedValue) throws IOException { @Override public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) { - if ((upperPointQuery != null && StringHelper.compare(bytesPerDim, minPackedValue, 0, upperPointQuery, 0) > 0) || - (lowerPointQuery != null && StringHelper.compare(bytesPerDim, maxPackedValue, 0, lowerPointQuery, 0) < 0)) { + if ((upperPointQuery != null && + FutureArrays.compareUnsigned(minPackedValue, 0, bytesPerDim, upperPointQuery, 0, bytesPerDim) > 0) || + (lowerPointQuery != null && + FutureArrays.compareUnsigned(maxPackedValue, 0, bytesPerDim, lowerPointQuery, 0, bytesPerDim) < 0)) { // does not match the query return PointValues.Relation.CELL_OUTSIDE_QUERY; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index 4b54dccbf96c1..c8b1e630b8549 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.aggregations.AggregationInitializationException; @@ -58,7 +59,7 @@ public Weight getWeight() { if (weight == null) { IndexSearcher contextSearcher = context.searcher(); try { - weight = contextSearcher.createNormalizedWeight(filter, false); + weight = contextSearcher.createWeight(contextSearcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } catch (IOException e) { throw new AggregationInitializationException("Failed to initialse filter", e); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java index 048042f05ff65..81a78632d4bd6 100644 --- 
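The other recurring rewrite, visible in CompositeAggregator and FilterAggregatorFactory above: IndexSearcher.createNormalizedWeight(query, needsScores) no longer exists, so each call site now rewrites the query itself and passes an explicit ScoreMode plus a boost. A sketch of the equivalence, assuming a searcher and query are in scope:

import java.io.IOException;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;

final class CreateWeightSketch {
    static Weight weightFor(IndexSearcher searcher, Query query, boolean needsScores) throws IOException {
        Query rewritten = searcher.rewrite(query); // rewriting is the caller's job now
        ScoreMode mode = needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
        return searcher.createWeight(rewritten, mode, 1f); // 1f is the top-level boost
    }
}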
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorFactory.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.Aggregator; @@ -74,7 +75,7 @@ public Weight[] getWeights() { IndexSearcher contextSearcher = context.searcher(); weights = new Weight[filters.length]; for (int i = 0; i < filters.length; ++i) { - this.weights[i] = contextSearcher.createNormalizedWeight(filters[i], false); + this.weights[i] = contextSearcher.createWeight(contextSearcher.rewrite(filters[i]), ScoreMode.COMPLETE_NO_SCORES, 1); } } catch (IOException e) { throw new AggregationInitializationException("Failed to initialse filters for aggregation [" + name() + "]", e); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index ec54abb334056..700145b94fa56 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.aggregations.Aggregator; @@ -57,8 +58,11 @@ public class GeoHashGridAggregator extends BucketsAggregator { } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index 95376710373f8..81bb70bd9672a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasables; @@ -78,8 +79,11 @@ class AutoDateHistogramAggregator extends DeferableBucketAggregator { } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java 
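From here on, every values-source bucket aggregator (geohash grid, the histogram variants, ranges, terms) receives the same scoreMode() override: request scores only if the values source needs them, and otherwise defer to the base class, which inspects the sub-aggregators. A condensed sketch with a stand-in values-source interface:

import org.apache.lucene.search.ScoreMode;

abstract class ValuesSourceScoreModeSketch {
    interface ValuesSource { boolean needsScores(); } // stand-in for the real hierarchy

    ValuesSource valuesSource; // may be null, e.g. for an unmapped field

    abstract ScoreMode superScoreMode(); // the base-class logic over sub-aggregators

    ScoreMode scoreMode() {
        if (valuesSource != null && valuesSource.needsScores()) {
            return ScoreMode.COMPLETE;
        }
        return superScoreMode();
    }
}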
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 8b1f0c4642160..735a6717210a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasables; @@ -86,8 +87,11 @@ class DateHistogramAggregator extends BucketsAggregator { } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java index a0e4871a7df42..e72b609494b75 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.histogram; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lease.Releasables; @@ -87,8 +88,11 @@ class HistogramAggregator extends BucketsAggregator { } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index a85225e846372..ef9c1969c413b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -75,7 +76,7 @@ public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final L IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx); IndexSearcher searcher = new IndexSearcher(topLevelContext); searcher.setQueryCache(null); - Weight weight = searcher.createNormalizedWeight(childFilter, false); + Weight weight = searcher.createWeight(searcher.rewrite(childFilter), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer childDocsScorer = weight.scorer(ctx); final BitSet parentDocs = parentFilter.getBitSet(ctx); diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 14c1cc8818704..b8b0cf293a371 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.DocValueFormat; @@ -91,8 +92,11 @@ public BinaryRangeAggregator(String name, AggregatorFactories factories, } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index c490b344bdbce..9050f1e49f1ad 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.range; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -246,8 +247,11 @@ public RangeAggregator(String name, AggregatorFactories factories, ValuesSource. } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index bb89173e76791..4e63d693d1875 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; @@ -70,8 +71,8 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme } @Override - public boolean needsScores() { - return true; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; } /** Set the deferred collectors. 
*/ @@ -102,7 +103,7 @@ public void collect(int doc, long bucket) throws IOException { // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size); + return TopScoreDocCollector.create(size, Integer.MAX_VALUE); } @Override @@ -280,6 +281,11 @@ public void collect(int docId, long parentBucket) throws IOException { sampler.collect(docId); maxDocId = Math.max(maxDocId, docId); } + + @Override + public float getMaxScore(int upTo) throws IOException { + return Float.MAX_VALUE; + } } public int getDocCount(long parentBucket) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java index 59e491705c69e..d4995f75616a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -149,8 +150,8 @@ public String toString() { } @Override - public boolean needsScores() { - return true; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java index 7cd2c4e9b3a85..90aa633ffc5f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.search.DocValueFormat; @@ -61,8 +62,11 @@ public LongTermsAggregator(String name, AggregatorFactories factories, ValuesSou } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } protected SortedNumericDocValues getValues(ValuesSource.Numeric valuesSource, LeafReaderContext ctx) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java index 95bc83ad88fd6..5bd8a8cd1d09d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.index.LeafReaderContext; +import 
org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.lease.Releasables; @@ -64,8 +65,11 @@ public StringTermsAggregator(String name, AggregatorFactories factories, ValuesS } @Override - public boolean needsScores() { - return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + public ScoreMode scoreMode() { + if (valuesSource != null && valuesSource.needsScores()) { + return ScoreMode.COMPLETE; + } + return super.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java index f3b867307d172..c1bdc85fb02e7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregator.java @@ -225,7 +225,7 @@ static boolean descendsFromNestedAggregator(Aggregator parent) { private boolean subAggsNeedScore() { for (Aggregator subAgg : subAggregators) { - if (subAgg.needsScores()) { + if (subAgg.scoreMode().needsScores()) { return true; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java index 27890efbff182..042618011f16d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.avg; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -61,8 +62,8 @@ public AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFor } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java index 7a8483b1b26ee..0df6b69681937 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.RamUsageEstimator; @@ -71,8 +72,8 @@ public CardinalityAggregator(String name, ValuesSource valuesSource, int precisi } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } private Collector pickCollector(LeafReaderContext ctx) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java index ff76e6637baf4..bd73470ff407d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.max; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -60,8 +61,8 @@ public MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFor } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java index e4b371514bdf9..0f5dd36cb4930 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.min; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -59,8 +60,8 @@ public MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFor } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java index 47c267aae903e..56cd7eefbf203 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java @@ -21,6 +21,7 @@ import org.HdrHistogram.DoubleHistogram; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.BigArrays; @@ -65,8 +66,8 @@ public AbstractHDRPercentilesAggregator(String name, ValuesSource.Numeric values } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java index 1b5ed510f8d61..802e1b0257cea 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.ArrayUtils; import org.elasticsearch.common.util.BigArrays; @@ -64,8 +65,8 @@ public AbstractTDigestPercentilesAggregator(String name, ValuesSource.Numeric va } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index ea7bf270b8b62..8a49530f0d3da 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.script.ScriptedMetricAggContexts; @@ -55,8 +56,8 @@ protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScr } @Override - public boolean needsScores() { - return true; // TODO: how can we know if the script relies on scores? + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // TODO: how can we know if the script relies on scores? 
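ScriptedMetricAggregator keeps the conservative answer the TODO above points at: the framework cannot see whether a script reads _score, so it must keep scores available. A sketch of the trade-off, where declaresScoreUse is a hypothetical hint that does not exist in this patch:

import org.apache.lucene.search.ScoreMode;

final class OpaqueScriptScoreModeSketch {
    static ScoreMode forScript(Boolean declaresScoreUse) {
        if (declaresScoreUse == null) {
            // unknown: keep scores, at the cost of computing them for every document
            return ScoreMode.COMPLETE;
        }
        return declaresScoreUse ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
    }
}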
} @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java index 321e9e10f0fe8..42d14d05fecb4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.stats; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -70,8 +71,8 @@ public StatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueF } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java index 8339c06aefdcc..1089d2e1b9796 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.stats.extended; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; @@ -78,8 +79,8 @@ public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, D } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java index 9ed8103a1e1ee..56122c6f3dac4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.sum; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -57,8 +58,8 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue { } @Override - public boolean needsScores() { - return valuesSource != null && valuesSource.needsScores(); + public ScoreMode scoreMode() { + return valuesSource != null && valuesSource.needsScores() ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java index 58fac4b952048..8b6fa373212b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java @@ -23,9 +23,11 @@ import org.apache.lucene.search.Sort; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.search.TotalHits.Relation; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -43,10 +45,10 @@ public class InternalTopHits extends InternalAggregation implements TopHits { private int from; private int size; - private TopDocs topDocs; + private TopDocsAndMaxScore topDocs; private SearchHits searchHits; - public InternalTopHits(String name, int from, int size, TopDocs topDocs, SearchHits searchHits, + public InternalTopHits(String name, int from, int size, TopDocsAndMaxScore topDocs, SearchHits searchHits, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); this.from = from; @@ -85,7 +87,7 @@ public SearchHits getHits() { return searchHits; } - TopDocs getTopDocs() { + TopDocsAndMaxScore getTopDocs() { return topDocs; } @@ -115,12 +117,12 @@ public InternalAggregation doReduce(List aggregations, Redu final TopDocs reducedTopDocs; final TopDocs[] shardDocs; - if (topDocs instanceof TopFieldDocs) { - Sort sort = new Sort(((TopFieldDocs) topDocs).fields); + if (topDocs.topDocs instanceof TopFieldDocs) { + Sort sort = new Sort(((TopFieldDocs) topDocs.topDocs).fields); shardDocs = new TopFieldDocs[aggregations.size()]; for (int i = 0; i < shardDocs.length; i++) { InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); - shardDocs[i] = topHitsAgg.topDocs; + shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; } reducedTopDocs = TopDocs.merge(sort, from, size, (TopFieldDocs[]) shardDocs, true); @@ -128,12 +130,24 @@ public InternalAggregation doReduce(List aggregations, Redu shardDocs = new TopDocs[aggregations.size()]; for (int i = 0; i < shardDocs.length; i++) { InternalTopHits topHitsAgg = (InternalTopHits) aggregations.get(i); - shardDocs[i] = topHitsAgg.topDocs; + shardDocs[i] = topHitsAgg.topDocs.topDocs; shardHits[i] = topHitsAgg.searchHits; } reducedTopDocs = TopDocs.merge(from, size, shardDocs, true); } + float maxScore = Float.NaN; + for (InternalAggregation agg : aggregations) { + InternalTopHits topHitsAgg = (InternalTopHits) agg; + if (Float.isNaN(topHitsAgg.topDocs.maxScore) == false) { + if (Float.isNaN(maxScore)) { + maxScore = topHitsAgg.topDocs.maxScore; + } else { + maxScore = Math.max(maxScore, topHitsAgg.topDocs.maxScore); + } + } + } + final int[] tracker = new int[shardHits.length]; SearchHit[] hits = new SearchHit[reducedTopDocs.scoreDocs.length]; for (int i = 0; i < reducedTopDocs.scoreDocs.length; i++) { @@ -144,9 +158,10 @@ public InternalAggregation doReduce(List aggregations, Redu } while 
(shardDocs[scoreDoc.shardIndex].scoreDocs[position] != scoreDoc); hits[i] = shardHits[scoreDoc.shardIndex].getAt(position); } - return new InternalTopHits(name, this.from, this.size, reducedTopDocs, new SearchHits(hits, reducedTopDocs.totalHits, - reducedTopDocs.getMaxScore()), - pipelineAggregators(), getMetaData()); + assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + return new InternalTopHits(name, this.from, this.size, + new TopDocsAndMaxScore(reducedTopDocs, maxScore), + new SearchHits(hits, reducedTopDocs.totalHits.value, maxScore), pipelineAggregators(), getMetaData()); } @Override @@ -170,11 +185,12 @@ protected boolean doEquals(Object obj) { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.totalHits != other.topDocs.totalHits) return false; - if (topDocs.scoreDocs.length != other.topDocs.scoreDocs.length) return false; - for (int d = 0; d < topDocs.scoreDocs.length; d++) { - ScoreDoc thisDoc = topDocs.scoreDocs[d]; - ScoreDoc otherDoc = other.topDocs.scoreDocs[d]; + if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; + if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; + for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { + ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; + ScoreDoc otherDoc = other.topDocs.topDocs.scoreDocs[d]; if (thisDoc.doc != otherDoc.doc) return false; if (Double.compare(thisDoc.score, otherDoc.score) != 0) return false; if (thisDoc.shardIndex != otherDoc.shardIndex) return false; @@ -195,9 +211,10 @@ protected boolean doEquals(Object obj) { protected int doHashCode() { int hashCode = from; hashCode = 31 * hashCode + size; - hashCode = 31 * hashCode + Long.hashCode(topDocs.totalHits); - for (int d = 0; d < topDocs.scoreDocs.length; d++) { - ScoreDoc doc = topDocs.scoreDocs[d]; + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { + ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; hashCode = 31 * hashCode + Float.floatToIntBits(doc.score); hashCode = 31 * hashCode + doc.shardIndex; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java index e59299754aead..48a42b74292c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java @@ -23,18 +23,24 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopFieldDocs; import 
org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.LongObjectPagedHashMap; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -57,9 +63,21 @@ public class TopHitsAggregator extends MetricsAggregator { + private static class Collectors { + public final TopDocsCollector<?> topDocsCollector; + public final MaxScoreCollector maxScoreCollector; + public final Collector collector; + + Collectors(TopDocsCollector<?> topDocsCollector, MaxScoreCollector maxScoreCollector) { + this.topDocsCollector = topDocsCollector; + this.maxScoreCollector = maxScoreCollector; + collector = MultiCollector.wrap(topDocsCollector, maxScoreCollector); + } + } + private final FetchPhase fetchPhase; private final SubSearchContext subSearchContext; - private final LongObjectPagedHashMap<TopDocsCollector<?>> topDocsCollectors; + private final LongObjectPagedHashMap<Collectors> topDocsCollectors; TopHitsAggregator(FetchPhase fetchPhase, SubSearchContext subSearchContext, String name, SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { @@ -70,13 +88,13 @@ public class TopHitsAggregator extends MetricsAggregator { } @Override - public boolean needsScores() { + public ScoreMode scoreMode() { SortAndFormats sort = subSearchContext.sort(); if (sort != null) { - return sort.sort.needsScores() || subSearchContext.trackScores(); + return sort.sort.needsScores() || subSearchContext.trackScores() ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } else { // sort by score - return true; + return ScoreMode.COMPLETE; } } @@ -102,8 +120,8 @@ public void setScorer(Scorer scorer) throws IOException { @Override public void collect(int docId, long bucket) throws IOException { - TopDocsCollector<?> topDocsCollector = topDocsCollectors.get(bucket); - if (topDocsCollector == null) { + Collectors collectors = topDocsCollectors.get(bucket); + if (collectors == null) { SortAndFormats sort = subSearchContext.sort(); int topN = subSearchContext.from() + subSearchContext.size(); if (sort == null) { @@ -115,20 +133,21 @@ public void collect(int docId, long bucket) throws IOException { // but here we create collectors ourselves and we need to prevent OOM because of a crazy offset and size. topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - topDocsCollector = TopScoreDocCollector.create(topN); + collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits()? // Note that this would require catching CollectionTerminatedException - topDocsCollector = TopFieldCollector.create(sort.sort, topN, true, subSearchContext.trackScores(), - subSearchContext.trackScores(), true); + collectors = new Collectors( + TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + subSearchContext.trackScores() ?
new MaxScoreCollector() : null); } - topDocsCollectors.put(bucket, topDocsCollector); + topDocsCollectors.put(bucket, collectors); } final LeafCollector leafCollector; final int key = leafCollectors.indexOf(bucket); if (key < 0) { - leafCollector = topDocsCollector.getLeafCollector(ctx); + leafCollector = collectors.collector.getLeafCollector(ctx); if (scorer != null) { leafCollector.setScorer(scorer); } @@ -142,58 +161,65 @@ public void collect(int docId, long bucket) throws IOException { } @Override - public InternalAggregation buildAggregation(long owningBucketOrdinal) { - TopDocsCollector topDocsCollector = topDocsCollectors.get(owningBucketOrdinal); - final InternalTopHits topHits; - if (topDocsCollector == null) { - topHits = buildEmptyAggregation(); - } else { - TopDocs topDocs = topDocsCollector.topDocs(); - if (subSearchContext.sort() == null) { - for (RescoreContext ctx : context().rescore()) { - try { - topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); - } catch (IOException e) { - throw new ElasticsearchException("Rescore TopHits Failed", e); - } + public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { + Collectors collectors = topDocsCollectors.get(owningBucketOrdinal); + if (collectors == null) { + return buildEmptyAggregation(); + } + TopDocsCollector topDocsCollector = collectors.topDocsCollector; + TopDocs topDocs = topDocsCollector.topDocs(); + float maxScore = Float.NaN; + if (subSearchContext.sort() == null) { + for (RescoreContext ctx : context().rescore()) { + try { + topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); + } catch (IOException e) { + throw new ElasticsearchException("Rescore TopHits Failed", e); } } - subSearchContext.queryResult().topDocs(topDocs, - subSearchContext.sort() == null ? null : subSearchContext.sort().formats); - int[] docIdsToLoad = new int[topDocs.scoreDocs.length]; - for (int i = 0; i < topDocs.scoreDocs.length; i++) { - docIdsToLoad[i] = topDocs.scoreDocs[i].doc; + if (topDocs.scoreDocs.length > 0) { + maxScore = topDocs.scoreDocs[0].score; } - subSearchContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); - fetchPhase.execute(subSearchContext); - FetchSearchResult fetchResult = subSearchContext.fetchResult(); - SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); - for (int i = 0; i < internalHits.length; i++) { - ScoreDoc scoreDoc = topDocs.scoreDocs[i]; - SearchHit searchHitFields = internalHits[i]; - searchHitFields.shard(subSearchContext.shardTarget()); - searchHitFields.score(scoreDoc.score); - if (scoreDoc instanceof FieldDoc) { - FieldDoc fieldDoc = (FieldDoc) scoreDoc; - searchHitFields.sortValues(fieldDoc.fields, subSearchContext.sort().formats); - } + } else if (subSearchContext.trackScores()) { + TopFieldCollector.populateScores(topDocs.scoreDocs, subSearchContext.searcher(), subSearchContext.query()); + maxScore = collectors.maxScoreCollector.getMaxScore(); + } + final TopDocsAndMaxScore topDocsAndMaxScore = new TopDocsAndMaxScore(topDocs, maxScore); + subSearchContext.queryResult().topDocs(topDocsAndMaxScore, + subSearchContext.sort() == null ? 
null : subSearchContext.sort().formats); + int[] docIdsToLoad = new int[topDocs.scoreDocs.length]; + for (int i = 0; i < topDocs.scoreDocs.length; i++) { + docIdsToLoad[i] = topDocs.scoreDocs[i].doc; + } + subSearchContext.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); + fetchPhase.execute(subSearchContext); + FetchSearchResult fetchResult = subSearchContext.fetchResult(); + SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); + for (int i = 0; i < internalHits.length; i++) { + ScoreDoc scoreDoc = topDocs.scoreDocs[i]; + SearchHit searchHitFields = internalHits[i]; + searchHitFields.shard(subSearchContext.shardTarget()); + searchHitFields.score(scoreDoc.score); + if (scoreDoc instanceof FieldDoc) { + FieldDoc fieldDoc = (FieldDoc) scoreDoc; + searchHitFields.sortValues(fieldDoc.fields, subSearchContext.sort().formats); } - topHits = new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocs, fetchResult.hits(), - pipelineAggregators(), metaData()); } - return topHits; + return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocsAndMaxScore, fetchResult.hits(), + pipelineAggregators(), metaData()); } @Override public InternalTopHits buildEmptyAggregation() { TopDocs topDocs; if (subSearchContext.sort() != null) { - topDocs = new TopFieldDocs(0, new FieldDoc[0], subSearchContext.sort().sort.getSort(), Float.NaN); + topDocs = new TopFieldDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new FieldDoc[0], + subSearchContext.sort().sort.getSort()); } else { topDocs = Lucene.EMPTY_TOP_DOCS; } - return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), topDocs, SearchHits.empty(), - pipelineAggregators(), metaData()); + return new InternalTopHits(name, subSearchContext.from(), subSearchContext.size(), new TopDocsAndMaxScore(topDocs, Float.NaN), + SearchHits.empty(), pipelineAggregators(), metaData()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java index 7a34fe6df4a68..0d9c2b1bc3b83 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.weighted_avg; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.DoubleArray; @@ -67,8 +68,8 @@ public WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSo } @Override - public boolean needsScores() { - return valuesSources != null && valuesSources.needsScores(); + public ScoreMode scoreMode() { + return valuesSources != null && valuesSources.needsScores() ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java index 82a7657f18079..4d8a1ba63ba15 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseContext.java @@ -60,11 +60,11 @@ public List<InnerHitBuilder> getInnerHit() { return innerHits; } - public CollapsingTopDocsCollector<?> createTopDocs(Sort sort, int topN, boolean trackMaxScore) { + public CollapsingTopDocsCollector<?> createTopDocs(Sort sort, int topN) { if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { - return CollapsingTopDocsCollector.createKeyword(fieldType.name(), sort, topN, trackMaxScore); + return CollapsingTopDocsCollector.createKeyword(fieldType.name(), sort, topN); } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { - return CollapsingTopDocsCollector.createNumeric(fieldType.name(), sort, topN, trackMaxScore); + return CollapsingTopDocsCollector.createNumeric(fieldType.name(), sort, topN); } else { throw new IllegalStateException("unknown type for collapse field " + fieldType.name() + ", only keywords and numbers are accepted"); diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java index fa7e611348d78..0b7d8da481c62 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsPhase.java @@ -25,8 +25,9 @@ import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermContext; +import org.apache.lucene.index.TermStates; import org.apache.lucene.search.CollectionStatistics; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermStatistics; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.search.SearchPhase; @@ -53,7 +54,8 @@ public void preProcess(SearchContext context) { public void execute(SearchContext context) { final ObjectHashSet<Term> termsSet = new ObjectHashSet<>(); try { - context.searcher().createNormalizedWeight(context.query(), true).extractTerms(new DelegateSet(termsSet)); + context.searcher().createWeight(context.searcher().rewrite(context.query()), ScoreMode.COMPLETE, 1f) + .extractTerms(new DelegateSet(termsSet)); for (RescoreContext rescoreContext : context.rescore()) { try { rescoreContext.rescorer().extractTerms(context.searcher(), rescoreContext, new DelegateSet(termsSet)); @@ -69,17 +71,19 @@ public void execute(SearchContext context) { if(context.isCancelled()) { throw new TaskCancelledException("cancelled"); } - // LUCENE 4 UPGRADE: cache TermContext? + // LUCENE 4 UPGRADE: cache TermStates?
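/*
 * [Editorial note, not part of the patch] The hunk below replaces Lucene 7's
 * TermContext with Lucene 8's TermStates. A minimal sketch of the new call
 * pattern, assuming an IndexSearcher `searcher` and a Term `term`:
 *
 *     IndexReaderContext topContext = searcher.getTopReaderContext();
 *     TermStates states = TermStates.build(topContext, term, true); // true = also gather statistics
 *     TermStatistics stats = searcher.termStatistics(term, states);
 *
 * Building TermStates walks every segment once, which is why the old
 * "cache TermContext?" TODO survives here as "cache TermStates?".
 */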
+ TermStates termContext = TermStates.build(indexReaderContext, terms[i], true); termStatistics[i] = context.searcher().termStatistics(terms[i], termContext); } ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap(); for (Term term : terms) { assert term.field() != null : "field is null"; - if (!fieldStatistics.containsKey(term.field())) { + if (fieldStatistics.containsKey(term.field()) == false) { final CollectionStatistics collectionStatistics = context.searcher().collectionStatistics(term.field()); - fieldStatistics.put(term.field(), collectionStatistics); + if (collectionStatistics != null) { + fieldStatistics.put(term.field(), collectionStatistics); + } if(context.isCancelled()) { throw new TaskCancelledException("cancelled"); } diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index 0cd624b00a36b..8de89089c4f01 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -124,9 +125,16 @@ public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics) out.writeString(c.key); CollectionStatistics statistics = c.value; assert statistics.maxDoc() >= 0; out.writeVLong(statistics.maxDoc()); - out.writeVLong(addOne(statistics.docCount())); - out.writeVLong(addOne(statistics.sumTotalTermFreq())); - out.writeVLong(addOne(statistics.sumDocFreq())); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + // stats are always positive numbers + out.writeVLong(statistics.docCount()); + out.writeVLong(statistics.sumTotalTermFreq()); + out.writeVLong(statistics.sumDocFreq()); + } else { + out.writeVLong(addOne(statistics.docCount())); + out.writeVLong(addOne(statistics.sumTotalTermFreq())); + out.writeVLong(addOne(statistics.sumDocFreq())); + } } } @@ -138,9 +146,14 @@ public static void writeTermStats(StreamOutput out, TermStatistics[] termStatist } public static void writeSingleTermStats(StreamOutput out, TermStatistics termStatistic) throws IOException { - assert termStatistic.docFreq() >= 0; - out.writeVLong(termStatistic.docFreq()); - out.writeVLong(addOne(termStatistic.totalTermFreq())); + if (termStatistic != null) { + assert termStatistic.docFreq() > 0; + out.writeVLong(termStatistic.docFreq()); + out.writeVLong(addOne(termStatistic.totalTermFreq())); + } else { + out.writeVLong(0); + out.writeVLong(0); + } } public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(StreamInput in) throws IOException { @@ -156,9 +169,19 @@ public static ObjectObjectHashMap<String, CollectionStatistics> readFieldStats(S final String field = in.readString(); assert field != null; final long maxDoc = in.readVLong(); - final long docCount = subOne(in.readVLong()); - final long sumTotalTermFreq = subOne(in.readVLong()); - final long sumDocFreq = subOne(in.readVLong()); + final long docCount; + final long sumTotalTermFreq; + final long sumDocFreq; + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + // stats are always positive numbers + docCount = in.readVLong(); + sumTotalTermFreq = in.readVLong(); + sumDocFreq =
subOne(in.readVLong()); + } CollectionStatistics stats = new CollectionStatistics(field, maxDoc, docCount, sumTotalTermFreq, sumDocFreq); fieldStatistics.put(field, stats); } @@ -178,6 +201,9 @@ public static TermStatistics[] readTermStats(StreamInput in, Term[] terms) throw final long docFreq = in.readVLong(); assert docFreq >= 0; final long totalTermFreq = subOne(in.readVLong()); + if (docFreq == 0) { + continue; + } termStatistics[i] = new TermStatistics(term, docFreq, totalTermFreq); } } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index f745ee1163c16..69ac90496864b 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -23,7 +23,10 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.search.TotalHits.Relation; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; @@ -168,7 +171,9 @@ public void execute(SearchContext context) { } } - context.fetchResult().hits(new SearchHits(hits, context.queryResult().getTotalHits(), context.queryResult().getMaxScore())); + TotalHits totalHits = context.queryResult().getTotalHits(); + long totalHitsAsLong = totalHits.relation == Relation.EQUAL_TO ? totalHits.value : -1; + context.fetchResult().hits(new SearchHits(hits, totalHitsAsLong, context.queryResult().getMaxScore())); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } @@ -357,7 +362,8 @@ private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context current = nestedParentObjectMapper; continue; } - final Weight childWeight = context.searcher().createNormalizedWeight(childFilter, false); + final Weight childWeight = context.searcher() + .createWeight(context.searcher().rewrite(childFilter), ScoreMode.COMPLETE_NO_SCORES, 1f); Scorer childScorer = childWeight.scorer(subReaderContext); if (childScorer == null) { current = nestedParentObjectMapper; diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java index d3b1da7c9376e..48f2f1299c2ea 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java @@ -25,11 +25,13 @@ import org.apache.lucene.search.ConjunctionDISI; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.SearchContext; @@ -87,7 +89,7 @@ protected InnerHitSubContext(String name, SearchContext context) { this.context = context; } - public abstract TopDocs[] topDocs(SearchHit[] hits) throws IOException; + public abstract 
TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException; public String getName() { return name; @@ -104,7 +106,8 @@ public void setChildInnerHits(Map childInnerHits) { protected Weight createInnerHitQueryWeight() throws IOException { final boolean needsScores = size() != 0 && (sort() == null || sort().sort.needsScores()); - return context.searcher().createNormalizedWeight(query(), needsScores); + return context.searcher().createWeight(context.searcher().rewrite(query()), + needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES, 1f); } public SearchContext parentSearchContext() { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java index 75d6211aca4bf..4d34a3afa620f 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java @@ -21,7 +21,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; -import org.apache.lucene.search.TopDocs; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -50,19 +50,19 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept for (Map.Entry entry : context.innerHits().getInnerHits().entrySet()) { InnerHitsContext.InnerHitSubContext innerHits = entry.getValue(); - TopDocs[] topDocs = innerHits.topDocs(hits); + TopDocsAndMaxScore[] topDocs = innerHits.topDocs(hits); for (int i = 0; i < hits.length; i++) { SearchHit hit = hits[i]; - TopDocs topDoc = topDocs[i]; + TopDocsAndMaxScore topDoc = topDocs[i]; Map results = hit.getInnerHits(); if (results == null) { hit.setInnerHits(results = new HashMap<>()); } innerHits.queryResult().topDocs(topDoc, innerHits.sort() == null ? 
null : innerHits.sort().formats); - int[] docIdsToLoad = new int[topDoc.scoreDocs.length]; - for (int j = 0; j < topDoc.scoreDocs.length; j++) { - docIdsToLoad[j] = topDoc.scoreDocs[j].doc; + int[] docIdsToLoad = new int[topDoc.topDocs.scoreDocs.length]; + for (int j = 0; j < topDoc.topDocs.scoreDocs.length; j++) { + docIdsToLoad[j] = topDoc.topDocs.scoreDocs[j].doc; } innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); innerHits.setUid(new Uid(hit.getType(), hit.getId())); @@ -70,7 +70,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept FetchSearchResult fetchResult = innerHits.fetchResult(); SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits(); for (int j = 0; j < internalHits.length; j++) { - ScoreDoc scoreDoc = topDoc.scoreDocs[j]; + ScoreDoc scoreDoc = topDoc.topDocs.scoreDocs[j]; SearchHit searchHitFields = internalHits[j]; searchHitFields.score(scoreDoc.score); if (scoreDoc instanceof FieldDoc) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java index c28e07ff45526..c2f6980781dba 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesFetchSubPhase.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -67,7 +68,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) { Query query = entry.getValue(); int readerIndex = -1; int docBase = -1; - Weight weight = context.searcher().createNormalizedWeight(query, false); + Weight weight = context.searcher().createWeight(context.searcher().rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f); Bits matchingDocs = null; final IndexReader indexReader = context.searcher().getIndexReader(); for (int i = 0; i < hits.length; ++i) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java new file mode 100644 index 0000000000000..3a6db72d5b31a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScoreFetchSubPhase.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.fetch.subphase; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; +import org.apache.lucene.search.Weight; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; +import java.util.Iterator; + +public class ScoreFetchSubPhase implements FetchSubPhase { + + @Override + public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException { + if (context.trackScores() == false || hits.length == 0 || + // scores were already computed since they are needed on the coordinated node to merge top hits + context.sort() == null) { + return; + } + + hits = hits.clone(); // don't modify the incoming hits + Arrays.sort(hits, Comparator.comparingInt(SearchHit::docId)); + + final IndexSearcher searcher = context.searcher(); + final Weight weight = searcher.createWeight(searcher.rewrite(context.query()), ScoreMode.COMPLETE, 1); + Iterator leafContextIterator = searcher.getIndexReader().leaves().iterator(); + LeafReaderContext leafContext = null; + Scorer scorer = null; + for (SearchHit hit : hits) { + if (leafContext == null || leafContext.docBase + leafContext.reader().maxDoc() <= hit.docId()) { + do { + leafContext = leafContextIterator.next(); + } while (leafContext == null || leafContext.docBase + leafContext.reader().maxDoc() <= hit.docId()); + ScorerSupplier scorerSupplier = weight.scorerSupplier(leafContext); + if (scorerSupplier == null) { + throw new IllegalStateException("Can't compute score on document " + hit + " as it doesn't match the query"); + } + scorer = scorerSupplier.get(1L); // random-access + } + + final int leafDocID = hit.docId() - leafContext.docBase; + assert leafDocID >= 0 && leafDocID < leafContext.reader().maxDoc(); + int advanced = scorer.iterator().advance(leafDocID); + if (advanced != leafDocID) { + throw new IllegalStateException("Can't compute score on document " + hit + " as it doesn't match the query"); + } + hit.score(scorer.score()); + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index a7eb0a953ba58..04a4629e9a875 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -22,7 +22,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.index.TermContext; +import org.apache.lucene.index.TermStates; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.Collector; @@ -31,6 +31,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.Weight; @@ -71,7 +72,7 @@ public ContextIndexSearcher(Engine.Searcher searcher, super(searcher.reader()); in = searcher.searcher(); 
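/*
 * [Editorial note, not part of the patch] The new ScoreFetchSubPhase above
 * re-scores one known document at a time. A minimal sketch of that
 * per-segment pattern, assuming `searcher`, `query` and a global `docId`,
 * with the document assumed to live in the first segment:
 *
 *     Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE, 1f);
 *     LeafReaderContext leaf = searcher.getIndexReader().leaves().get(0);
 *     Scorer scorer = weight.scorerSupplier(leaf).get(1L); // 1L hints random access
 *     int segDoc = docId - leaf.docBase;
 *     if (scorer.iterator().advance(segDoc) == segDoc) {
 *         float score = scorer.score(); // the doc matches the query; score it
 *     }
 */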
engineSearcher = searcher; - setSimilarity(searcher.searcher().getSimilarity(true)); + setSimilarity(searcher.searcher().getSimilarity()); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); } @@ -112,22 +113,7 @@ public Query rewrite(Query original) throws IOException { } @Override - public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException { - // During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher - // it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher... - if (aggregatedDfs != null && needsScores) { - // if scores are needed and we have dfs data then use it - return super.createNormalizedWeight(query, needsScores); - } else if (profiler != null) { - // we need to use the createWeight method to insert the wrappers - return super.createNormalizedWeight(query, needsScores); - } else { - return in.createNormalizedWeight(query, needsScores); - } - } - - @Override - public Weight createWeight(Query query, boolean needsScores, float boost) throws IOException { + public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws IOException { if (profiler != null) { // createWeight() is called for each query in the tree, so we tell the queryProfiler // each invocation so that it can build an internal representation of the query @@ -137,7 +123,7 @@ public Weight createWeight(Query query, boolean needsScores, float boost) throws timer.start(); final Weight weight; try { - weight = super.createWeight(query, needsScores, boost); + weight = super.createWeight(query, scoreMode, boost); } finally { timer.stop(); profiler.pollLastElement(); @@ -145,7 +131,7 @@ public Weight createWeight(Query query, boolean needsScores, float boost) throws return new ProfileWeight(query, weight, profile); } else { // needs to be 'super', not 'in' in order to use aggregated DFS - return super.createWeight(query, needsScores, boost); + return super.createWeight(query, scoreMode, boost); } } @@ -195,13 +181,13 @@ public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { public Explanation explain(Query query, int doc) throws IOException { if (aggregatedDfs != null) { // dfs data is needed to explain the score - return super.explain(createNormalizedWeight(query, true), doc); + return super.explain(createWeight(rewrite(query), ScoreMode.COMPLETE, 1f), doc); } return in.explain(query, doc); } @Override - public TermStatistics termStatistics(Term term, TermContext context) throws IOException { + public TermStatistics termStatistics(Term term, TermStates context) throws IOException { if (aggregatedDfs == null) { // we are either executing the dfs phase or the search_type doesn't include the dfs phase. 
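/*
 * [Editorial note, not part of the patch] The recurring migration in this
 * file and throughout the patch: Lucene 8 removes
 * createNormalizedWeight(query, needsScores). Callers now rewrite the query
 * themselves and pass an explicit ScoreMode and boost, e.g. (sketch):
 *
 *     // Lucene 7: Weight w = searcher.createNormalizedWeight(query, needsScores);
 *     Weight w = searcher.createWeight(searcher.rewrite(query),
 *             needsScores ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES, 1f);
 *
 * The same boolean-to-enum mapping is how the needsScores() overrides on
 * collectors and aggregators become scoreMode() elsewhere in this change.
 */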
return super.termStatistics(term, context); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ScrollContext.java b/server/src/main/java/org/elasticsearch/search/internal/ScrollContext.java index 75d48d5d63798..41d7680a780b0 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ScrollContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ScrollContext.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.search.Scroll; import java.util.HashMap; @@ -30,8 +31,8 @@ public final class ScrollContext { private Map context = null; - public long totalHits = -1; - public float maxScore; + public TotalHits totalHits = null; + public float maxScore = Float.NaN; public ScoreDoc lastEmittedDoc; public Scroll scroll; diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java index 0cdeb458a3031..16388fa789aff 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingAggregator.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.profile.aggregation; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.ScoreMode; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -45,8 +46,8 @@ public void close() { } @Override - public boolean needsScores() { - return delegate.needsScores(); + public ScoreMode scoreMode() { + return delegate.scoreMode(); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/InternalProfileCollector.java b/server/src/main/java/org/elasticsearch/search/profile/query/InternalProfileCollector.java index e892abaab2249..993d91ab7a18c 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/InternalProfileCollector.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/InternalProfileCollector.java @@ -22,6 +22,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreMode; import java.io.IOException; import java.util.ArrayList; @@ -116,8 +117,8 @@ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOExcept } @Override - public boolean needsScores() { - return collector.needsScores(); + public ScoreMode scoreMode() { + return collector.scoreMode(); } public CollectorResult getCollectorTree() { diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java index ea8dbb2f335ca..940e3902954b5 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.FilterCollector; import org.apache.lucene.search.FilterLeafCollector; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import java.io.IOException; @@ -44,10 +45,10 @@ public 
Collector getDelegate() { } @Override - public boolean needsScores() { + public ScoreMode scoreMode() { final long start = System.nanoTime(); try { - return super.needsScores(); + return super.scoreMode(); } finally { time += Math.max(1, System.nanoTime() - start); } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index 66e0e0fe77cfe..8913f484847e6 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -36,7 +36,7 @@ final class ProfileScorer extends Scorer { private final Scorer scorer; private ProfileWeight profileWeight; - private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer; + private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer; ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) throws IOException { super(w); @@ -46,6 +46,8 @@ final class ProfileScorer extends Scorer { nextDocTimer = profile.getTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getTimer(QueryTimingType.ADVANCE); matchTimer = profile.getTimer(QueryTimingType.MATCH); + shallowAdvanceTimer = profile.getTimer(QueryTimingType.SHALLOW_ADVANCE); + computeMaxScoreTimer = profile.getTimer(QueryTimingType.COMPUTE_MAX_SCORE); } @Override @@ -166,4 +168,24 @@ public float matchCost() { } }; } + + @Override + public int advanceShallow(int target) throws IOException { + shallowAdvanceTimer.start(); + try { + return scorer.advanceShallow(target); + } finally { + shallowAdvanceTimer.stop(); + } + } + + @Override + public float getMaxScore(int upTo) throws IOException { + computeMaxScoreTimer.start(); + try { + return scorer.getMaxScore(upTo); + } finally { + computeMaxScoreTimer.stop(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/QueryTimingType.java b/server/src/main/java/org/elasticsearch/search/profile/query/QueryTimingType.java index 5f194a7d5f10d..146bd8f07bcd1 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/QueryTimingType.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/QueryTimingType.java @@ -27,7 +27,9 @@ public enum QueryTimingType { NEXT_DOC, ADVANCE, MATCH, - SCORE; + SCORE, + SHALLOW_ADVANCE, + COMPUTE_MAX_SCORE; @Override public String toString() { diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java b/server/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java index ff80dda77fb6d..f0c94bd822edf 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryCollectorContext.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.MinimumScoreCollector; import org.elasticsearch.common.lucene.search.FilteredCollector; @@ -114,7 +115,7 @@ static QueryCollectorContext createFilteredCollectorContext(IndexSearcher search return new QueryCollectorContext(REASON_SEARCH_POST_FILTER) { @Override Collector create(Collector in ) throws IOException { - final Weight filterWeight = searcher.createNormalizedWeight(query, false); + 
final Weight filterWeight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f); return new FilteredCollector(in, filterWeight); } }; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 84c76e85f3dd0..e4f0aa6898ad8 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.EarlyTerminatingSortingCollector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; @@ -35,9 +34,11 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.Counter; import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.QueueResizingEsThreadPoolExecutor; @@ -94,8 +95,8 @@ public void execute(SearchContext searchContext) throws QueryPhaseExecutionExcep if (searchContext.hasOnlySuggest()) { suggestPhase.execute(searchContext); // TODO: fix this once we can fetch docs for suggestions - searchContext.queryResult().topDocs( - new TopDocs(0, Lucene.EMPTY_SCORE_DOCS, Float.NaN), + searchContext.queryResult().topDocs(new TopDocsAndMaxScore( + new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Lucene.EMPTY_SCORE_DOCS), Float.NaN), new DocValueFormat[0]); return; } @@ -138,7 +139,7 @@ static boolean execute(SearchContext searchContext, final ScrollContext scrollContext = searchContext.scrollContext(); if (scrollContext != null) { - if (scrollContext.totalHits == -1) { + if (scrollContext.totalHits == null) { // first round assert scrollContext.lastEmittedDoc == null; // there is not much that we can optimize here since we want to collect all @@ -268,7 +269,7 @@ static boolean execute(SearchContext searchContext, queryResult.terminatedEarly(true); } catch (TimeExceededException e) { assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set"; - + if (searchContext.request().allowPartialSearchResults() == false) { // Can't rethrow TimeExceededException because not serializable throw new QueryPhaseExecutionException(searchContext, "Time exceeded"); @@ -327,7 +328,7 @@ static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sortAndForma final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { Sort indexSort = ctx.reader().getMetaData().getSort(); - if (indexSort == null || EarlyTerminatingSortingCollector.canEarlyTerminate(sort, indexSort) == false) { + if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } } diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 83c43d10172c2..2aded57ece04c 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ 
b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -20,10 +20,11 @@ package org.elasticsearch.search.query; import org.apache.lucene.search.FieldDoc; -import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -46,7 +47,10 @@ public final class QuerySearchResult extends SearchPhaseResult { private int from; private int size; - private TopDocs topDocs; + private TopDocsAndMaxScore topDocsAndMaxScore; + private boolean hasScoreDocs; + private TotalHits totalHits; + private float maxScore = Float.NaN; private DocValueFormat[] sortValueFormats; private InternalAggregations aggregations; private boolean hasAggs; @@ -56,9 +60,6 @@ public final class QuerySearchResult extends SearchPhaseResult { private Boolean terminatedEarly = null; private ProfileShardResult profileShardResults; private boolean hasProfileResults; - private boolean hasScoreDocs; - private long totalHits; - private float maxScore; private long serviceTimeEWMA = -1; private int nodeQueueSize = -1; @@ -92,37 +93,37 @@ public Boolean terminatedEarly() { return this.terminatedEarly; } - public TopDocs topDocs() { - if (topDocs == null) { + public TopDocsAndMaxScore topDocs() { + if (topDocsAndMaxScore == null) { throw new IllegalStateException("topDocs already consumed"); } - return topDocs; + return topDocsAndMaxScore; } /** * Returns true iff the top docs have already been consumed. */ public boolean hasConsumedTopDocs() { - return topDocs == null; + return topDocsAndMaxScore == null; } /** * Returns and nulls out the top docs for this search results. This allows to free up memory once the top docs are consumed. * @throws IllegalStateException if the top docs have already been consumed. 
*/ - public TopDocs consumeTopDocs() { - TopDocs topDocs = this.topDocs; - if (topDocs == null) { + public TopDocsAndMaxScore consumeTopDocs() { + TopDocsAndMaxScore topDocsAndMaxScore = this.topDocsAndMaxScore; + if (topDocsAndMaxScore == null) { throw new IllegalStateException("topDocs already consumed"); } - this.topDocs = null; - return topDocs; + this.topDocsAndMaxScore = null; + return topDocsAndMaxScore; } - public void topDocs(TopDocs topDocs, DocValueFormat[] sortValueFormats) { + public void topDocs(TopDocsAndMaxScore topDocs, DocValueFormat[] sortValueFormats) { setTopDocs(topDocs); - if (topDocs.scoreDocs.length > 0 && topDocs.scoreDocs[0] instanceof FieldDoc) { - int numFields = ((FieldDoc) topDocs.scoreDocs[0]).fields.length; + if (topDocs.topDocs.scoreDocs.length > 0 && topDocs.topDocs.scoreDocs[0] instanceof FieldDoc) { + int numFields = ((FieldDoc) topDocs.topDocs.scoreDocs[0]).fields.length; if (numFields != sortValueFormats.length) { throw new IllegalArgumentException("The number of sort fields does not match: " + numFields + " != " + sortValueFormats.length); @@ -131,11 +132,11 @@ public void topDocs(TopDocs topDocs, DocValueFormat[] sortValueFormats) { this.sortValueFormats = sortValueFormats; } - private void setTopDocs(TopDocs topDocs) { - this.topDocs = topDocs; - hasScoreDocs = topDocs.scoreDocs.length > 0; - this.totalHits = topDocs.totalHits; - this.maxScore = topDocs.getMaxScore(); + private void setTopDocs(TopDocsAndMaxScore topDocsAndMaxScore) { + this.topDocsAndMaxScore = topDocsAndMaxScore; + this.totalHits = topDocsAndMaxScore.topDocs.totalHits; + this.maxScore = topDocsAndMaxScore.maxScore; + this.hasScoreDocs = topDocsAndMaxScore.topDocs.scoreDocs.length > 0; } public DocValueFormat[] sortValueFormats() { @@ -326,7 +327,7 @@ public void writeToNoId(StreamOutput out) throws IOException { out.writeNamedWriteable(sortValueFormats[i]); } } - writeTopDocs(out, topDocs); + writeTopDocs(out, topDocsAndMaxScore); if (aggregations == null) { out.writeBoolean(false); } else { @@ -349,7 +350,7 @@ public void writeToNoId(StreamOutput out) throws IOException { } } - public long getTotalHits() { + public TotalHits getTotalHits() { return totalHits; } diff --git a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java index 8d40cc802fffd..d1b115ff68006 100644 --- a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java @@ -37,9 +37,14 @@ import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopScoreDocCollector; import org.apache.lucene.search.TotalHitCountCollector; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; +import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; +import org.elasticsearch.common.util.CachedSupplier; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.collapse.CollapseContext; import org.elasticsearch.search.internal.ScrollContext; @@ -49,7 +54,6 @@ import java.io.IOException; import java.util.Objects; -import java.util.function.IntSupplier; import java.util.function.Supplier; import static 
org.elasticsearch.search.profile.query.CollectorResult.REASON_SEARCH_COUNT; @@ -82,7 +86,7 @@ boolean shouldRescore() { static class EmptyTopDocsCollectorContext extends TopDocsCollectorContext { private final Collector collector; - private final IntSupplier hitCountSupplier; + private final Supplier hitCountSupplier; /** * Ctr @@ -100,15 +104,15 @@ private EmptyTopDocsCollectorContext(IndexReader reader, Query query, int hitCount = hasFilterCollector ? -1 : shortcutTotalHitCount(reader, query); if (hitCount == -1) { this.collector = hitCountCollector; - this.hitCountSupplier = hitCountCollector::getTotalHits; + this.hitCountSupplier = () -> new TotalHits(hitCountCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO); } else { this.collector = new EarlyTerminatingCollector(hitCountCollector, 0, false); - this.hitCountSupplier = () -> hitCount; + this.hitCountSupplier = () -> new TotalHits(hitCount, TotalHits.Relation.EQUAL_TO); } } else { this.collector = new EarlyTerminatingCollector(new TotalHitCountCollector(), 0, false); // for bwc hit count is set to 0, it will be converted to -1 by the coordinating node - this.hitCountSupplier = () -> 0; + this.hitCountSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -119,14 +123,15 @@ Collector create(Collector in) { @Override void postProcess(QuerySearchResult result) { - final int totalHitCount = hitCountSupplier.getAsInt(); - result.topDocs(new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS, Float.NaN), null); + final TotalHits totalHitCount = hitCountSupplier.get(); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(totalHitCount, Lucene.EMPTY_SCORE_DOCS), Float.NaN), null); } } static class CollapsingTopDocsCollectorContext extends TopDocsCollectorContext { private final DocValueFormat[] sortFmt; private final CollapsingTopDocsCollector topDocsCollector; + private final Supplier maxScoreSupplier; /** * Ctr @@ -144,7 +149,15 @@ private CollapsingTopDocsCollectorContext(CollapseContext collapseContext, assert collapseContext != null; Sort sort = sortAndFormats == null ? Sort.RELEVANCE : sortAndFormats.sort; this.sortFmt = sortAndFormats == null ? 
new DocValueFormat[] { DocValueFormat.RAW } : sortAndFormats.formats; - this.topDocsCollector = collapseContext.createTopDocs(sort, numHits, trackMaxScore); + this.topDocsCollector = collapseContext.createTopDocs(sort, numHits); + + MaxScoreCollector maxScoreCollector = null; + if (trackMaxScore) { + maxScoreCollector = new MaxScoreCollector(); + maxScoreSupplier = maxScoreCollector::getMaxScore; + } else { + maxScoreSupplier = () -> Float.NaN; + } } @Override @@ -155,15 +168,17 @@ Collector create(Collector in) throws IOException { @Override void postProcess(QuerySearchResult result) throws IOException { - result.topDocs(topDocsCollector.getTopDocs(), sortFmt); + CollapseTopFieldDocs topDocs = topDocsCollector.getTopDocs(); + result.topDocs(new TopDocsAndMaxScore(topDocs, maxScoreSupplier.get()), sortFmt); } } abstract static class SimpleTopDocsCollectorContext extends TopDocsCollectorContext { private final @Nullable SortAndFormats sortAndFormats; private final Collector collector; - private final IntSupplier totalHitsSupplier; + private final Supplier totalHitsSupplier; private final Supplier topDocsSupplier; + private final Supplier maxScoreSupplier; /** * Ctr @@ -187,37 +202,52 @@ private SimpleTopDocsCollectorContext(IndexReader reader, super(REASON_SEARCH_TOP_HITS, numHits); this.sortAndFormats = sortAndFormats; if (sortAndFormats == null) { - final TopDocsCollector topDocsCollector = TopScoreDocCollector.create(numHits, searchAfter); + final TopDocsCollector topDocsCollector = TopScoreDocCollector.create(numHits, searchAfter, Integer.MAX_VALUE); this.collector = topDocsCollector; - this.topDocsSupplier = topDocsCollector::topDocs; - this.totalHitsSupplier = topDocsCollector::getTotalHits; + this.topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); + this.totalHitsSupplier = () -> topDocsSupplier.get().totalHits; + this.maxScoreSupplier = () -> { + TopDocs topDocs = topDocsSupplier.get(); + if (topDocs.scoreDocs.length == 0) { + return Float.NaN; + } else { + return topDocs.scoreDocs[0].score; + } + }; } else { /** * We explicitly don't track total hits in the topdocs collector, it can early terminate * if the sort matches the index sort. */ final TopDocsCollector topDocsCollector = TopFieldCollector.create(sortAndFormats.sort, numHits, - (FieldDoc) searchAfter, true, trackMaxScore, trackMaxScore, false); - this.topDocsSupplier = topDocsCollector::topDocs; + (FieldDoc) searchAfter, 1); + this.topDocsSupplier = new CachedSupplier<>(topDocsCollector::topDocs); + TotalHitCountCollector hitCountCollector = null; if (trackTotalHits) { // implicit total hit counts are valid only when there is no filter collector in the chain int count = hasFilterCollector ? 
-1 : shortcutTotalHitCount(reader, query); if (count != -1) { // we can extract the total count from the shard statistics directly - this.totalHitsSupplier = () -> count; - this.collector = topDocsCollector; + this.totalHitsSupplier = () -> new TotalHits(count, TotalHits.Relation.EQUAL_TO); } else { // wrap a collector that counts the total number of hits even // if the top docs collector terminates early final TotalHitCountCollector countingCollector = new TotalHitCountCollector(); - this.collector = MultiCollector.wrap(topDocsCollector, countingCollector); - this.totalHitsSupplier = countingCollector::getTotalHits; + hitCountCollector = countingCollector; + this.totalHitsSupplier = () -> new TotalHits(countingCollector.getTotalHits(), TotalHits.Relation.EQUAL_TO); } } else { // total hit count is not needed - this.collector = topDocsCollector; - this.totalHitsSupplier = topDocsCollector::getTotalHits; + this.totalHitsSupplier = () -> topDocsSupplier.get().totalHits; + } + MaxScoreCollector maxScoreCollector = null; + if (trackMaxScore) { + maxScoreCollector = new MaxScoreCollector(); + maxScoreSupplier = maxScoreCollector::getMaxScore; + } else { + maxScoreSupplier = () -> Float.NaN; } + collector = MultiCollector.wrap(topDocsCollector, hitCountCollector, maxScoreCollector); } } @@ -230,8 +260,8 @@ Collector create(Collector in) { @Override void postProcess(QuerySearchResult result) throws IOException { final TopDocs topDocs = topDocsSupplier.get(); - topDocs.totalHits = totalHitsSupplier.getAsInt(); - result.topDocs(topDocs, sortAndFormats == null ? null : sortAndFormats.formats); + topDocs.totalHits = totalHitsSupplier.get(); + result.topDocs(new TopDocsAndMaxScore(topDocs, maxScoreSupplier.get()), sortAndFormats == null ? null : sortAndFormats.formats); } } @@ -257,22 +287,22 @@ private ScrollingTopDocsCollectorContext(IndexReader reader, @Override void postProcess(QuerySearchResult result) throws IOException { super.postProcess(result); - final TopDocs topDocs = result.topDocs(); - if (scrollContext.totalHits == -1) { + final TopDocsAndMaxScore topDocs = result.topDocs(); + if (scrollContext.totalHits == null) { // first round - scrollContext.totalHits = topDocs.totalHits; - scrollContext.maxScore = topDocs.getMaxScore(); + scrollContext.totalHits = topDocs.topDocs.totalHits; + scrollContext.maxScore = topDocs.maxScore; } else { // subsequent round: the total number of hits and // the maximum score were computed on the first round - topDocs.totalHits = scrollContext.totalHits; - topDocs.setMaxScore(scrollContext.maxScore); + topDocs.topDocs.totalHits = scrollContext.totalHits; + topDocs.maxScore = scrollContext.maxScore; } if (numberOfShards == 1) { // if we fetch the document in the same roundtrip, we already know the last emitted doc - if (topDocs.scoreDocs.length > 0) { + if (topDocs.topDocs.scoreDocs.length > 0) { // set the last emitted doc - scrollContext.lastEmittedDoc = topDocs.scoreDocs[topDocs.scoreDocs.length - 1]; + scrollContext.lastEmittedDoc = topDocs.topDocs.scoreDocs[topDocs.topDocs.scoreDocs.length - 1]; } } result.topDocs(topDocs, result.sortValueFormats()); @@ -334,8 +364,7 @@ static TopDocsCollectorContext createTopDocsCollectorContext(SearchContext searc } else if (searchContext.collapse() != null) { boolean trackScores = searchContext.sort() == null ? 
true : searchContext.trackScores(); int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs); - return new CollapsingTopDocsCollectorContext(searchContext.collapse(), - searchContext.sort(), numDocs, trackScores); + return new CollapsingTopDocsCollectorContext(searchContext.collapse(), searchContext.sort(), numDocs, trackScores); } else { int numDocs = Math.min(searchContext.from() + searchContext.size(), totalNumDocs); final boolean rescore = searchContext.rescore().isEmpty() == false; diff --git a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java index 4a9567a32c06a..61bd150291d9f 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/QueryRescorer.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import java.io.IOException; @@ -41,7 +42,7 @@ public final class QueryRescorer implements Rescorer { public TopDocs rescore(TopDocs topDocs, IndexSearcher searcher, RescoreContext rescoreContext) throws IOException { assert rescoreContext != null; - if (topDocs == null || topDocs.totalHits == 0 || topDocs.scoreDocs.length == 0) { + if (topDocs == null || topDocs.scoreDocs.length == 0) { return topDocs; } @@ -87,7 +88,7 @@ public Explanation explain(int topLevelDocId, IndexSearcher searcher, RescoreCon Explanation prim; if (sourceExplanation.isMatch()) { prim = Explanation.match( - sourceExplanation.getValue() * primaryWeight, + sourceExplanation.getValue().floatValue() * primaryWeight, "product of:", sourceExplanation, Explanation.match(primaryWeight, "primaryWeight")); } else { prim = Explanation.noMatch("First pass did not match", sourceExplanation); @@ -99,12 +100,12 @@ public Explanation explain(int topLevelDocId, IndexSearcher searcher, RescoreCon if (rescoreExplain != null && rescoreExplain.isMatch()) { float secondaryWeight = rescore.rescoreQueryWeight(); Explanation sec = Explanation.match( - rescoreExplain.getValue() * secondaryWeight, + rescoreExplain.getValue().floatValue() * secondaryWeight, "product of:", rescoreExplain, Explanation.match(secondaryWeight, "secondaryWeight")); QueryRescoreMode scoreMode = rescore.scoreMode(); return Explanation.match( - scoreMode.combine(prim.getValue(), sec.getValue()), + scoreMode.combine(prim.getValue().floatValue(), sec.getValue().floatValue()), scoreMode + " of:", prim, sec); } @@ -123,15 +124,14 @@ public int compare(ScoreDoc o1, ScoreDoc o2) { /** Returns a new {@link TopDocs} with the topN from the incoming one, or the same TopDocs if the number of hits is already <= * topN. */ private TopDocs topN(TopDocs in, int topN) { - if (in.totalHits < topN) { - assert in.scoreDocs.length == in.totalHits; + if (in.scoreDocs.length < topN) { return in; } ScoreDoc[] subset = new ScoreDoc[topN]; System.arraycopy(in.scoreDocs, 0, subset, 0, topN); - return new TopDocs(in.totalHits, subset, in.getMaxScore()); + return new TopDocs(in.totalHits, subset); } /** Modifies incoming TopDocs (in) by replacing the top hits with resorted's hits, and then resorting all hits. 
*/ @@ -151,8 +151,6 @@ private TopDocs combine(TopDocs in, TopDocs resorted, QueryRescoreContext ctx) { // incoming first pass hits, instead of allowing rescoring of just the top subset: Arrays.sort(in.scoreDocs, SCORE_DOC_COMPARATOR); } - // update the max score after the resort - in.setMaxScore(in.scoreDocs[0].score); return in; } @@ -206,7 +204,8 @@ public void setScoreMode(String scoreMode) { @Override public void extractTerms(IndexSearcher searcher, RescoreContext rescoreContext, Set<Term> termsSet) throws IOException { - searcher.createNormalizedWeight(((QueryRescoreContext) rescoreContext).query(), false).extractTerms(termsSet); + Query query = ((QueryRescoreContext) rescoreContext).query(); + searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f).extractTerms(termsSet); } } diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java index 7baaa61bbb8c5..7f5a1be285d8e 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescorePhase.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.TopDocs; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchPhase; import org.elasticsearch.search.internal.SearchContext; @@ -44,15 +45,19 @@ public void preProcess(SearchContext context) { @Override public void execute(SearchContext context) { + TopDocs topDocs = context.queryResult().topDocs().topDocs; + if (topDocs.scoreDocs.length == 0) { + return; + } try { - TopDocs topDocs = context.queryResult().topDocs(); for (RescoreContext ctx : context.rescore()) { topDocs = ctx.rescorer().rescore(topDocs, context.searcher(), ctx); // It is the responsibility of the rescorer to sort the resulting top docs, // here we only assert that this condition is met. 
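
createNormalizedWeight(query, needsScores) is gone in Lucene 8; as the extractTerms hunk above shows, callers now rewrite the query themselves and pass an explicit ScoreMode and boost. A sketch of the caller-side idiom (the helper name is hypothetical):

    import java.io.IOException;
    import java.util.HashSet;
    import java.util.Set;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.Weight;

    final class ExtractTermsExample {
        static Set<Term> extractTerms(IndexSearcher searcher, Query query) throws IOException {
            Set<Term> terms = new HashSet<>();
            // Rewrite first (createNormalizedWeight used to do this internally), then
            // build the Weight; COMPLETE_NO_SCORES replaces the old needsScores=false flag.
            Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
            weight.extractTerms(terms);
            return terms;
        }
    }
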
assert context.sort() == null && topDocsSortedByScore(topDocs): "topdocs should be sorted after rescore"; } - context.queryResult().topDocs(topDocs, context.queryResult().sortValueFormats()); + context.queryResult().topDocs(new TopDocsAndMaxScore(topDocs, topDocs.scoreDocs[0].score), + context.queryResult().sortValueFormats()); } catch (IOException e) { throw new ElasticsearchException("Rescore Phase Failed", e); } diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index c1aaad04d1d49..f2cf854947fd8 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -27,6 +27,7 @@ import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -45,7 +46,7 @@ public DocValuesSliceQuery(String field, int id, int max) { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index da1b98822cf19..1a10770fe9d2b 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Weight; @@ -55,7 +56,7 @@ public TermsSliceQuery(String field, int id, int max) { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 5690acd7abd97..7dc63a8daac78 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -84,7 +84,7 @@ protected Suggest.Suggestion suggestDocs = new ArrayList(size); + final List suggestDocs = new ArrayList<>(size); final CharArraySet seenSurfaceForms = doSkipDuplicates() ? 
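
On the implementor side, the two slice-query hunks above swap the boolean needsScores parameter of Query.createWeight for the ScoreMode enum. A minimal constant-scoring query showing the new override, written against the Lucene 8.0 snapshot this series targets (later 8.x releases add a ScoreMode argument to ConstantScoreScorer); the match-all behavior is purely illustrative:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.ConstantScoreScorer;
    import org.apache.lucene.search.ConstantScoreWeight;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreMode;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    public final class MatchAllSliceQuery extends Query {
        @Override
        public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
            return new ConstantScoreWeight(this, boost) {
                @Override
                public Scorer scorer(LeafReaderContext context) throws IOException {
                    // Matches every doc in the segment at a constant score.
                    return new ConstantScoreScorer(this, score(),
                            DocIdSetIterator.all(context.reader().maxDoc()));
                }

                @Override
                public boolean isCacheable(LeafReaderContext ctx) {
                    return true; // no doc values or other per-leaf state involved
                }
            };
        }

        @Override
        public String toString(String field) {
            return "MatchAllSliceQuery";
        }

        @Override
        public boolean equals(Object other) {
            return sameClassAs(other);
        }

        @Override
        public int hashCode() {
            return classHash();
        }
    }
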
new CharArraySet(size, false) : null; for (TopSuggestDocs.SuggestScoreDoc suggestEntry : entries.scoreLookupDocs()) { final SuggestDoc suggestDoc; @@ -209,8 +209,8 @@ public TopSuggestDocs get() throws IOException { } suggestDocs.add(suggestDoc); } - return new TopSuggestDocs((int) entries.totalHits, - suggestDocs.toArray(new TopSuggestDocs.SuggestScoreDoc[0]), entries.getMaxScore()); + return new TopSuggestDocs(entries.totalHits, + suggestDocs.toArray(new TopSuggestDocs.SuggestScoreDoc[0])); } } } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java index 7b7584f4674cc..6fdff8d18eba0 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorBuilder.java @@ -21,7 +21,7 @@ import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.search.spell.LuceneLevenshteinDistance; import org.apache.lucene.search.spell.NGramDistance; import org.apache.lucene.search.spell.StringDistance; @@ -466,7 +466,7 @@ static StringDistance resolveDistance(String distanceVal) { } else if ("damerau_levenshtein".equals(distanceVal)) { return new LuceneLevenshteinDistance(); } else if ("levenshtein".equals(distanceVal)) { - return new LevensteinDistance(); + return new LevenshteinDistance(); } else if ("jaro_winkler".equals(distanceVal)) { return new JaroWinklerDistance(); } else if ("ngram".equals(distanceVal)) { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java index fdc31dd6c2fca..ad6a8b4acf354 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/term/TermSuggestionBuilder.java @@ -21,7 +21,7 @@ import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.search.spell.LuceneLevenshteinDistance; import org.apache.lucene.search.spell.NGramDistance; import org.apache.lucene.search.spell.StringDistance; @@ -548,7 +548,7 @@ public StringDistance toLucene() { LEVENSHTEIN { @Override public StringDistance toLucene() { - return new LevensteinDistance(); + return new LevenshteinDistance(); } }, /** String distance algorithm based on Jaro-Winkler algorithm. 
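
The suggester hunks above and below pick up a plain rename: Lucene's misspelled LevensteinDistance class is now LevenshteinDistance, with identical behavior. Only the import changes; usage, for reference:

    import org.apache.lucene.search.spell.LevenshteinDistance;
    import org.apache.lucene.search.spell.StringDistance;

    final class DistanceExample {
        static float similarity(String a, String b) {
            StringDistance distance = new LevenshteinDistance(); // was: new LevensteinDistance()
            return distance.getDistance(a, b); // 0..1, where 1 means identical
        }
    }
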
*/ diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java index bce5965e50b6b..50c80b8e4350d 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -28,23 +28,26 @@ import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.CheckHits; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TopFieldCollector; import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.grouping.CollapsingTopDocsCollector; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.apache.lucene.util.TestUtil; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -54,7 +57,6 @@ import java.util.List; import java.util.Set; -import static org.hamcrest.core.IsEqual.equalTo; public class CollapsingTopDocsCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { @@ -84,15 +86,12 @@ interface CollapsingDocValuesProducer> { } > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { - assertSearchCollapse(dvProducers, numeric, true, true); - assertSearchCollapse(dvProducers, numeric, true, false); - assertSearchCollapse(dvProducers, numeric, false, true); - assertSearchCollapse(dvProducers, numeric, false, false); + assertSearchCollapse(dvProducers, numeric, true); + assertSearchCollapse(dvProducers, numeric, false); } private > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, - boolean numeric, boolean multivalued, - boolean trackMaxScores) throws IOException { + boolean numeric, boolean multivalued) throws IOException { final int numDocs = randomIntBetween(1000, 2000); int maxGroup = randomIntBetween(2, 500); final Directory dir = newDirectory(); @@ -123,29 +122,25 @@ private > void assertSearchCollapse(CollapsingDocValuesP final CollapsingTopDocsCollector collapsingCollector; if (numeric) { collapsingCollector = - CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); + CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups); } else { collapsingCollector = - CollapsingTopDocsCollector.createKeyword(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); + CollapsingTopDocsCollector.createKeyword(collapseField.getField(), sort, expectedNumGroups); } TopFieldCollector topFieldCollector = - TopFieldCollector.create(sort, totalHits, true, trackMaxScores, trackMaxScores, true); - - searcher.search(new MatchAllDocsQuery(), collapsingCollector); - 
searcher.search(new MatchAllDocsQuery(), topFieldCollector); + TopFieldCollector.create(sort, totalHits, Integer.MAX_VALUE); + Query query = new MatchAllDocsQuery(); + searcher.search(query, collapsingCollector); + searcher.search(query, topFieldCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); TopFieldDocs topDocs = topFieldCollector.topDocs(); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits); - if (trackMaxScores) { - assertThat(collapseTopFieldDocs.getMaxScore(), equalTo(topDocs.getMaxScore())); - } else { - assertThat(collapseTopFieldDocs.getMaxScore(), equalTo(Float.NaN)); - } + assertEquals(totalHits, topDocs.totalHits.value); Set seen = new HashSet<>(); // collapse field is the last sort @@ -170,7 +165,6 @@ private > void assertSearchCollapse(CollapsingDocValuesP assertTrue(seen.contains(fieldDoc.fields[collapseIndex])); } - // check merge final IndexReaderContext ctx = searcher.getTopReaderContext(); final SegmentSearcher[] subSearchers; @@ -196,27 +190,27 @@ private > void assertSearchCollapse(CollapsingDocValuesP } final CollapseTopFieldDocs[] shardHits = new CollapseTopFieldDocs[subSearchers.length]; - final Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), true); + final Weight weight = searcher.createWeight(searcher.rewrite(new MatchAllDocsQuery()), ScoreMode.COMPLETE, 1f); for (int shardIDX = 0; shardIDX < subSearchers.length; shardIDX++) { final SegmentSearcher subSearcher = subSearchers[shardIDX]; final CollapsingTopDocsCollector c; if (numeric) { - c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); + c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups); } else { - c = CollapsingTopDocsCollector.createKeyword(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); + c = CollapsingTopDocsCollector.createKeyword(collapseField.getField(), sort, expectedNumGroups); } subSearcher.search(weight, c); shardHits[shardIDX] = c.getTopDocs(); } CollapseTopFieldDocs mergedFieldDocs = CollapseTopFieldDocs.merge(sort, 0, expectedNumGroups, shardHits, true); - assertTopDocsEquals(mergedFieldDocs, collapseTopFieldDocs); + assertTopDocsEquals(query, mergedFieldDocs, collapseTopFieldDocs); w.close(); reader.close(); dir.close(); } - private static void assertTopDocsEquals(CollapseTopFieldDocs topDocs1, CollapseTopFieldDocs topDocs2) { - TestUtil.assertEquals(topDocs1, topDocs2); + private static void assertTopDocsEquals(Query query, CollapseTopFieldDocs topDocs1, CollapseTopFieldDocs topDocs2) { + CheckHits.checkEqual(query, topDocs1.scoreDocs, topDocs2.scoreDocs); assertArrayEquals(topDocs1.collapseValues, topDocs2.collapseValues); } @@ -384,7 +378,7 @@ public void testEmptyNumericSegment() throws Exception { sortField.setMissingValue(Long.MAX_VALUE); Sort sort = new Sort(sortField); final CollapsingTopDocsCollector collapsingCollector = - CollapsingTopDocsCollector.createNumeric("group", sort, 10, false); + CollapsingTopDocsCollector.createNumeric("group", sort, 10); searcher.search(new MatchAllDocsQuery(), 
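
The test rewrite above uses the new TopFieldCollector.create(sort, numHits, totalHitsThreshold) factory: the old boolean flags (fillFields/trackDocScores/trackMaxScore/trackTotalHits) are gone, and the threshold decides how long counting stays exact. A sketch of the trade-off, assuming searcher, query and sort exist:

    import java.io.IOException;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.TopFieldCollector;
    import org.apache.lucene.search.TotalHits;

    final class CountingExample {
        /** Collects the top 10 by sort; the threshold controls how far counting stays exact. */
        static TotalHits countWithThreshold(IndexSearcher searcher, Query query, Sort sort,
                                            int totalHitsThreshold) throws IOException {
            // A threshold of Integer.MAX_VALUE forces an exact count (relation EQUAL_TO);
            // a smaller threshold lets Lucene stop counting early and report a lower
            // bound (relation GREATER_THAN_OR_EQUAL_TO) in totalHits.value.
            TopFieldCollector collector = TopFieldCollector.create(sort, 10, totalHitsThreshold);
            searcher.search(query, collector);
            return collector.topDocs().totalHits;
        }
    }

Passing Integer.MAX_VALUE, as the test does, guarantees totalHits.relation == EQUAL_TO at the cost of visiting every hit.
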
collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); assertEquals(4, collapseTopFieldDocs.scoreDocs.length); @@ -420,7 +414,7 @@ public void testEmptySortedSegment() throws Exception { final IndexSearcher searcher = newSearcher(reader); Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); final CollapsingTopDocsCollector collapsingCollector = - CollapsingTopDocsCollector.createKeyword("group", sort, 10, false); + CollapsingTopDocsCollector.createKeyword("group", sort, 10); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); assertEquals(4, collapseTopFieldDocs.scoreDocs.length); diff --git a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index 8f96936e43b55..5b37b4bf48178 100644 --- a/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryUtils; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.similarities.BM25Similarity; @@ -63,15 +64,12 @@ public void testDismaxQuery() throws IOException { "generator", "foo fighers - generator", "foo fighters generator" }; final boolean omitNorms = random().nextBoolean(); + final boolean omitFreqs = random().nextBoolean(); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); - ft.setIndexOptions(random().nextBoolean() ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS); + ft.setIndexOptions(omitFreqs ? IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS); ft.setOmitNorms(omitNorms); ft.freeze(); - FieldType ft1 = new FieldType(TextField.TYPE_NOT_STORED); - ft1.setIndexOptions(random().nextBoolean() ? 
IndexOptions.DOCS : IndexOptions.DOCS_AND_FREQS); - ft1.setOmitNorms(omitNorms); - ft1.freeze(); for (int i = 0; i < username.length; i++) { Document d = new Document(); d.add(new TextField("id", Integer.toString(i), Field.Store.YES)); @@ -83,8 +81,8 @@ public void testDismaxQuery() throws IOException { for (int j = 0; j < iters; j++) { Document d = new Document(); d.add(new TextField("id", Integer.toString(username.length + j), Field.Store.YES)); - d.add(new Field("username", "foo fighters", ft1)); - d.add(new Field("song", "some bogus text to bump up IDF", ft1)); + d.add(new Field("username", "foo fighters", ft)); + d.add(new Field("song", "some bogus text to bump up IDF", ft)); w.addDocument(d); } w.commit(); @@ -167,7 +165,7 @@ public void testExtractTerms() throws IOException { BlendedTermQuery blendedTermQuery = BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()); Set extracted = new HashSet<>(); IndexSearcher searcher = new IndexSearcher(new MultiReader()); - searcher.createNormalizedWeight(blendedTermQuery, false).extractTerms(extracted); + searcher.createWeight(searcher.rewrite(blendedTermQuery), ScoreMode.COMPLETE_NO_SCORES, 1f).extractTerms(extracted); assertThat(extracted.size(), equalTo(terms.size())); assertThat(extracted, containsInAnyOrder(terms.toArray(new Term[0]))); } diff --git a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java b/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java index 2def2702d38b3..ec468fd8d9b89 100644 --- a/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java +++ b/server/src/test/java/org/apache/lucene/queries/InetAddressRandomBinaryDocValuesRangeQueryTests.java @@ -19,7 +19,7 @@ package org.apache.lucene.queries; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.util.StringHelper; +import org.apache.lucene.util.FutureArrays; import org.elasticsearch.index.mapper.RangeFieldMapper; import java.net.InetAddress; @@ -44,7 +44,7 @@ protected Range nextRange(int dimensions) throws Exception { byte[] bMin = InetAddressPoint.encode(min); InetAddress max = nextInetaddress(); byte[] bMax = InetAddressPoint.encode(max); - if (StringHelper.compare(bMin.length, bMin, 0, bMax, 0) > 0) { + if (FutureArrays.compareUnsigned(bMin, 0, bMin.length, bMax, 0, bMin.length) > 0) { return new IpRange(max, min); } return new IpRange(min, max); @@ -91,7 +91,7 @@ protected void setMin(int dim, Object val) { InetAddress v = (InetAddress)val; byte[] e = InetAddressPoint.encode(v); - if (StringHelper.compare(e.length, min, 0, e, 0) < 0) { + if (FutureArrays.compareUnsigned(min, 0, e.length, e, 0, e.length) < 0) { max = e; maxAddress = v; } else { @@ -111,7 +111,7 @@ protected void setMax(int dim, Object val) { InetAddress v = (InetAddress)val; byte[] e = InetAddressPoint.encode(v); - if (StringHelper.compare(e.length, max, 0, e, 0) > 0) { + if (FutureArrays.compareUnsigned(max, 0, e.length, e, 0, e.length) > 0) { min = e; minAddress = v; } else { @@ -123,22 +123,22 @@ protected void setMax(int dim, Object val) { @Override protected boolean isDisjoint(Range o) { IpRange other = (IpRange) o; - return StringHelper.compare(min.length, min, 0, other.max, 0) > 0 || - StringHelper.compare(max.length, max, 0, other.min, 0) < 0; + return FutureArrays.compareUnsigned(min, 0, min.length, other.max, 0, min.length) > 0 || + FutureArrays.compareUnsigned(max, 0, max.length, other.min, 0, 
max.length) < 0; } @Override protected boolean isWithin(Range o) { IpRange other = (IpRange)o; - return StringHelper.compare(min.length, min, 0, other.min, 0) >= 0 && - StringHelper.compare(max.length, max, 0, other.max, 0) <= 0; + return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) >= 0 && + FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) <= 0; } @Override protected boolean contains(Range o) { IpRange other = (IpRange)o; - return StringHelper.compare(min.length, min, 0, other.min, 0) <= 0 && - StringHelper.compare(max.length, max, 0, other.max, 0) >= 0; + return FutureArrays.compareUnsigned(min, 0, min.length, other.min, 0, min.length) <= 0 && + FutureArrays.compareUnsigned(max, 0, max.length, other.max, 0, max.length) >= 0; } } diff --git a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index 796553034fb38..a6e676006fdbf 100644 --- a/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/apache/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -74,7 +74,7 @@ private void assertHighlightOneDoc(String fieldName, String[] inputs, Analyzer a IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); CustomUnifiedHighlighter highlighter = new CustomUnifiedHighlighter(searcher, analyzer, null, new CustomPassageFormatter("", "", new DefaultEncoder()), locale, diff --git a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index 4c7dc9eb094b7..c0d29e86fd60b 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -208,9 +208,9 @@ public void testIsBeta() { public void testIsAlpha() { - assertTrue(new Version(5000001, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha()); - assertFalse(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isAlpha()); - assertTrue(new Version(4000002, org.apache.lucene.util.Version.LUCENE_6_0_0).isBeta()); + assertTrue(new Version(5000001, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); + assertFalse(new Version(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); + assertTrue(new Version(4000002, org.apache.lucene.util.Version.LUCENE_7_0_0).isBeta()); assertTrue(Version.fromString("5.0.0-alpha14").isAlpha()); assertEquals(5000014, Version.fromString("5.0.0-alpha14").id); assertTrue(Version.fromId(5000015).isAlpha()); @@ -226,7 +226,6 @@ public void testIsAlpha() { } } - public void testParseVersion() { final int iters = scaledRandomIntBetween(100, 1000); for (int i = 0; i < iters; i++) { diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index c1bdf901a6d1b..fe9be2a06e27f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -22,7 +22,9 @@ import org.apache.lucene.search.ScoreDoc; import 
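
The range-query test hunks above replace StringHelper.compare(len, a, aOff, b, bOff) with FutureArrays.compareUnsigned, Lucene 8's backport of Java 9's Arrays.compareUnsigned, which takes explicit from/to bounds for both arrays. A small sketch of the comparison for two encoded IPs (the helper name is hypothetical):

    import java.net.InetAddress;
    import org.apache.lucene.document.InetAddressPoint;
    import org.apache.lucene.util.FutureArrays;

    final class UnsignedCompareExample {
        static boolean lessThanUnsigned(InetAddress a, InetAddress b) {
            byte[] ea = InetAddressPoint.encode(a); // both encode to 16 bytes
            byte[] eb = InetAddressPoint.encode(b);
            // compareUnsigned(a, aFrom, aTo, b, bFrom, bTo): negative, zero or positive,
            // treating each byte as unsigned.
            return FutureArrays.compareUnsigned(ea, 0, ea.length, eb, 0, eb.length) < 0;
        }
    }
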
org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AtomicArray; @@ -68,13 +70,17 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(2); // the size of the result set listener.onResponse(queryResult); } else if (request.id() == 2) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node2", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), + new DocValueFormat[0]); queryResult.size(2); // the size of the result set listener.onResponse(queryResult); } else { @@ -97,12 +103,12 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().totalHits); - assertEquals(42, responseRef.get().get(0).queryResult().topDocs().scoreDocs[0].doc); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().totalHits); - assertEquals(84, responseRef.get().get(1).queryResult().topDocs().scoreDocs[0].doc); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); } @@ -126,7 +132,9 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs( + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(2); // the size of the result set listener.onResponse(queryResult); } else if (request.id() == 2) { @@ -151,8 +159,8 @@ public void run() throws IOException { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - 
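
From here on, the test updates above and below all follow one recipe: build each shard's TopDocs with an explicit TotalHits, then wrap it in Elasticsearch's new TopDocsAndMaxScore holder, since the max score no longer lives on TopDocs. A sketch of constructing and reading one, matching the usage in these hunks:

    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TotalHits;
    import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;

    final class TopDocsAndMaxScoreExample {
        static TopDocsAndMaxScore singleHit() {
            TopDocs topDocs = new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
                    new ScoreDoc[] { new ScoreDoc(42, 1.0F) });
            // The max score travels alongside the TopDocs instead of inside it.
            TopDocsAndMaxScore wrapped = new TopDocsAndMaxScore(topDocs, 2.0F);
            assert wrapped.topDocs.totalHits.value == 1;
            assert Float.isNaN(wrapped.maxScore) == false; // NaN when scores were not tracked
            return wrapped;
        }
    }
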
assertEquals(1, responseRef.get().get(0).queryResult().topDocs().totalHits); - assertEquals(42, responseRef.get().get(0).queryResult().topDocs().scoreDocs[0].doc); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); assertEquals(1, mockSearchPhaseContext.numSuccess.get()); @@ -183,7 +191,9 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest if (request.id() == 1) { QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(2); // the size of the result set listener.onResponse(queryResult); } else if (request.id() == 2) { diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index e9795bfdf6f59..55ca24826fc37 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -20,7 +20,9 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.Index; @@ -55,7 +57,8 @@ public void testShortcutQueryAndFetchOptimization() throws IOException { final int numHits; if (hasHits) { QuerySearchResult queryResult = new QuerySearchResult(); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 1.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 1.0F), new DocValueFormat[0]); queryResult.size(1); FetchSearchResult fetchResult = new FetchSearchResult(); fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, 1, 1.0F)); @@ -94,13 +97,15 @@ public void testFetchTwoDocument() throws IOException { AtomicReference responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, 
TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); results.consumeResult(queryResult); @@ -149,13 +154,15 @@ public void testFailFetchOneDoc() throws IOException { AtomicReference responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); results.consumeResult(queryResult); @@ -209,7 +216,8 @@ public void testFetchDocsConcurrently() throws IOException, InterruptedException AtomicReference responseRef = new AtomicReference<>(); for (int i = 0; i < numHits; i++) { QuerySearchResult queryResult = new QuerySearchResult(i, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(i+1, i)}, i), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(i); results.consumeResult(queryResult); @@ -265,13 +273,15 @@ public void testExceptionFailsPhase() throws IOException { AtomicReference responseRef = new AtomicReference<>(); int resultSetSize = randomIntBetween(2, 10); QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); results.consumeResult(queryResult); @@ -319,13 +329,15 @@ public void testCleanupIrrelevantContexts() throws IOException { // contexts tha AtomicReference responseRef = new AtomicReference<>(); int resultSetSize = 1; 
QuerySearchResult queryResult = new QuerySearchResult(123, new SearchShardTarget("node1", new Index("test", "na"), 0, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(42, 1.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); queryResult = new QuerySearchResult(321, new SearchShardTarget("node2", new Index("test", "na"), 1, null)); - queryResult.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(84, 2.0F)}, 2.0F), new DocValueFormat[0]); + queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); queryResult.setShardIndex(1); results.consumeResult(queryResult); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 393c45fa57242..04fd258fa1596 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -22,6 +22,9 @@ import com.carrotsearch.randomizedtesting.RandomizedContext; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.TotalHits; +import org.apache.lucene.search.TotalHits.Relation; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.util.BigArrays; @@ -187,11 +190,11 @@ private AtomicArray generateQueryResults(int nShards, for (int shardIndex = 0; shardIndex < nShards; shardIndex++) { QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex, new SearchShardTarget("", new Index("", ""), shardIndex, null)); - TopDocs topDocs = new TopDocs(0, new ScoreDoc[0], 0); + TopDocs topDocs = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]); + float maxScore = 0; if (searchHitsSize > 0) { int nDocs = randomIntBetween(0, searchHitsSize); ScoreDoc[] scoreDocs = new ScoreDoc[nDocs]; - float maxScore = 0F; for (int i = 0; i < nDocs; i++) { float score = useConstantScore ? 
1.0F : Math.abs(randomFloat()); scoreDocs[i] = new ScoreDoc(i, score); @@ -199,7 +202,7 @@ private AtomicArray generateQueryResults(int nShards, maxScore = score; } } - topDocs = new TopDocs(scoreDocs.length, scoreDocs, maxScore); + topDocs = new TopDocs(new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO), scoreDocs); } List shardSuggestion = new ArrayList<>(); for (CompletionSuggestion completionSuggestion : suggestions) { @@ -208,19 +211,19 @@ private AtomicArray generateQueryResults(int nShards, final CompletionSuggestion.Entry completionEntry = new CompletionSuggestion.Entry(new Text(""), 0, 5); suggestion.addTerm(completionEntry); int optionSize = randomIntBetween(1, suggestion.getSize()); - float maxScore = randomIntBetween(suggestion.getSize(), (int) Float.MAX_VALUE); + float maxScoreValue = randomIntBetween(suggestion.getSize(), (int) Float.MAX_VALUE); for (int i = 0; i < optionSize; i++) { - completionEntry.addOption(new CompletionSuggestion.Entry.Option(i, new Text(""), maxScore, + completionEntry.addOption(new CompletionSuggestion.Entry.Option(i, new Text(""), maxScoreValue, Collections.emptyMap())); float dec = randomIntBetween(0, optionSize); - if (dec <= maxScore) { - maxScore -= dec; + if (dec <= maxScoreValue) { + maxScoreValue -= dec; } } suggestion.setShardIndex(shardIndex); shardSuggestion.add(suggestion); } - querySearchResult.topDocs(topDocs, null); + querySearchResult.topDocs(new TopDocsAndMaxScore(topDocs, maxScore), null); querySearchResult.size(searchHitsSize); querySearchResult.suggest(new Suggest(new ArrayList<>(shardSuggestion))); querySearchResult.setShardIndex(shardIndex); @@ -232,7 +235,9 @@ private AtomicArray generateQueryResults(int nShards, private int getTotalQueryHits(AtomicArray results) { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { - resultCount += shardResult.queryResult().topDocs().totalHits; + TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; + assert topDocs.totalHits.relation == Relation.EQUAL_TO; + resultCount += topDocs.totalHits.value; } return resultCount; } @@ -292,7 +297,8 @@ public void testConsumer() { request.setBatchedReduceSize(bufferSize); InitialSearchPhase.ArraySearchPhaseResults consumer = searchPhaseController.newSearchPhaseResults(request, 3); QuerySearchResult result = new QuerySearchResult(0, new SearchShardTarget("node", new Index("a", "b"), 0, null)); - result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0]); InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 1.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); @@ -300,7 +306,8 @@ public void testConsumer() { consumer.consumeResult(result); result = new QuerySearchResult(1, new SearchShardTarget("node", new Index("a", "b"), 0, null)); - result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0]); aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 3.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); @@ -308,7 +315,8 @@ public void testConsumer() { consumer.consumeResult(result); result = new QuerySearchResult(1, new 
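
getTotalQueryHits above can simply sum topDocs.totalHits.value because it first asserts every shard reported Relation.EQUAL_TO. In general a merged count is only exact when all partial counts are; a hypothetical reduce helper that keeps the relation honest:

    import org.apache.lucene.search.TotalHits;

    final class MergeTotalHitsExample {
        static TotalHits merge(TotalHits[] shardHits) {
            long value = 0;
            TotalHits.Relation relation = TotalHits.Relation.EQUAL_TO;
            for (TotalHits hits : shardHits) {
                value += hits.value;
                if (hits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) {
                    // Any lower-bound input makes the sum a lower bound too.
                    relation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
                }
            }
            return new TotalHits(value, relation);
        }
    }
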
SearchShardTarget("node", new Index("a", "b"), 0, null)); - result.topDocs(new TopDocs(0, new ScoreDoc[0], 0.0F), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), + new DocValueFormat[0]); aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", 2.0D, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); @@ -347,7 +355,9 @@ public void testConsumerConcurrently() throws InterruptedException { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null)); - result.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(0, number)}, number), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore( + new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), + new DocValueFormat[0]); InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); @@ -384,7 +394,8 @@ public void testConsumerOnlyAggs() throws InterruptedException { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null)); - result.topDocs(new TopDocs(1, new ScoreDoc[0], number), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), + new DocValueFormat[0]); InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number, DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap()))); result.aggregations(aggs); @@ -417,7 +428,8 @@ public void testConsumerOnlyHits() throws InterruptedException { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null)); - result.topDocs(new TopDocs(1, new ScoreDoc[] {new ScoreDoc(0, number)}, number), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), + new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); result.setShardIndex(id); result.size(1); consumer.consumeResult(result); @@ -477,7 +489,8 @@ public void testReduceTopNWithFromOffset() { for (int j = 0; j < docs.length; j++) { docs[j] = new ScoreDoc(0, score--); } - result.topDocs(new TopDocs(3, docs, docs[0].score), new DocValueFormat[0]); + result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(3, TotalHits.Relation.EQUAL_TO), docs), docs[0].score), + new DocValueFormat[0]); result.setShardIndex(i); result.size(5); result.from(5); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 890f6ef163b33..b677247f266cd 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -37,6 +37,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; @@ -205,10 +206,10 @@ public void testPruneUnreferencedFiles() throws IOException { assertEquals(3, open.maxDoc()); IndexSearcher s = new IndexSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits, 1); - assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value, 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); for (String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); @@ -381,7 +382,7 @@ public void testAsSequentialAccessBits() throws Exception { try (DirectoryReader reader = DirectoryReader.open(w)) { IndexSearcher searcher = newSearcher(reader); - Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, false, 1f); + Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1f); assertEquals(1, reader.leaves().size()); LeafReaderContext leafReaderContext = searcher.getIndexReader().leaves().get(0); Bits bits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), termWeight.scorerSupplier(leafReaderContext)); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 6ebb604725d6c..d60458cf82642 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -103,6 +103,11 @@ public float score() throws IOException { final int idx = Arrays.binarySearch(docs, docID()); return scores[idx]; } + + @Override + public float getMaxScore(int upTo) throws IOException { + return Float.MAX_VALUE; + } }; } diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 0475c324f0648..7d01b3992fcbd 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -62,7 +62,7 @@ public void testVectorHighlighter() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))), @@ -88,7 +88,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 
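
The MinScoreScorerTests hunk above implements the new Scorer.getMaxScore(int upTo) hook that Lucene 8 uses for block-max/WAND-style skipping; returning Float.MAX_VALUE, as the test does, is the conservative "no usable bound" answer. A minimal fixed-score scorer where a tight bound is actually possible:

    import java.io.IOException;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    final class FixedScoreScorer extends Scorer {
        private final DocIdSetIterator iterator;
        private final float score;

        FixedScoreScorer(Weight weight, DocIdSetIterator iterator, float score) {
            super(weight);
            this.iterator = iterator;
            this.score = score;
        }

        @Override
        public int docID() {
            return iterator.docID();
        }

        @Override
        public float score() {
            return score;
        }

        @Override
        public DocIdSetIterator iterator() {
            return iterator;
        }

        @Override
        public float getMaxScore(int upTo) throws IOException {
            // Every doc scores the same, so this bound is tight; a scorer that
            // cannot bound its scores should return Float.MAX_VALUE instead.
            return score;
        }
    }
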
1); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -129,7 +129,7 @@ public void testVectorHighlighterNoStore() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))), @@ -150,7 +150,7 @@ public void testVectorHighlighterNoTermVector() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits, equalTo(1L)); + assertThat(topDocs.totalHits.value, equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment(highlighter.getFieldQuery(new TermQuery(new Term("content", "bad"))), diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 000722863887c..75ff1ac1259d2 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -21,7 +21,6 @@ import org.apache.lucene.index.AssertingDirectoryReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInvertState; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.IndexSearcher; @@ -432,13 +431,8 @@ public long computeNorm(FieldInvertState state) { } @Override - public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) { - return delegate.computeWeight(boost, collectionStats, termStats); - } - - @Override - public SimScorer simScorer(SimWeight weight, LeafReaderContext context) throws IOException { - return delegate.simScorer(weight, context); + public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... 
termStats) { + return delegate.scorer(boost, collectionStats, termStats); } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 8ace3aa34e86a..52513ce7a8b77 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -211,7 +211,7 @@ public void testRefreshActuallyWorks() throws Exception { // we are running on updateMetaData if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits); + assertEquals(1, search.totalHits.value); } }); assertFalse(refreshTask.isClosed()); @@ -224,7 +224,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the force refresh we are running on updateMetaData if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits); + assertEquals(2, search.totalHits.value); } }); client().prepareIndex("test", "test", "2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -232,7 +232,7 @@ public void testRefreshActuallyWorks() throws Exception { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits); + assertEquals(3, search.totalHits.value); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index f82f2c39f4470..ddb2b85748686 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -22,8 +22,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; -import org.apache.lucene.codecs.lucene62.Lucene62Codec; -import org.apache.lucene.codecs.lucene70.Lucene70Codec; +import org.apache.lucene.codecs.lucene80.Lucene80Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -54,8 +53,8 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene70Codec.class)); - assertThat(codecService.codec("Lucene62"), instanceOf(Lucene62Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene80Codec.class)); + assertThat(codecService.codec("Lucene80"), instanceOf(Lucene80Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 9cc8d859c6ff1..a44829890d5a9 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -781,7 
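
The IndexModuleTests change just above reflects Lucene 8's Similarity rework: the two-step computeWeight/simScorer pair collapses into a single scorer(boost, collectionStats, termStats) call that returns a leaf-independent SimScorer. A delegating wrapper in the same style as the test (the class name here is illustrative):

    import org.apache.lucene.index.FieldInvertState;
    import org.apache.lucene.search.CollectionStatistics;
    import org.apache.lucene.search.TermStatistics;
    import org.apache.lucene.search.similarities.BM25Similarity;
    import org.apache.lucene.search.similarities.Similarity;

    final class DelegatingSimilarity extends Similarity {
        private final Similarity delegate = new BM25Similarity();

        @Override
        public long computeNorm(FieldInvertState state) {
            return delegate.computeNorm(state);
        }

        @Override
        public SimScorer scorer(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) {
            // One call now covers what computeWeight + simScorer used to do; the
            // returned SimScorer works from (freq, norm) and needs no LeafReaderContext.
            return delegate.scorer(boost, collectionStats, termStats);
        }
    }
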
+781,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits); + assertEquals(docs, topDocs.totalHits.value); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -2706,7 +2706,7 @@ public void testSkipTranslogReplay() throws IOException { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits, equalTo(0L)); + assertThat(topDocs.totalHits.value, equalTo(0L)); } } } @@ -2782,14 +2782,14 @@ public void testTranslogReplay() throws IOException { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); } engine.close(); engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); } parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); assertEquals(flush ? 1 : 2, parser.appliedOperations()); @@ -2802,7 +2802,7 @@ public void testTranslogReplay() throws IOException { } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); } } @@ -3102,7 +3102,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } operation = appendOnlyPrimary(doc, false, 1); retry = appendOnlyPrimary(doc, true, 1); @@ -3123,7 +3123,7 @@ public void testDoubleDeliveryPrimary() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } } @@ -3167,7 +3167,7 @@ public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits); + assertEquals(0, topDocs.totalHits.value); } } @@ -3212,7 +3212,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } operation = 
randomAppendOnly(doc.get(), false, 1); retry = randomAppendOnly(doc.get(), true, 1); @@ -3233,7 +3233,7 @@ public void testDoubleDeliveryReplicaAppendingOnly() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } } @@ -3273,12 +3273,12 @@ public void testDoubleDeliveryReplica() throws IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } List ops = readAllOperationsInLucene(engine, createMapperService("test")); assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L)); @@ -3305,7 +3305,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); @@ -3314,7 +3314,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } } @@ -3339,7 +3339,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); @@ -3347,7 +3347,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits); + assertEquals(1, topDocs.totalHits.value); } } @@ -3424,7 +3424,7 @@ public void testRetryConcurrently() throws InterruptedException, IOException { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(numDocs, topDocs.totalHits); + assertEquals(numDocs, topDocs.totalHits.value); } if (primary) { // primaries rely on lucene dedup and may index the same document twice @@ -3524,7 +3524,7 @@ public void run() { engine.refresh("test"); try 
(Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10); - assertEquals(docs.size(), topDocs.totalHits); + assertEquals(docs.size(), topDocs.totalHits.value); } assertEquals(0, engine.getNumVersionLookups()); assertEquals(0, engine.getNumIndexVersionsLookups()); diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index f9641ba24d7ac..47946a6850c48 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -77,7 +77,7 @@ static Segment randomSegment() { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_6_5_0; + segment.version = Version.LUCENE_7_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.memoryInBytes = randomNonNegativeLong(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index cd1dc01d9ef4a..048455ccb41e2 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -115,7 +115,7 @@ public void testSingleValueAllSet() throws Exception { SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -126,7 +126,7 @@ public void testSingleValueAllSet() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -192,7 +192,7 @@ public void testMultiValueAllSet() throws Exception { IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -200,7 +200,7 @@ public void testMultiValueAllSet() throws Exception { ; sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(3L)); + assertThat(topDocs.totalHits.value, equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, 
equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -259,7 +259,7 @@ public void testSortMultiValuesFields() throws Exception { indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(8L)); + assertThat(topDocs.totalHits.value, equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -281,7 +281,7 @@ public void testSortMultiValuesFields() throws Exception { sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits, equalTo(8L)); + assertThat(topDocs.totalHits.value, equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index 04cd13766176b..ef2a9b3873580 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -265,7 +265,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); - assertEquals(numDocs, topDocs.totalHits); + assertEquals(numDocs, topDocs.totalHits.value); BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); @@ -319,7 +319,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); - assertEquals(numDocs, topDocs.totalHits); + assertEquals(numDocs, topDocs.totalHits.value); BytesRef previousValue = first ? null : reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index b7ee74fb773a0..23e205b8f58d7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -71,25 +71,25 @@ public void testDoubleIndexingSameDoc() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fullName("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field3").termQuery("1.1", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); topDocs = searcher.search(mapperService.fullName("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits, equalTo(2L)); + assertThat(topDocs.totalHits.value, equalTo(2L)); writer.close(); reader.close(); dir.close(); diff --git a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java index 49cb4442beb8c..cdc65cce92708 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoostingQueryBuilderTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.query; -import org.apache.lucene.queries.BoostingQuery; +import org.apache.lucene.queries.function.FunctionScoreQuery; import org.apache.lucene.search.Query; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -45,7 +45,7 @@ protected void doAssertLuceneQuery(BoostingQueryBuilder queryBuilder, Query quer if (positive == null || negative == null) { assertThat(query, nullValue()); } else { - assertThat(query, instanceOf(BoostingQuery.class)); + assertThat(query, instanceOf(FunctionScoreQuery.class)); } } diff --git a/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java index 98a5d91e1b195..ef98c67e56ed4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/DisMaxQueryBuilderTests.java @@ -50,7 +50,7 @@ protected DisMaxQueryBuilder doCreateTestQueryBuilder() { 
             dismax.add(RandomQueryBuilder.createQuery(random()));
         }
         if (randomBoolean()) {
-            dismax.tieBreaker(2.0f / randomIntBetween(1, 20));
+            dismax.tieBreaker((float) randomDoubleBetween(0d, 1d, true));
         }
         return dismax;
     }
diff --git a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index b0ee32548737a..1cc058eb724b8 100644
--- a/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -154,7 +154,7 @@ protected QueryStringQueryBuilder doCreateTestQueryBuilder() {
             queryStringQueryBuilder.quoteFieldSuffix(randomAlphaOfLengthBetween(1, 3));
         }
         if (randomBoolean()) {
-            queryStringQueryBuilder.tieBreaker(randomFloat());
+            queryStringQueryBuilder.tieBreaker((float) randomDoubleBetween(0d, 1d, true));
         }
         if (randomBoolean()) {
             queryStringQueryBuilder.minimumShouldMatch(randomMinimumShouldMatch());
diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
index cfc423d918ad7..698cb71692b0f 100644
--- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java
@@ -208,7 +208,7 @@ public void testDoToQuery() throws Exception {
                     .setMinimumShouldMatchField("m_s_m").doToQuery(context);
                 IndexSearcher searcher = new IndexSearcher(ir);
                 TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC));
-                assertThat(topDocs.totalHits, equalTo(3L));
+                assertThat(topDocs.totalHits.value, equalTo(3L));
                 assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
                 assertThat(topDocs.scoreDocs[1].doc, equalTo(3));
                 assertThat(topDocs.scoreDocs[2].doc, equalTo(4));
@@ -254,7 +254,7 @@ public void testDoToQuery_msmScriptField() throws Exception {
                     .setMinimumShouldMatchScript(script).doToQuery(context);
                 IndexSearcher searcher = new IndexSearcher(ir);
                 TopDocs topDocs = searcher.search(query, 10, new Sort(SortField.FIELD_DOC));
-                assertThat(topDocs.totalHits, equalTo(3L));
+                assertThat(topDocs.totalHits.value, equalTo(3L));
                 assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
                 assertThat(topDocs.scoreDocs[1].doc, equalTo(2));
                 assertThat(topDocs.scoreDocs[2].doc, equalTo(4));
diff --git a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
index 112de76b43e21..a77d10f12eafa 100644
--- a/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
+++ b/server/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreTests.java
@@ -320,7 +320,7 @@ public void testExplainFunctionScoreQuery() throws IOException {
 
     public Explanation getFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction scoreFunction) throws IOException {
         FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new TermQuery(TERM), scoreFunction, CombineFunction.AVG,0.0f, 100);
-        Weight weight = searcher.createNormalizedWeight(functionScoreQuery, true);
+        Weight weight = searcher.createWeight(searcher.rewrite(functionScoreQuery), org.apache.lucene.search.ScoreMode.COMPLETE, 1f);
         Explanation explanation = weight.explain(searcher.getIndexReader().leaves().get(0), 0);
         return explanation.getDetails()[1];
     }
@@ -397,7 +397,7 @@ public Explanation getFiltersFunctionScoreExplanation(IndexSearcher searcher, Sc
     }
 
     protected Explanation getExplanation(IndexSearcher searcher, FunctionScoreQuery functionScoreQuery) throws IOException {
-        Weight weight = searcher.createNormalizedWeight(functionScoreQuery, true);
+        Weight weight = searcher.createWeight(searcher.rewrite(functionScoreQuery), org.apache.lucene.search.ScoreMode.COMPLETE, 1f);
         return weight.explain(searcher.getIndexReader().leaves().get(0), 0);
     }
 
@@ -421,18 +421,19 @@ public void checkFiltersFunctionScoreExplanation(Explanation randomExplanation,
         assertThat(functionExplanation.getDetails()[1].getDescription(), equalTo(functionExpl));
     }
 
-    private static float[] randomFloats(int size) {
+    private static float[] randomPositiveFloats(int size) {
         float[] values = new float[size];
         for (int i = 0; i < values.length; i++) {
-            values[i] = randomFloat() * (randomBoolean() ? 1.0f : -1.0f) * randomInt(100) + 1.e-5f;
+            values[i] = randomFloat() * randomInt(100) + 1.e-5f;
         }
         return values;
     }
 
-    private static double[] randomDoubles(int size) {
+    private static double[] randomPositiveDoubles(int size) {
         double[] values = new double[size];
         for (int i = 0; i < values.length; i++) {
-            values[i] = randomDouble() * (randomBoolean() ? 1.0d : -1.0d) * randomInt(100) + 1.e-5d;
+            double rand = randomValueOtherThanMany((d) -> Double.compare(d, 0) < 0, ESTestCase::randomDouble);
+            values[i] = rand * randomInt(100) + 1.e-5d;
         }
         return values;
     }
@@ -478,8 +479,8 @@ protected int doHashCode() {
 
     public void testSimpleWeightedFunction() throws IOException, ExecutionException, InterruptedException {
         int numFunctions = randomIntBetween(1, 3);
-        float[] weights = randomFloats(numFunctions);
-        double[] scores = randomDoubles(numFunctions);
+        float[] weights = randomPositiveFloats(numFunctions);
+        double[] scores = randomPositiveDoubles(numFunctions);
         ScoreFunctionStub[] scoreFunctionStubs = new ScoreFunctionStub[numFunctions];
         for (int i = 0; i < numFunctions; i++) {
             scoreFunctionStubs[i] = new ScoreFunctionStub(scores[i]);
@@ -502,7 +503,7 @@ public void testSimpleWeightedFunction() throws IOException, ExecutionException,
             score *= weights[i] * scores[i];
         }
         assertThat(scoreWithWeight / (float) score, is(1f));
-        float explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue();
+        float explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue().floatValue();
         assertThat(explainedScore / scoreWithWeight, is(1f));
 
         functionScoreQueryWithWeights = getFiltersFunctionScoreQuery(
@@ -518,7 +519,7 @@ public void testSimpleWeightedFunction() throws IOException, ExecutionException,
             sum += weights[i] * scores[i];
         }
         assertThat(scoreWithWeight / (float) sum, is(1f));
-        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue();
+        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue().floatValue();
         assertThat(explainedScore / scoreWithWeight, is(1f));
 
         functionScoreQueryWithWeights = getFiltersFunctionScoreQuery(
@@ -536,7 +537,7 @@ public void testSimpleWeightedFunction() throws IOException, ExecutionException,
             sum += weights[i] * scores[i];
         }
         assertThat(scoreWithWeight / (float) (sum / norm), is(1f));
-        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue();
+        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue().floatValue();
         assertThat(explainedScore / scoreWithWeight, is(1f));
 
         functionScoreQueryWithWeights = getFiltersFunctionScoreQuery(
@@ -552,7 +553,7 @@ public void testSimpleWeightedFunction() throws IOException, ExecutionException,
             min = Math.min(min, weights[i] * scores[i]);
         }
         assertThat(scoreWithWeight / (float) min, is(1f));
-        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue();
+        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue().floatValue();
         assertThat(explainedScore / scoreWithWeight, is(1f));
 
         functionScoreQueryWithWeights = getFiltersFunctionScoreQuery(
@@ -568,7 +569,7 @@ public void testSimpleWeightedFunction() throws IOException, ExecutionException,
             max = Math.max(max, weights[i] * scores[i]);
         }
         assertThat(scoreWithWeight / (float) max, is(1f));
-        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue();
+        explainedScore = getExplanation(searcher, functionScoreQueryWithWeights).getValue().floatValue();
         assertThat(explainedScore / scoreWithWeight, is(1f));
     }
 
@@ -587,7 +588,7 @@ public void testMinScoreExplain() throws IOException {
         FunctionScoreQuery fsq = new FunctionScoreQuery(query,0f, Float.POSITIVE_INFINITY);
         Explanation fsqExpl = searcher.explain(fsq, 0);
         assertTrue(fsqExpl.isMatch());
-        assertEquals(queryExpl.getValue(), fsqExpl.getValue(), 0f);
+        assertEquals(queryExpl.getValue(), fsqExpl.getValue());
         assertEquals(queryExpl.getDescription(), fsqExpl.getDescription());
 
         fsq = new FunctionScoreQuery(query, 10f, Float.POSITIVE_INFINITY);
@@ -598,7 +599,7 @@ public void testMinScoreExplain() throws IOException {
         FunctionScoreQuery ffsq = new FunctionScoreQuery(query, 0f, Float.POSITIVE_INFINITY);
         Explanation ffsqExpl = searcher.explain(ffsq, 0);
         assertTrue(ffsqExpl.isMatch());
-        assertEquals(queryExpl.getValue(), ffsqExpl.getValue(), 0f);
+        assertEquals(queryExpl.getValue(), ffsqExpl.getValue());
         assertEquals(queryExpl.getDescription(), ffsqExpl.getDescription());
 
         ffsq = new FunctionScoreQuery(query, 10f, Float.POSITIVE_INFINITY);
@@ -613,8 +614,8 @@ public void testPropagatesApproximations() throws IOException {
         searcher.setQueryCache(null); // otherwise we could get a cached entry that does not have approximations
 
         FunctionScoreQuery fsq = new FunctionScoreQuery(query, null, Float.POSITIVE_INFINITY);
-        for (boolean needsScores : new boolean[] {true, false}) {
-            Weight weight = searcher.createWeight(fsq, needsScores, 1f);
+        for (org.apache.lucene.search.ScoreMode scoreMode : org.apache.lucene.search.ScoreMode.values()) {
+            Weight weight = searcher.createWeight(fsq, scoreMode, 1f);
             Scorer scorer = weight.scorer(reader.leaves().get(0));
             assertNotNull(scorer.twoPhaseIterator());
         }
diff --git a/server/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/server/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java
index 3d0eee79595f5..02653dcfd0e4d 100644
--- a/server/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java
+++ b/server/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java
@@ -22,6 +22,7 @@
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Weight;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SearchPlugin;
@@ -52,8 +53,8 @@ public String toString(String field) {
         }
 
         @Override
-        public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-            return matchAllDocsQuery.createWeight(searcher, needsScores, boost);
+        public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+            return matchAllDocsQuery.createWeight(searcher, scoreMode, boost);
         }
 
         @Override
diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
index fba71dd1e5296..e471874f6d664 100644
--- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
+++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
@@ -236,7 +236,7 @@ public void testConflictingOpsOnReplica() throws Exception {
         for (IndexShard shard : shards) {
             try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
                 TopDocs search = searcher.searcher().search(new TermQuery(new Term("f", "2")), 10);
-                assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits);
+                assertEquals("shard " + shard.routingEntry() + " misses new version", 1, search.totalHits.value);
             }
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
index d4dc71388ac7d..f64a9e38b871a 100644
--- a/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
+++ b/server/src/test/java/org/elasticsearch/index/search/nested/AbstractNumberNestedSortingTestCase.java
@@ -226,7 +226,7 @@ public void testNestedSorting() throws Exception {
 
         Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
         TopFieldDocs topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(11));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(7));
@@ -241,7 +241,7 @@ public void testNestedSorting() throws Exception {
 
         sort = new Sort(new SortField("field2", nestedComparatorSource, true));
         topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(28));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(13));
@@ -263,7 +263,7 @@ public void testNestedSorting() throws Exception {
         );
         sort = new Sort(new SortField("field2", nestedComparatorSource, true));
         topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(6L));
+        assertThat(topDocs.totalHits.value, equalTo(6L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(23));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(12));
@@ -278,7 +278,7 @@ public void testNestedSorting() throws Exception {
 
         sort = new Sort(new SortField("field2", nestedComparatorSource));
         topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(6L));
+        assertThat(topDocs.totalHits.value, equalTo(6L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(15));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(3));
@@ -294,7 +294,7 @@ public void testNestedSorting() throws Exception {
         nestedComparatorSource = createFieldComparator("field2", sortMode, 127, createNested(searcher, parentFilter, childFilter));
         sort = new Sort(new SortField("field2", nestedComparatorSource, true));
         topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort);
-        assertThat(topDocs.totalHits, equalTo(8L));
+        assertThat(topDocs.totalHits.value, equalTo(8L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(19));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(127));
@@ -310,7 +310,7 @@ public void testNestedSorting() throws Exception {
         nestedComparatorSource = createFieldComparator("field2", sortMode, -127, createNested(searcher, parentFilter, childFilter));
         sort = new Sort(new SortField("field2", nestedComparatorSource));
         topDocs = searcher.search(new TermQuery(new Term("__type", "parent")), 5, sort);
-        assertThat(topDocs.totalHits, equalTo(8L));
+        assertThat(topDocs.totalHits.value, equalTo(8L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(19));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(-127));
@@ -336,7 +336,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th
         Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
         Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
         TopDocs topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(11));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2));
diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java
index c643ea6cee045..93945231e2b6f 100644
--- a/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/search/nested/DoubleNestedSortingTests.java
@@ -69,7 +69,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher) th
         Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
         Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
         TopDocs topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(11));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2));
diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java
index 13d0e83e37e01..2d1ffb1e1a344 100644
--- a/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/search/nested/FloatNestedSortingTests.java
@@ -68,7 +68,7 @@ protected void assertAvgScoreMode(Query parentFilter, IndexSearcher searcher, In
         Query query = new ToParentBlockJoinQuery(new ConstantScoreQuery(childFilter), new QueryBitSetProducer(parentFilter), ScoreMode.None);
         Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
         TopDocs topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(11));
         assertThat(((Number) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).intValue(), equalTo(2));
diff --git a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
index 1300debd5ebda..0bee6eeb6ed12 100644
--- a/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
+++ b/server/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java
@@ -304,7 +304,7 @@ public void testNestedSorting() throws Exception {
 
         Sort sort = new Sort(new SortField("field2", nestedComparatorSource));
         TopFieldDocs topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
         assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("a"));
@@ -321,7 +321,7 @@ public void testNestedSorting() throws Exception {
         nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(searcher, parentFilter, childFilter));
         sort = new Sort(new SortField("field2", nestedComparatorSource, true));
         topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(7L));
+        assertThat(topDocs.totalHits.value, equalTo(7L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(28));
         assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("o"));
@@ -347,7 +347,7 @@ public void testNestedSorting() throws Exception {
         );
         sort = new Sort(new SortField("field2", nestedComparatorSource, true));
         topDocs = searcher.search(query, 5, sort);
-        assertThat(topDocs.totalHits, equalTo(6L));
+        assertThat(topDocs.totalHits.value, equalTo(6L));
         assertThat(topDocs.scoreDocs.length, equalTo(5));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(23));
         assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("m"));
@@ -614,7 +614,7 @@ public void testMultiLevelNestedSorting() throws IOException {
             sortBuilder.setNestedSort(new NestedSortBuilder("chapters").setNestedSort(new NestedSortBuilder("chapters.paragraphs")));
             QueryBuilder queryBuilder = new MatchAllQueryBuilder();
             TopFieldDocs topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(5L));
+            assertThat(topFields.totalHits.value, equalTo(5L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4"));
@@ -630,25 +630,25 @@ public void testMultiLevelNestedSorting() throws IOException {
         {
             queryBuilder = new TermQueryBuilder("genre", "romance");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
 
             queryBuilder = new TermQueryBuilder("genre", "science fiction");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(234L));
 
             queryBuilder = new TermQueryBuilder("genre", "horror");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
 
             queryBuilder = new TermQueryBuilder("genre", "cooking");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
         }
@@ -658,7 +658,7 @@ public void testMultiLevelNestedSorting() throws IOException {
             sortBuilder.order(SortOrder.DESC);
             queryBuilder = new MatchAllQueryBuilder();
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(5L));
+            assertThat(topFields.totalHits.value, equalTo(5L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("1"));
@@ -675,25 +675,25 @@ public void testMultiLevelNestedSorting() throws IOException {
         {
             queryBuilder = new TermQueryBuilder("genre", "romance");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
 
             queryBuilder = new TermQueryBuilder("genre", "science fiction");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(849L));
 
             queryBuilder = new TermQueryBuilder("genre", "horror");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(976L));
 
             queryBuilder = new TermQueryBuilder("genre", "cooking");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(180L));
         }
@@ -708,7 +708,7 @@ public void testMultiLevelNestedSorting() throws IOException {
                     .setNestedSort(new NestedSortBuilder("chapters.paragraphs"))
             );
             topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(2L));
+            assertThat(topFields.totalHits.value, equalTo(2L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("4"));
@@ -716,7 +716,7 @@ public void testMultiLevelNestedSorting() throws IOException {
 
             sortBuilder.order(SortOrder.DESC);
             topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(2L));
+            assertThat(topFields.totalHits.value, equalTo(2L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
@@ -736,7 +736,7 @@ public void testMultiLevelNestedSorting() throws IOException {
                 )
             );
             topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(2L));
+            assertThat(topFields.totalHits.value, equalTo(2L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
@@ -744,7 +744,7 @@ public void testMultiLevelNestedSorting() throws IOException {
 
             sortBuilder.order(SortOrder.DESC);
             topFields = search(new NestedQueryBuilder("chapters", queryBuilder, ScoreMode.None), sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(2L));
+            assertThat(topFields.totalHits.value, equalTo(2L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
             assertThat(searcher.doc(topFields.scoreDocs[1].doc).get("_id"), equalTo("2"));
@@ -762,25 +762,25 @@ public void testMultiLevelNestedSorting() throws IOException {
 
             queryBuilder = new TermQueryBuilder("genre", "romance");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("2"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(76L));
 
             queryBuilder = new TermQueryBuilder("genre", "science fiction");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("1"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE));
 
             queryBuilder = new TermQueryBuilder("genre", "horror");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("3"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(Long.MAX_VALUE));
 
             queryBuilder = new TermQueryBuilder("genre", "cooking");
             topFields = search(queryBuilder, sortBuilder, queryShardContext, searcher);
-            assertThat(topFields.totalHits, equalTo(1L));
+            assertThat(topFields.totalHits.value, equalTo(1L));
             assertThat(searcher.doc(topFields.scoreDocs[0].doc).get("_id"), equalTo("4"));
             assertThat(((FieldDoc) topFields.scoreDocs[0]).fields[0], equalTo(87L));
         }
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
index 4479c7b390954..e9f52d7c3198d 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java
@@ -56,7 +56,7 @@ public void testReaderCloseListenerIsCalled() throws IOException {
         writer.addDocument(doc);
         DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         IndexSearcher searcher = new IndexSearcher(open);
-        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
+        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         final AtomicInteger closeCalls = new AtomicInteger(0);
         IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
             @Override
@@ -82,7 +82,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
             }
             outerCount.incrementAndGet();
         });
-        assertEquals(0, wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1).totalHits);
+        assertEquals(0, wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         wrap.close();
         assertFalse("wrapped reader is closed", wrap.reader().tryIncRef());
         assertEquals(sourceRefCount, open.getRefCount());
@@ -106,7 +106,7 @@ public void testIsCacheable() throws IOException {
         writer.addDocument(doc);
         DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         IndexSearcher searcher = new IndexSearcher(open);
-        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
+        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         searcher.setSimilarity(iwc.getSimilarity());
         final AtomicInteger closeCalls = new AtomicInteger(0);
         IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
@@ -148,7 +148,7 @@ public void testNoWrap() throws IOException {
         writer.addDocument(doc);
         DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1));
         IndexSearcher searcher = new IndexSearcher(open);
-        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits);
+        assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value);
         searcher.setSimilarity(iwc.getSimilarity());
         IndexSearcherWrapper wrapper = new IndexSearcherWrapper();
         try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) {
diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
index e8fc8a71a5b83..713bc04634b0a 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
@@ -1957,9 +1957,9 @@ public void testSearcherWrapperIsUsed() throws IOException {
         }
         try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
             TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
-            assertEquals(search.totalHits, 1);
+            assertEquals(search.totalHits.value, 1);
             search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
-            assertEquals(search.totalHits, 1);
+            assertEquals(search.totalHits.value, 1);
         }
         IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
             @Override
@@ -1987,9 +1987,9 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
 
         try (Engine.Searcher searcher = newShard.acquireSearcher("test")) {
             TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
-            assertEquals(search.totalHits, 0);
+            assertEquals(search.totalHits.value, 0);
             search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
-            assertEquals(search.totalHits, 1);
+            assertEquals(search.totalHits.value, 1);
         }
         try (Engine.GetResult getResult = newShard
             .get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
index 9dcb712a05da7..9296b4f311138 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java
@@ -29,6 +29,7 @@
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
@@ -260,9 +261,8 @@ void assertSplit(Directory dir, IndexMetaData metaData, int targetShardId, boole
         try (IndexReader reader = DirectoryReader.open(dir)) {
             IndexSearcher searcher = new IndexSearcher(reader);
             searcher.setQueryCache(null);
-            final boolean needsScores = false;
-            final Weight splitWeight = searcher.createNormalizedWeight(new ShardSplittingQuery(metaData, targetShardId, hasNested),
-                needsScores);
+            final Weight splitWeight = searcher.createWeight(searcher.rewrite(new ShardSplittingQuery(metaData, targetShardId, hasNested)),
+                ScoreMode.COMPLETE_NO_SCORES, 1f);
             final List<LeafReaderContext> leaves = reader.leaves();
             for (final LeafReaderContext ctx : leaves) {
                 Scorer scorer = splitWeight.scorer(ctx);
diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java
index cc1d0e827c71c..22089bc40e498 100644
--- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java
+++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java
@@ -25,6 +25,7 @@
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.FieldInvertState;
+import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.Term;
@@ -45,6 +46,7 @@
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.concurrent.atomic.AtomicBoolean;
 
 public class ScriptedSimilarityTests extends ESTestCase {
@@ -65,7 +67,10 @@ private void doTestSameNormsAsBM25(boolean discountOverlaps) {
             final int length = TestUtil.nextInt(random(), 1, 100);
             final int position = random().nextInt(length);
             final int numOverlaps = random().nextInt(length);
-            FieldInvertState state = new FieldInvertState(Version.LATEST.major, "foo", position, length, numOverlaps, 100);
+            int maxTermFrequency = TestUtil.nextInt(random(), 1, 10);
+            int uniqueTermCount = TestUtil.nextInt(random(), 1, 10);
+            FieldInvertState state = new FieldInvertState(Version.LATEST.major, "foo", IndexOptions.DOCS_AND_FREQS, position, length,
+                numOverlaps, 100, maxTermFrequency, uniqueTermCount);
             assertEquals(
                 sim2.computeNorm(state),
                 sim1.computeNorm(state),
@@ -81,7 +86,17 @@ public void testBasics() throws IOException {
             @Override
             public double execute(double weight, ScriptedSimilarity.Query query,
                                   ScriptedSimilarity.Field field, ScriptedSimilarity.Term term,
-                                  ScriptedSimilarity.Doc doc) throws IOException {
+                                  ScriptedSimilarity.Doc doc) {
+
+                StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
+                if (Arrays.stream(stackTraceElements).anyMatch(ste -> {
+                    return ste.getClassName().endsWith(".TermScorer") &&
+                            ste.getMethodName().equals("score");
+                }) == false) {
+                    // this might happen when computing max scores
+                    return Float.MAX_VALUE;
+                }
+
                 assertEquals(1, weight, 0);
                 assertNotNull(doc);
                 assertEquals(2f, doc.getFreq(), 0);
@@ -129,7 +144,7 @@ public double execute(double weight, ScriptedSimilarity.Query query,
                 .add(new TermQuery(new Term("match", "yes")), Occur.FILTER)
                 .build(), 3.2f);
         TopDocs topDocs = searcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits);
+        assertEquals(1, topDocs.totalHits.value);
         assertTrue(called.get());
         assertEquals(42, topDocs.scoreDocs[0].score, 0);
         w.close();
@@ -143,14 +158,13 @@ public void testInitScript() throws IOException {
 
             @Override
             public double execute(ScriptedSimilarity.Query query, ScriptedSimilarity.Field field,
-                    ScriptedSimilarity.Term term) throws IOException {
-                assertNotNull(field);
+                    ScriptedSimilarity.Term term) {
                 assertEquals(3, field.getDocCount());
                 assertEquals(5, field.getSumDocFreq());
                 assertEquals(6, field.getSumTotalTermFreq());
                 assertNotNull(term);
-                assertEquals(2, term.getDocFreq());
-                assertEquals(3, term.getTotalTermFreq());
+                assertEquals(1, term.getDocFreq());
+                assertEquals(2, term.getTotalTermFreq());
                 assertNotNull(query);
                 assertEquals(3.2f, query.getBoost(), 0);
                 initCalled.set(true);
@@ -166,7 +180,17 @@ public double execute(ScriptedSimilarity.Query query, ScriptedSimilarity.Field f
             @Override
             public double execute(double weight, ScriptedSimilarity.Query query,
                                   ScriptedSimilarity.Field field, ScriptedSimilarity.Term term,
-                                  ScriptedSimilarity.Doc doc) throws IOException {
+                                  ScriptedSimilarity.Doc doc) {
+
+                StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
+                if (Arrays.stream(stackTraceElements).anyMatch(ste -> {
+                    return ste.getClassName().endsWith(".TermScorer") &&
+                            ste.getMethodName().equals("score");
+                }) == false) {
+                    // this might happen when computing max scores
+                    return Float.MAX_VALUE;
+                }
+
                 assertEquals(28, weight, 0d);
                 assertNotNull(doc);
                 assertEquals(2f, doc.getFreq(), 0);
@@ -176,8 +200,8 @@ public double execute(double weight, ScriptedSimilarity.Query query,
                 assertEquals(5, field.getSumDocFreq());
                 assertEquals(6, field.getSumTotalTermFreq());
                 assertNotNull(term);
-                assertEquals(2, term.getDocFreq());
-                assertEquals(3, term.getTotalTermFreq());
+                assertEquals(1, term.getDocFreq());
+                assertEquals(2, term.getTotalTermFreq());
                 assertNotNull(query);
                 assertEquals(3.2f, query.getBoost(), 0);
                 called.set(true);
@@ -191,8 +215,7 @@ public double execute(double weight, ScriptedSimilarity.Query query,
         IndexWriter w = new IndexWriter(dir, newIndexWriterConfig().setSimilarity(sim));
 
         Document doc = new Document();
-        doc.add(new TextField("f", "foo bar", Store.NO));
-        doc.add(new StringField("match", "no", Store.NO));
+        doc.add(new TextField("f", "bar baz", Store.NO));
         w.addDocument(doc);
 
         doc = new Document();
@@ -202,19 +225,15 @@ public double execute(double weight, ScriptedSimilarity.Query query,
         doc = new Document();
         doc.add(new TextField("f", "bar", Store.NO));
-        doc.add(new StringField("match", "no", Store.NO));
         w.addDocument(doc);
 
         IndexReader r = DirectoryReader.open(w);
         w.close();
 
         IndexSearcher searcher = new IndexSearcher(r);
         searcher.setSimilarity(sim);
-        Query query = new BoostQuery(new BooleanQuery.Builder()
-                .add(new TermQuery(new Term("f", "foo")), Occur.SHOULD)
-                .add(new TermQuery(new Term("match", "yes")), Occur.FILTER)
-                .build(), 3.2f);
+        Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f);
         TopDocs topDocs = searcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits);
+        assertEquals(1, topDocs.totalHits.value);
         assertTrue(initCalled.get());
         assertTrue(called.get());
         assertEquals(42, topDocs.scoreDocs[0].score, 0);
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
index e155639f143c6..88bc4381626d4 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java
@@ -32,6 +32,7 @@
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryCachingPolicy;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.Weight;
@@ -72,7 +73,7 @@ public String toString(String field) {
     }
 
     @Override
-    public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost)
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost)
             throws IOException {
         return new ConstantScoreWeight(this, boost) {
             @Override
@@ -414,7 +415,7 @@ public void onUse(Query query) {}
         IndicesQueryCache cache = new IndicesQueryCache(settings);
         s.setQueryCache(cache);
         Query query = new MatchAllDocsQuery();
-        final DummyWeight weight = new DummyWeight(s.createNormalizedWeight(query, false));
+        final DummyWeight weight = new DummyWeight(s.createWeight(s.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f));
         final Weight cached = cache.doCache(weight, s.getQueryCachingPolicy());
         assertNotSame(weight, cached);
         assertFalse(weight.scorerCalled);
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
index 8059c8a103927..4418a7cfb7f83 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
@@ -343,7 +343,7 @@ public BytesReference get() {
             try (BytesStreamOutput out = new BytesStreamOutput()) {
                 IndexSearcher searcher = new IndexSearcher(reader);
                 TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1);
-                assertEquals(1, topDocs.totalHits);
+                assertEquals(1, topDocs.totalHits.value);
                 Document document = reader.document(topDocs.scoreDocs[0].doc);
                 out.writeString(document.get("value"));
                 loadedFromCache = false;
diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
index 485fd92099630..119a74262bf7a 100644
--- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java
@@ -240,6 +240,35 @@ public void testUnderscoreInAnalyzerName() throws IOException {
         }
     }
 
+    public void testStandardFilterBWC() throws IOException {
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT.minimumCompatibilityVersion());
+        // bwc deprecation
+        {
+            Settings settings = Settings.builder()
+                .put("index.analysis.analyzer.my_standard.tokenizer", "standard")
+                .put("index.analysis.analyzer.my_standard.filter", "standard")
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+                .put(IndexMetaData.SETTING_VERSION_CREATED, version)
+                .build();
+            IndexAnalyzers analyzers = getIndexAnalyzers(settings);
+            assertTokenStreamContents(analyzers.get("my_standard").tokenStream("", "test"), new String[]{"test"});
+            assertWarnings("The [standard] token filter is deprecated and will be removed in a future version.");
+        }
+        // removal
+        {
+            final Settings settings = Settings.builder()
+                .put("index.analysis.analyzer.my_standard.tokenizer", "standard")
+                .put("index.analysis.analyzer.my_standard.filter", "standard")
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_7_0_0_alpha1)
+                .build();
+            IndexAnalyzers analyzers = getIndexAnalyzers(settings);
+            IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () ->
+                analyzers.get("my_standard").tokenStream("", ""));
+            assertThat(exc.getMessage(), equalTo("The [standard] token filter has been removed."));
+        }
+    }
+
     /**
      * Tests that plugins can register pre-configured char filters that vary in behavior based on Elasticsearch version, Lucene version,
      * and that do not vary based on version at all.
@@ -376,34 +405,34 @@ public void reset() throws IOException {
 
             }
         }
         AnalysisRegistry registry = new AnalysisModule(TestEnvironment.newEnvironment(emptyNodeSettings),
-                singletonList(new AnalysisPlugin() {
-                    @Override
-                    public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
-                        return Arrays.asList(
+            singletonList(new AnalysisPlugin() {
+                @Override
+                public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
+                    return Arrays.asList(
                         PreConfiguredTokenizer.singleton("no_version", () -> new FixedTokenizer("no_version"),
-                                noVersionSupportsMultiTerm ? () -> AppendTokenFilter.factoryForSuffix("no_version") : null),
+                            noVersionSupportsMultiTerm ? () -> AppendTokenFilter.factoryForSuffix("no_version") : null),
                         PreConfiguredTokenizer.luceneVersion("lucene_version",
-                                luceneVersion -> new FixedTokenizer(luceneVersion.toString()),
-                                luceneVersionSupportsMultiTerm ?
-                                        luceneVersion -> AppendTokenFilter.factoryForSuffix(luceneVersion.toString()) : null),
+                            luceneVersion -> new FixedTokenizer(luceneVersion.toString()),
+                            luceneVersionSupportsMultiTerm ?
+                                luceneVersion -> AppendTokenFilter.factoryForSuffix(luceneVersion.toString()) : null),
                         PreConfiguredTokenizer.elasticsearchVersion("elasticsearch_version",
-                                esVersion -> new FixedTokenizer(esVersion.toString()),
-                                elasticsearchVersionSupportsMultiTerm ?
-                                        esVersion -> AppendTokenFilter.factoryForSuffix(esVersion.toString()) : null)
-                        );
-                    }
-                })).getAnalysisRegistry();
+                            esVersion -> new FixedTokenizer(esVersion.toString()),
+                            elasticsearchVersionSupportsMultiTerm ?
+                                esVersion -> AppendTokenFilter.factoryForSuffix(esVersion.toString()) : null)
+                    );
+                }
+            })).getAnalysisRegistry();
         Version version = VersionUtils.randomVersion(random());
         IndexAnalyzers analyzers = getIndexAnalyzers(registry, Settings.builder()
-            .put("index.analysis.analyzer.no_version.tokenizer", "no_version")
-            .put("index.analysis.analyzer.lucene_version.tokenizer", "lucene_version")
-            .put("index.analysis.analyzer.elasticsearch_version.tokenizer", "elasticsearch_version")
-            .put(IndexMetaData.SETTING_VERSION_CREATED, version)
-            .build());
-        assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[] {"no_version"});
-        assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[] {version.luceneVersion.toString()});
-        assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[] {version.toString()});
+                .put("index.analysis.analyzer.no_version.tokenizer", "no_version")
+                .put("index.analysis.analyzer.lucene_version.tokenizer", "lucene_version")
+                .put("index.analysis.analyzer.elasticsearch_version.tokenizer", "elasticsearch_version")
+                .put(IndexMetaData.SETTING_VERSION_CREATED, version)
+                .build());
+        assertTokenStreamContents(analyzers.get("no_version").tokenStream("", "test"), new String[]{"no_version"});
+        assertTokenStreamContents(analyzers.get("lucene_version").tokenStream("", "test"), new String[]{version.luceneVersion.toString()});
+        assertTokenStreamContents(analyzers.get("elasticsearch_version").tokenStream("", "test"), new String[]{version.toString()});
 
         // These are current broken by https://github.com/elastic/elasticsearch/issues/24752
         // assertEquals("test" + (noVersionSupportsMultiTerm ? "no_version" : ""),
diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
index 075d5bc2aa3df..a42804692fbf3 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
@@ -112,8 +112,8 @@ public void testToXContent() throws IOException {
         searchHits.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();
         assertEquals("{\"hits\":{\"total\":1000,\"max_score\":1.5," +
-                "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":\"-Infinity\"},"+
-                "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":\"-Infinity\"}]}}", Strings.toString(builder));
+                "\"hits\":[{\"_type\":\"type\",\"_id\":\"id1\",\"_score\":null},"+
+                "{\"_type\":\"type\",\"_id\":\"id2\",\"_score\":null}]}}", Strings.toString(builder));
     }
 }
 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java
index 9919e9dcdbbd1..6a77a89fc58f0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java
@@ -62,7 +62,7 @@ private boolean needsScores(IndexService index, String agg) throws IOException {
         final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null);
         final Aggregator[] aggregators = factories.createTopLevelAggregators();
         assertEquals(1, aggregators.length);
-        return aggregators[0].needsScores();
+        return aggregators[0].scoreMode().needsScores();
     }
 }
 
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java
index f9abdeed50f82..e3fe39db95246 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java
@@ -27,6 +27,7 @@
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.store.Directory;
@@ -66,6 +67,11 @@ public DocIdSetIterator iterator() {
             throw new UnsupportedOperationException();
         }
 
+        @Override
+        public float getMaxScore(int upTo) throws IOException {
+            return Float.MAX_VALUE;
+        }
+
         @Override
         public Weight getWeight() {
             throw new UnsupportedOperationException();
@@ -107,8 +113,8 @@ public void collect(int doc, long bucket) throws IOException {
         }
 
         @Override
-        public boolean needsScores() {
-            return false;
+        public ScoreMode scoreMode() {
+            return ScoreMode.COMPLETE;
         }
 
         @Override
@@ -136,8 +142,8 @@ public void collect(int doc, long bucket) throws IOException {
         }
 
        @Override
-        public boolean needsScores() {
-            return false;
+        public ScoreMode scoreMode() {
+            return ScoreMode.COMPLETE_NO_SCORES;
         }
 
         @Override
@@ -173,8 +179,8 @@ public void setScorer(Scorer scorer) throws IOException {
         }
 
         @Override
-        public boolean needsScores() {
-            return false;
+        public ScoreMode scoreMode() {
+            return ScoreMode.COMPLETE;
         }
 
         @Override
diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java index 8d60dde58343f..2f99ebbf323d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/BestBucketsDeferringCollectorTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; @@ -70,8 +71,8 @@ public void testReplay() throws Exception { when(searchContext.query()).thenReturn(rewrittenQuery); BestBucketsDeferringCollector collector = new BestBucketsDeferringCollector(searchContext, false) { @Override - public boolean needsScores() { - return true; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; } }; Set deferredCollectedDocIds = new HashSet<>(); @@ -126,8 +127,8 @@ public void postCollection() throws IOException { } @Override - public boolean needsScores() { - return false; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE_NO_SCORES; } }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java index 86e937a356b46..3a740e868ee23 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollectorTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; @@ -105,8 +106,8 @@ public void postCollection() throws IOException { } @Override - public boolean needsScores() { - return false; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE_NO_SCORES; } }; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java index 3289c5a7f6424..0fba35358ecb0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java @@ -26,10 +26,12 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; +import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.text.Text; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -104,12 +106,13 @@ protected InternalTopHits 
createTestInstance(String name, List inpu totalHits += internalHits.getTotalHits(); maxScore = max(maxScore, internalHits.getMaxScore()); for (int i = 0; i < internalHits.getHits().length; i++) { - ScoreDoc doc = inputs.get(input).getTopDocs().scoreDocs[i]; + ScoreDoc doc = inputs.get(input).getTopDocs().topDocs.scoreDocs[i]; if (testInstancesLookSortedByField) { doc = new FieldDoc(doc.doc, doc.score, ((FieldDoc) doc).fields, input); } else { @@ -253,7 +256,7 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { String name = instance.getName(); int from = instance.getFrom(); int size = instance.getSize(); - TopDocs topDocs = instance.getTopDocs(); + TopDocsAndMaxScore topDocs = instance.getTopDocs(); SearchHits searchHits = instance.getHits(); List pipelineAggregators = instance.pipelineAggregators(); Map metaData = instance.getMetaData(); @@ -268,7 +271,8 @@ protected InternalTopHits mutateInstance(InternalTopHits instance) { size += between(1, 100); break; case 3: - topDocs = new TopDocs(topDocs.totalHits + between(1, 100), topDocs.scoreDocs, topDocs.getMaxScore() + randomFloat()); + topDocs = new TopDocsAndMaxScore(new TopDocs(new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), + topDocs.topDocs.totalHits.relation), topDocs.topDocs.scoreDocs), topDocs.maxScore + randomFloat()); break; case 4: searchHits = new SearchHits(searchHits.getHits(), searchHits.totalHits + between(1, 100), diff --git a/server/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 92488a69d6d60..d5ceec9d7c285 100644 --- a/server/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -60,6 +60,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.startsWith; public class TransportTwoNodesSearchIT extends ESIntegTestCase { @@ -146,16 +147,16 @@ public void testDfsQueryThenFetch() throws Exception { SearchHit hit = hits[i]; assertThat(hit.getExplanation(), notNullValue()); assertThat(hit.getExplanation().getDetails().length, equalTo(1)); - assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[0].getDescription(), - equalTo("docFreq")); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[0].getValue(), - equalTo(100.0f)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[1].getDescription(), - equalTo("docCount")); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[1].getValue(), - equalTo(100.0f)); + assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(), + startsWith("n,")); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(), + equalTo(100L)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(), + startsWith("N,")); + 
assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(), + equalTo(100L)); assertThat("id[" + hit.getId() + "] -> " + hit.getExplanation().toString(), hit.getId(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; @@ -181,16 +182,16 @@ public void testDfsQueryThenFetchWithSort() throws Exception { SearchHit hit = hits[i]; assertThat(hit.getExplanation(), notNullValue()); assertThat(hit.getExplanation().getDetails().length, equalTo(1)); - assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[0].getDescription(), - equalTo("docFreq")); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[0].getValue(), - equalTo(100.0f)); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[1].getDescription(), - equalTo("docCount")); - assertThat(hit.getExplanation().getDetails()[0].getDetails()[0].getDetails()[1].getValue(), - equalTo(100.0f)); + assertThat(hit.getExplanation().getDetails()[0].getDetails().length, equalTo(3)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails().length, equalTo(2)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getDescription(), + startsWith("n,")); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[0].getValue(), + equalTo(100L)); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getDescription(), + startsWith("N,")); + assertThat(hit.getExplanation().getDetails()[0].getDetails()[1].getDetails()[1].getValue(), + equalTo(100L)); assertThat("id[" + hit.getId() + "]", hit.getId(), equalTo(Integer.toString(total + i))); } total += hits.length; diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 6657ad9823ffe..c9679ae2ea96c 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -99,7 +99,7 @@ static class MyScript extends ScoreScript implements ExplainableSearchScript { MyScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { super(params, lookup, leafContext); } - + @Override public Explanation explain(Explanation subQueryScore) throws IOException { Explanation scoreExp = Explanation.match(subQueryScore.getValue(), "_score: ", subQueryScore); @@ -139,10 +139,9 @@ public void testExplainScript() throws InterruptedException, IOException, Execut int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); - assertThat(hit.getExplanation().toString(), - containsString(Double.toString(idCounter) + " = This script returned " + Double.toString(idCounter))); - assertThat(hit.getExplanation().toString(), containsString("freq=1.0")); - assertThat(hit.getExplanation().toString(), containsString("termFreq=1.0")); + assertThat(hit.getExplanation().toString(), containsString(Double.toString(idCounter))); + assertThat(hit.getExplanation().toString(), containsString("1 = n")); + assertThat(hit.getExplanation().toString(), containsString("1 = N")); assertThat(hit.getExplanation().getDetails().length, equalTo(2)); 
idCounter--; } diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index fc11554dfb3fe..7e96539084e74 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; @@ -132,8 +133,8 @@ public void testScriptScoresWithAgg() throws IOException { } public void testMinScoreFunctionScoreBasic() throws IOException { - float score = randomFloat(); - float minScore = randomFloat(); + float score = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, ESTestCase::randomFloat); + float minScore = randomValueOtherThanMany((f) -> Float.compare(f, 0) < 0, ESTestCase::randomFloat); index(INDEX, TYPE, jsonBuilder().startObject() .field("num", 2) .field("random_score", score) // Pass the random score as a document field so that it can be extracted in the script @@ -167,8 +168,8 @@ public void testMinScoreFunctionScoreBasic() throws IOException { public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOException, ExecutionException, InterruptedException { List docs = new ArrayList<>(); int numDocs = randomIntBetween(1, 100); - int scoreOffset = randomIntBetween(-2 * numDocs, 2 * numDocs); - int minScore = randomIntBetween(-2 * numDocs, 2 * numDocs); + int scoreOffset = randomIntBetween(0, 2 * numDocs); + int minScore = randomIntBetween(0, 2 * numDocs); for (int i = 0; i < numDocs; i++) { docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); } diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 5e10292fa3e7c..fd924ce07ca93 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.RandomApproximationQuery; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; @@ -218,7 +219,7 @@ public int hashCode() { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new Weight(this) { @Override public void extractTerms(Set terms) { @@ -267,7 +268,7 @@ public void testScorerSupplier() throws IOException { w.close(); IndexSearcher s = newSearcher(reader); s.setQueryCache(null); - Weight weight = s.createNormalizedWeight(new DummyQuery(), randomBoolean()); + Weight weight = s.createWeight(s.rewrite(new DummyQuery()), randomFrom(ScoreMode.values()), 1f); // exception when getting the scorer expectThrows(UnsupportedOperationException.class, () -> weight.scorer(s.getIndexReader().leaves().get(0))); // no exception, means scorerSupplier is delegated diff --git 
a/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java b/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java index 14fe8d58132f9..00b859394c65f 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/RandomQueryGenerator.java @@ -61,8 +61,7 @@ public static QueryBuilder randomQueryBuilder(List stringFields, List {}); assertFalse(rescore); - assertEquals(searcher.count(query), context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); + assertEquals(searcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); } private void countTestCase(boolean withDeletions) throws Exception { @@ -173,15 +171,12 @@ public void testPostFilterDisablesCountOptimization() throws Exception { context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertEquals(1, context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); contextSearcher = new IndexSearcher(reader); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertEquals(0, context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); reader.close(); dir.close(); } @@ -209,9 +204,8 @@ public void testTerminateAfterWithFilter() throws Exception { for (int i = 0; i < 10; i++) { context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", Integer.toString(i))))); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertEquals(1, context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } reader.close(); dir.close(); @@ -234,14 +228,12 @@ public void testMinScoreDisablesCountOptimization() throws Exception { context.setSize(0); context.setTask(new SearchTask(123L, "", "", "", null, Collections.emptyMap())); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertEquals(1, context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); contextSearcher = new IndexSearcher(reader); context.minimumScore(100); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertEquals(0, context.queryResult().topDocs().totalHits); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); reader.close(); dir.close(); } @@ -288,28 +280,25 @@ public void testInOrderScrollOptimization() throws Exception { ScrollContext scrollContext = new ScrollContext(); scrollContext.lastEmittedDoc = null; scrollContext.maxScore = 
Float.NaN; - scrollContext.totalHits = -1; + scrollContext.totalHits = null; context.scrollContext(scrollContext); context.setTask(new SearchTask(123L, "", "", "", null, Collections.emptyMap())); int size = randomIntBetween(2, 5); context.setSize(size); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); contextSearcher = getAssertingEarlyTerminationSearcher(reader, size); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); assertTrue(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(size)); - assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); - assertThat(context.queryResult().topDocs().scoreDocs[0].doc, greaterThanOrEqualTo(size)); + assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); reader.close(); dir.close(); } @@ -343,25 +332,22 @@ public void testTerminateAfterEarlyTermination() throws Exception { context.setSize(1); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); context.setSize(0); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { context.setSize(1); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(1F)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } { context.setSize(1); @@ -372,17 +358,15 @@ public void 
testTerminateAfterEarlyTermination() throws Exception { context.parsedQuery(new ParsedQuery(bq)); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), greaterThan(0f)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); context.setSize(0); context.parsedQuery(new ParsedQuery(bq)); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { context.setSize(1); @@ -390,9 +374,8 @@ public void testTerminateAfterEarlyTermination() throws Exception { context.queryCollectors().put(TotalHitCountCollector.class, collector); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), greaterThan(0f)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(collector.getTotalHits(), equalTo(1)); context.queryCollectors().clear(); } @@ -402,9 +385,8 @@ public void testTerminateAfterEarlyTermination() throws Exception { context.queryCollectors().put(TotalHitCountCollector.class, collector); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(1L)); - assertThat(context.queryResult().topDocs().getMaxScore(), equalTo(Float.NaN)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(0)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); assertThat(collector.getTotalHits(), equalTo(1)); } @@ -441,19 +423,19 @@ public void testIndexSortingEarlyTermination() throws Exception { final IndexReader reader = DirectoryReader.open(dir); IndexSearcher contextSearcher = new IndexSearcher(reader); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); - assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); - FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[0]; + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], 
instanceOf(FieldDoc.class)); + FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; assertThat(fieldDoc.fields[0], equalTo(1)); { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo(numDocs - 1L)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); - assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); context.parsedPostFilter(null); @@ -461,9 +443,9 @@ public void testIndexSortingEarlyTermination() throws Exception { context.queryCollectors().put(TotalHitCountCollector.class, totalHitCountCollector); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); - assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); assertThat(totalHitCountCollector.getTotalHits(), equalTo(numDocs)); context.queryCollectors().clear(); @@ -474,14 +456,14 @@ public void testIndexSortingEarlyTermination() throws Exception { context.trackTotalHits(false); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); - assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().scoreDocs.length, equalTo(1)); - assertThat(context.queryResult().topDocs().scoreDocs[0], instanceOf(FieldDoc.class)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); + assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); } reader.close(); @@ -520,27 +502,27 @@ public void testIndexSortScrollOptimization() throws Exception { ScrollContext scrollContext = new ScrollContext(); scrollContext.lastEmittedDoc = null; scrollContext.maxScore = Float.NaN; - scrollContext.totalHits = -1; + scrollContext.totalHits = null; context.scrollContext(scrollContext); context.setTask(new SearchTask(123L, "", "", "", null, Collections.emptyMap())); context.setSize(10); 
context.sort(searchSortAndFormat); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); - int sizeMinus1 = context.queryResult().topDocs().scoreDocs.length - 1; - FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[sizeMinus1]; + assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; + FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; contextSearcher = getAssertingEarlyTerminationSearcher(reader, 10); QueryPhase.execute(context, contextSearcher, checkCancelled -> {}); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().totalHits, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits(), equalTo((long) numDocs)); - FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().scoreDocs[0]; + assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") FieldComparator comparator = (FieldComparator) searchSortAndFormat.sort.getSort()[i].getComparator(1, i); diff --git a/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java b/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java index 846c411881f4f..70eb0266eea38 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.UUIDs; @@ -112,8 +113,8 @@ public void collect(int doc) throws IOException { } @Override - public boolean needsScores() { - return false; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE_NO_SCORES; } }); } diff --git a/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java b/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java index 3fa4ce410529a..9ae4b9bc7daf5 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java @@ -31,7 +31,7 @@ import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.QueryUtils; - +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.StringHelper; @@ -106,8 +106,8 @@ public void collect(int doc) throws IOException { } @Override - public 
boolean needsScores() { - return false; + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE_NO_SCORES; } }); } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 65c58e631ec0e..ca21cbc86ca9d 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -526,7 +526,7 @@ public void testThatSynonymsWork() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom") .put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard") - .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms") + .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "lowercase", "my_synonyms") .put("analysis.filter.my_synonyms.type", "synonym") .putList("analysis.filter.my_synonyms.synonyms", "foo,renamed"); completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms"); @@ -804,7 +804,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce public void testThatSuggestStopFilterWorks() throws Exception { Settings.Builder settingsBuilder = Settings.builder() .put("index.analysis.analyzer.stoptest.tokenizer", "standard") - .putList("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter") + .putList("index.analysis.analyzer.stoptest.filter", "suggest_stop_filter") .put("index.analysis.filter.suggest_stop_filter.type", "stop") .put("index.analysis.filter.suggest_stop_filter.remove_trailing", false); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index aaeaadd4c9f83..995a2c10fe555 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -687,7 +687,7 @@ public void testShardFailures() throws IOException, InterruptedException { .put(indexSettings()) .put(IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey(), 4) .put("index.analysis.analyzer.suggest.tokenizer", "standard") - .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") + .putList("index.analysis.analyzer.suggest.filter", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) @@ -748,7 +748,7 @@ public void testEmptyShards() throws IOException, InterruptedException { .put(indexSettings()) .put(IndexSettings.MAX_SHINGLE_DIFF_SETTING.getKey(), 4) .put("index.analysis.analyzer.suggest.tokenizer", "standard") - .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler") + .putList("index.analysis.analyzer.suggest.filter", "lowercase", "shingler") .put("index.analysis.filter.shingler.type", "shingle") .put("index.analysis.filter.shingler.min_shingle_size", 2) .put("index.analysis.filter.shingler.max_shingle_size", 5) diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 
925526323a540..ca95310cd501f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.search.spell.DirectSpellChecker; import org.apache.lucene.search.spell.JaroWinklerDistance; -import org.apache.lucene.search.spell.LevensteinDistance; +import org.apache.lucene.search.spell.LevenshteinDistance; import org.apache.lucene.search.spell.LuceneLevenshteinDistance; import org.apache.lucene.search.spell.NGramDistance; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -76,7 +76,7 @@ public void testEqualsAndHashcode() throws IOException { public void testFromString() { assertThat(DirectCandidateGeneratorBuilder.resolveDistance("internal"), equalTo(DirectSpellChecker.INTERNAL_LEVENSHTEIN)); assertThat(DirectCandidateGeneratorBuilder.resolveDistance("damerau_levenshtein"), instanceOf(LuceneLevenshteinDistance.class)); - assertThat(DirectCandidateGeneratorBuilder.resolveDistance("levenshtein"), instanceOf(LevensteinDistance.class)); + assertThat(DirectCandidateGeneratorBuilder.resolveDistance("levenshtein"), instanceOf(LevenshteinDistance.class)); assertThat(DirectCandidateGeneratorBuilder.resolveDistance("jaro_winkler"), instanceOf(JaroWinklerDistance.class)); assertThat(DirectCandidateGeneratorBuilder.resolveDistance("ngram"), instanceOf(NGramDistance.class)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/analysis/MyFilterTokenFilterFactory.java b/test/framework/src/main/java/org/elasticsearch/index/analysis/MyFilterTokenFilterFactory.java index 921a09e98e691..157adf9e55cf2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/analysis/MyFilterTokenFilterFactory.java +++ b/test/framework/src/main/java/org/elasticsearch/index/analysis/MyFilterTokenFilterFactory.java @@ -20,11 +20,10 @@ import org.apache.lucene.analysis.StopFilter; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.core.StopAnalyzer; +import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; public class MyFilterTokenFilterFactory extends AbstractTokenFilterFactory { @@ -34,6 +33,6 @@ public MyFilterTokenFilterFactory(IndexSettings indexSettings, Environment env, @Override public TokenStream create(TokenStream tokenStream) { - return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET); + return new StopFilter(tokenStream, EnglishAnalyzer.ENGLISH_STOP_WORDS_SET); } } diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 5298c3995cec2..2164fe32a3945 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -29,7 +29,6 @@ import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; -import org.elasticsearch.index.analysis.StandardTokenFilterFactory; import org.elasticsearch.index.analysis.StandardTokenizerFactory; 
import org.elasticsearch.index.analysis.StopTokenFilterFactory; import org.elasticsearch.index.analysis.SynonymGraphTokenFilterFactory; @@ -167,7 +166,6 @@ private static String toCamelCase(String s) { .put("soraninormalization", MovedToAnalysisCommon.class) .put("soranistem", MovedToAnalysisCommon.class) .put("spanishlightstem", MovedToAnalysisCommon.class) - .put("standard", StandardTokenFilterFactory.class) .put("stemmeroverride", MovedToAnalysisCommon.class) .put("stop", StopTokenFilterFactory.class) .put("swedishlightstem", MovedToAnalysisCommon.class) @@ -267,8 +265,9 @@ protected Map> getTokenizers() { */ protected Map> getPreConfiguredTokenFilters() { Map> filters = new HashMap<>(); - filters.put("standard", null); filters.put("lowercase", null); + // for old indices + filters.put("standard", Void.class); return filters; } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 0ee5798efb30b..71d40a7b86ab6 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -393,7 +393,7 @@ public class MockSimilarityScript extends SimilarityScript { } @Override - public double execute(double weight, Query query, Field field, Term term, Doc doc) throws IOException { + public double execute(double weight, Query query, Field field, Term term, Doc doc) { Map map = new HashMap<>(); map.put("weight", weight); map.put("query", query); @@ -413,7 +413,7 @@ public class MockSimilarityWeightScript extends SimilarityWeightScript { } @Override - public double execute(Query query, Field field, Term term) throws IOException { + public double execute(Query query, Field field, Term term) { Map map = new HashMap<>(); map.put("query", query); map.put("field", field); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 22c5772ff2d53..17202839a65fd 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; +import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -367,7 +368,7 @@ protected A searchAndReduc List aggs = new ArrayList<> (); Query rewritten = searcher.rewrite(query); - Weight weight = searcher.createWeight(rewritten, true, 1f); + Weight weight = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket); C root = createAggregator(query, builder, searcher, bucketConsumer, fieldTypes); diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index fc2a85b35a95b..27bcb5868c548 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -143,7 +143,7 @@ public AssertingIndexSearcher 
newSearcher(Engine.Searcher searcher) throws Engin } // this executes basic query checks and asserts that weights are normalized only once etc. final AssertingIndexSearcher assertingIndexSearcher = new AssertingIndexSearcher(mockContext.random, wrappedReader); - assertingIndexSearcher.setSimilarity(searcher.searcher().getSimilarity(true)); + assertingIndexSearcher.setSimilarity(searcher.searcher().getSimilarity()); assertingIndexSearcher.setQueryCache(filterCache); assertingIndexSearcher.setQueryCachingPolicy(filterCachingPolicy); return assertingIndexSearcher; @@ -185,7 +185,7 @@ public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrap public Engine.Searcher wrapSearcher(String source, Engine.Searcher engineSearcher) { final AssertingIndexSearcher assertingIndexSearcher = newSearcher(engineSearcher); - assertingIndexSearcher.setSimilarity(engineSearcher.searcher().getSimilarity(true)); + assertingIndexSearcher.setSimilarity(engineSearcher.searcher().getSimilarity()); // pass the original searcher to the super.newSearcher() method to make sure this is the searcher that will // be released later on. If we wrap an index reader here must not pass the wrapped version to the manager // on release otherwise the reader will be closed too early. - good news, stuff will fail all over the place if we don't get this right here diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json index 38937a9b5af93..e69c2db6ff400 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.json @@ -42,7 +42,7 @@ }, "czechAnalyzerWithStemmer":{ "tokenizer":"standard", - "filter":["standard", "lowercase", "stop", "czech_stem"] + "filter":["lowercase", "stop", "czech_stem"] }, "decompoundingAnalyzer":{ "tokenizer":"standard", diff --git a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml index f7a57d14dbe3d..82f933296a314 100644 --- a/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml +++ b/test/framework/src/main/resources/org/elasticsearch/analysis/common/test1.yml @@ -33,7 +33,7 @@ index : version: 3.6 czechAnalyzerWithStemmer : tokenizer : standard - filter : [standard, lowercase, stop, czech_stem] + filter : [lowercase, stop, czech_stem] decompoundingAnalyzer : tokenizer : standard filter : [dict_dec] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 223b7f00807fe..6d3864aa3eba2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -375,6 +375,7 @@ private Terms wrapTerms(Terms terms, String field) throws IOException { class FieldNamesTerms extends FilterTerms { final long size; final long sumDocFreq; + final long sumTotalFreq; FieldNamesTerms(Terms in) throws IOException { super(in); @@ -382,13 +383,15 @@ class FieldNamesTerms extends FilterTerms { // re-compute the stats for the field to take // into account the filtered terms. 
final TermsEnum e = iterator(); - long size = 0, sumDocFreq = 0; + long size = 0, sumDocFreq = 0, sumTotalFreq = 0; while (e.next() != null) { size ++; sumDocFreq += e.docFreq(); + sumTotalFreq += e.totalTermFreq(); } this.size = size; this.sumDocFreq = sumDocFreq; + this.sumTotalFreq = sumTotalFreq; } @Override @@ -406,6 +409,11 @@ public long getSumDocFreq() throws IOException { return sumDocFreq; } + @Override + public long getSumTotalTermFreq() throws IOException { + return sumTotalFreq; + } + @Override public int getDocCount() throws IOException { // it is costly to recompute this value so we assume that docCount == maxDoc. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java index 9426b64364783..60b598a3a99c8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapper.java @@ -183,7 +183,7 @@ protected IndexSearcher wrap(IndexSearcher searcher) throws EngineException { IndexSearcher indexSearcher = new IndexSearcherWrapper((DocumentSubsetDirectoryReader) directoryReader); indexSearcher.setQueryCache(indexSearcher.getQueryCache()); indexSearcher.setQueryCachingPolicy(indexSearcher.getQueryCachingPolicy()); - indexSearcher.setSimilarity(indexSearcher.getSimilarity(true)); + indexSearcher.setSimilarity(indexSearcher.getSimilarity()); return indexSearcher; } return searcher; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index dca2f37f3f224..bd6ac12ee3c1b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -108,14 +108,14 @@ public void testSearch() throws Exception { new TermQuery(new Term("field", "value1")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits, equalTo(1L)); + assertThat(result.totalHits.value, equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits, equalTo(1L)); + assertThat(result.totalHits.value, equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: @@ -123,13 +123,13 @@ public void testSearch() throws Exception { new TermQuery(new Term("field", "value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits, equalTo(0L)); + assertThat(result.totalHits.value, equalTo(0L)); indexSearcher = new IndexSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetFilterCache, new 
TermQuery(new Term("field", "value4"))));
         assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1));
         result = indexSearcher.search(new MatchAllDocsQuery(), 1);
-        assertThat(result.totalHits, equalTo(1L));
+        assertThat(result.totalHits.value, equalTo(1L));
         assertThat(result.scoreDocs[0].doc, equalTo(3));
     }
 
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
index dccbd14c04704..e364b0a7e8a66 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexSearcherWrapperUnitTests.java
@@ -232,7 +232,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) {
                 new SecurityIndexSearcherWrapper(indexSettings, null, null, threadContext, licenseState, scriptService);
         IndexSearcher result = securityIndexSearcherWrapper.wrap(indexSearcher);
         assertThat(result, not(sameInstance(indexSearcher)));
-        assertThat(result.getSimilarity(true), sameInstance(indexSearcher.getSimilarity(true)));
+        assertThat(result.getSimilarity(), sameInstance(indexSearcher.getSimilarity()));
         bitsetFilterCache.close();
     }
 
@@ -270,7 +270,8 @@ public void testIntersectScorerAndRoleBits() throws Exception {
         iw.close();
         DirectoryReader directoryReader = DirectoryReader.open(directory);
         IndexSearcher searcher = new IndexSearcher(directoryReader);
-        Weight weight = searcher.createNormalizedWeight(new TermQuery(new Term("field2", "value1")), false);
+        Weight weight = searcher.createWeight(new TermQuery(new Term("field2", "value1")),
+                org.apache.lucene.search.ScoreMode.COMPLETE_NO_SCORES, 1f);
 
         LeafReaderContext leaf = directoryReader.leaves().get(0);
 
@@ -545,8 +546,8 @@ public Query rewrite(IndexReader reader) throws IOException {
         }
 
         @Override
-        public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
-            return new CreateScorerOnceWeight(query.createWeight(searcher, needsScores, boost));
+        public Weight createWeight(IndexSearcher searcher, org.apache.lucene.search.ScoreMode scoreMode, float boost) throws IOException {
+            return new CreateScorerOnceWeight(query.createWeight(searcher, scoreMode, boost));
         }
 
         @Override
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/OptOutQueryCacheTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/OptOutQueryCacheTests.java
index 1d6d524cbbb70..efe154f8d780c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/OptOutQueryCacheTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/OptOutQueryCacheTests.java
@@ -11,6 +11,7 @@
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.QueryCachingPolicy;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.Weight;
@@ -65,7 +66,7 @@ public void testOptOutQueryCacheSafetyCheck() throws IOException {
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.add(new TermQuery(new Term("foo", "bar")), BooleanClause.Occur.MUST);
         builder.add(new TermQuery(new Term("no", "baz")), BooleanClause.Occur.MUST_NOT);
-        Weight weight = builder.build().createWeight(searcher, false, 1f);
+        Weight weight = builder.build().createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1f);
 
         // whenever the allowed fields match the fields in the query and we do not deny access to any fields we allow caching.
         IndicesAccessControl.IndexAccessControl permissions = new IndicesAccessControl.IndexAccessControl(true,
diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
deleted file mode 100644
index fdedaf3fc5756..0000000000000
--- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-7.5.0-snapshot-13b9e28f9d.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-73dd7703a94ec2357581f65ee7c1c4d618ff310f
\ No newline at end of file
diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1
new file mode 100644
index 0000000000000..683b585bb2f61
--- /dev/null
+++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1
@@ -0,0 +1 @@
+126faacb28d1b8cc1ab81d702973d057892120d1
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
index 9570eaf1b6a06..0f00822e3f445 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
@@ -5,7 +5,7 @@
  */
 package org.elasticsearch.xpack.sql.util;
 
-import org.apache.lucene.search.spell.LevensteinDistance;
+import org.apache.lucene.search.spell.LevenshteinDistance;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.collect.Tuple;
@@ -248,7 +248,7 @@ public static String toString(SearchSourceBuilder source) {
     }
 
     public static List<String> findSimilar(String match, Iterable<String> potentialMatches) {
-        LevensteinDistance ld = new LevensteinDistance();
+        LevenshteinDistance ld = new LevenshteinDistance();
         List<Tuple<Float, String>> scoredMatches = new ArrayList<>();
         for (String potentialMatch : potentialMatches) {
             float distance = ld.getDistance(match, potentialMatch);

From 443f9cadddd44249f5e4ed75c8ab0e7b97135931 Mon Sep 17 00:00:00 2001
From: Costin Leau
Date: Thu, 6 Sep 2018 01:19:49 +0300
Subject: [PATCH 17/91] DOC: Enhance SQL Functions documentation

Split function section into multiple chapters
Add String functions
Add (small) section on Conversion/Cast functions
Add missing aggregation functions
Enable documentation testing (was disabled by accident). While at it,
fix failing tests
Improve spec tests to allow multi-line queries (useful for docs)
Add ability to ignore a spec test (name should end with -Ignore)
While at it, fix failing tests Improve spec tests to allow multi-line queries (useful for docs) Add ability to ignore a spec test (name should end with -Ignore) --- docs/reference/sql/functions/aggs.asciidoc | 168 +++++ .../sql/functions/date-time.asciidoc | 94 +++ docs/reference/sql/functions/index.asciidoc | 430 +------------ docs/reference/sql/functions/math.asciidoc | 159 +++++ .../sql/functions/operators.asciidoc | 115 ++++ docs/reference/sql/functions/search.asciidoc | 35 + docs/reference/sql/functions/string.asciidoc | 240 +++++++ .../sql/functions/type-conversion.asciidoc | 39 ++ .../sql/language/data-types.asciidoc | 2 +- ...cCsvSpectIT.java => JdbcDocCsvSpecIT.java} | 10 +- .../xpack/qa/sql/jdbc/CsvTestUtils.java | 19 +- .../sql/jdbc/SpecBaseIntegrationTestCase.java | 1 + .../xpack/qa/sql/jdbc/SqlSpecTestCase.java | 19 +- x-pack/qa/sql/src/main/resources/agg.csv-spec | 4 +- x-pack/qa/sql/src/main/resources/agg.sql-spec | 8 - .../qa/sql/src/main/resources/docs.csv-spec | 604 +++++++++++++++--- 16 files changed, 1413 insertions(+), 534 deletions(-) create mode 100644 docs/reference/sql/functions/aggs.asciidoc create mode 100644 docs/reference/sql/functions/date-time.asciidoc create mode 100644 docs/reference/sql/functions/math.asciidoc create mode 100644 docs/reference/sql/functions/operators.asciidoc create mode 100644 docs/reference/sql/functions/search.asciidoc create mode 100644 docs/reference/sql/functions/string.asciidoc create mode 100644 docs/reference/sql/functions/type-conversion.asciidoc rename x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/{JdbcDocCsvSpectIT.java => JdbcDocCsvSpecIT.java} (89%) diff --git a/docs/reference/sql/functions/aggs.asciidoc b/docs/reference/sql/functions/aggs.asciidoc new file mode 100644 index 0000000000000..c2d485dbe6aad --- /dev/null +++ b/docs/reference/sql/functions/aggs.asciidoc @@ -0,0 +1,168 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-aggs]] +=== Aggregate Functions + +Functions for computing a _single_ result from a set of input values. +{es-sql} supports aggregate functions only alongside <> (implicit or explicit). + +==== General Purpose + +[[sql-functions-aggs-avg]] +===== `AVG` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Arithmetic_mean[Average] (arithmetic mean) of input values. + + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggAvg] +---- + +[[sql-functions-aggs-count]] +===== `COUNT` + +*Input*: Any, *Output*: `bigint` + +Total number (count) of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggCountStar] +---- + +[[sql-functions-aggs-count-distinct]] +===== `COUNT(DISTINCT)` + +*Input*: Any, *Output*: `bigint` + +Total number of _distinct_ values in input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggCountDistinct] +---- + +[[sql-functions-aggs-max]] +===== `MAX` + +*Input*: Numeric, *Output*: Same as input + +Maximum value across input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggMax] +---- + +[[sql-functions-aggs-min]] +===== `MIN` + +*Input*: Numeric, *Output*: Same as input + +Minimum value across input values. 
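For a concrete feel of the call shape, here is the `MIN` query from the aggMin test in docs.csv-spec later in this patch, together with the value it records against the `emp` test data set (the include-tagged block below pulls the same snippet into the built docs):

[source, sql]
----
SELECT MIN(salary) AS min FROM emp;

      min
---------------
25324
----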
+ +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggMin] +---- + +[[sql-functions-aggs-sum]] +===== `SUM` + +*Input*: Numeric, *Output*: `bigint` for integer input, `double` for floating points + +Sum of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggSum] +---- + +==== Statistics + +[[sql-functions-aggs-kurtosis]] +===== `KURTOSIS` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Kurtosis[Quantify] the shape of the distribution of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggKurtosis] +---- + +[[sql-functions-aggs-percentile]] +===== `PERCENTILE` + +*Input*: Numeric, *Output*: `double` + +The nth https://en.wikipedia.org/wiki/Percentile[percentile] of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggPercentile] +---- + +[[sql-functions-aggs-percentile-rank]] +===== `PERCENTILE_RANK` + +*Input*: Numeric, *Output*: `double` + +The https://en.wikipedia.org/wiki/Percentile_rank[percentile rank] of input values of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggPercentileRank] +---- + +[[sql-functions-aggs-skewness]] +===== `SKEWNESS` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Skewness[Quantify] the asymmetric distribution of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggSkewness] +---- + +[[sql-functions-aggs-stddev-pop]] +===== `STDDEV_POP` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Standard_deviations[Population standard deviation] of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggStddevPop] +---- + +[[sql-functions-aggs-sum-squares]] +===== `SUM_OF_SQUARES` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Total_sum_of_squares[Sum of squares] of input values. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggSumOfSquares] +---- + +[[sql-functions-aggs-var-pop]] +===== `VAR_POP` + +*Input*: Numeric, *Output*: `double` + +https://en.wikipedia.org/wiki/Variance[Population] variance of input values. 
+ +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[aggVarPop] +---- \ No newline at end of file diff --git a/docs/reference/sql/functions/date-time.asciidoc b/docs/reference/sql/functions/date-time.asciidoc new file mode 100644 index 0000000000000..f0d90cbb07ec2 --- /dev/null +++ b/docs/reference/sql/functions/date-time.asciidoc @@ -0,0 +1,94 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-datetime]] +=== Date and Time Functions + +* Extract the year from a date (`YEAR`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[year] +-------------------------------------------------- + +* Extract the month of the year from a date (`MONTH_OF_YEAR` or `MONTH`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[monthOfYear] +-------------------------------------------------- + +* Extract the week of the year from a date (`WEEK_OF_YEAR` or `WEEK`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[weekOfYear] +-------------------------------------------------- + +* Extract the day of the year from a date (`DAY_OF_YEAR` or `DOY`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] +-------------------------------------------------- + +* Extract the day of the month from a date (`DAY_OF_MONTH`, `DOM`, or `DAY`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfMonth] +-------------------------------------------------- + +* Extract the day of the week from a date (`DAY_OF_WEEK` or `DOW`). +Monday is `1`, Tuesday is `2`, etc. + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfWeek] +-------------------------------------------------- + +* Extract the hour of the day from a date (`HOUR_OF_DAY` or `HOUR`). +Monday is `1`, Tuesday is `2`, etc. + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[hourOfDay] +-------------------------------------------------- + +* Extract the minute of the day from a date (`MINUTE_OF_DAY`). + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[minuteOfDay] +-------------------------------------------------- + +* Extract the minute of the hour from a date (`MINUTE_OF_HOUR`, `MINUTE`). + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[minuteOfHour] +-------------------------------------------------- + +* Extract the second of the minute from a date (`SECOND_OF_MINUTE`, `SECOND`). + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[secondOfMinute] +-------------------------------------------------- + +* Extract + +As an alternative, one can support `EXTRACT` to extract fields from datetimes. 
+You can run any <> +with `EXTRACT( FROM )`. So + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] +-------------------------------------------------- + +is the equivalent to + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] +-------------------------------------------------- \ No newline at end of file diff --git a/docs/reference/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc index 93d201a182828..82e8154de93c9 100644 --- a/docs/reference/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -3,416 +3,20 @@ [[sql-functions]] == Functions and Operators -{es-sql} provides a number of built-in operators and functions. - -=== Comparison Operators - -{es-sql} supports the following comparison operators: - -* Equality (`=`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldEquality] --------------------------------------------------- - -* Inequality (`<>` or `!=` or `<=>`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality] --------------------------------------------------- - -* Comparison (`<`, `<=`, `>`, `>=`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan] --------------------------------------------------- - -* `BETWEEN` - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereBetween] --------------------------------------------------- - -* `IS NULL`/`IS NOT NULL` - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] --------------------------------------------------- - - -=== Logical Operators - -{es-sql} supports the following logical operators: - -* `AND` - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison] --------------------------------------------------- - -* `OR` - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison] --------------------------------------------------- - -* `NOT` - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/filter.sql-spec[whereFieldEqualityNot] --------------------------------------------------- - - -=== Math Operators - -{es-sql} supports the following math operators: - -* Add (`+`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[plus] --------------------------------------------------- - -* Subtract (infix `-`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[minus] 
--------------------------------------------------- - -* Negate (unary `-`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] --------------------------------------------------- - -* Multiply (`*`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] --------------------------------------------------- - -* Divide (`/`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[divide] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Modulo_operation[Modulo] or Reminder(`%`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/arithmetic.sql-spec[mod] --------------------------------------------------- - - -=== Math Functions - -All math and trigonometric functions require their input (where applicable) -to be numeric. - -==== Generic - -* `ABS` - -https://en.wikipedia.org/wiki/Absolute_value[Absolute value], returns \[same type as input] - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[abs] --------------------------------------------------- - -* `CBRT` - -https://en.wikipedia.org/wiki/Cube_root[Cube root], returns `double` - -// TODO make the example in the tests presentable - -* `CEIL` - -https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Ceiling], returns `double` - -* `CEILING` - -Same as `CEIL` - -// TODO make the example in the tests presentable - -* `E` - -https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number], returns `2.7182818284590452354` - - -* https://en.wikipedia.org/wiki/Rounding#Round_half_up[Round] (`ROUND`) - -// TODO make the example in the tests presentable - -NOTE: This rounds "half up" meaning that `ROUND(-1.5)` results in `-1`. 
- - -* https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Floor] (`FLOOR`) - -// TODO make the example in the tests presentable - -* https://en.wikipedia.org/wiki/Natural_logarithm[Natural logarithm] (`LOG`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Logarithm[Logarithm] base 10 (`LOG10`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[log10] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Square_root[Square root] (`SQRT`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sqrt] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Exponential_function[e^x^] (`EXP`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[exp] --------------------------------------------------- - -* https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[e^x^ - 1] (`EXPM1`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[expm1] --------------------------------------------------- - -==== Trigonometric - -* Convert from https://en.wikipedia.org/wiki/Radian[radians] -to https://en.wikipedia.org/wiki/Degree_(angle)[degrees] (`DEGREES`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] --------------------------------------------------- - -* Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] -to https://en.wikipedia.org/wiki/Radian[radians] (`RADIANS`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[degrees] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Trigonometric_functions#sine[Sine] (`SIN`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sin] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[Cosine] (`COS`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[cos] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[Tangent] (`TAN`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[tan] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc sine] (`ASIN`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[asin] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc cosine] (`ACOS`) - 
-["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[acos] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc tangent] (`ATAN`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[atan] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic sine] (`SINH`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[sinh] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic cosine] (`COSH`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/math.sql-spec[cosh] --------------------------------------------------- - -[[sql-functions-datetime]] -=== Date and Time Functions - -* Extract the year from a date (`YEAR`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[year] --------------------------------------------------- - -* Extract the month of the year from a date (`MONTH_OF_YEAR` or `MONTH`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[monthOfYear] --------------------------------------------------- - -* Extract the week of the year from a date (`WEEK_OF_YEAR` or `WEEK`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[weekOfYear] --------------------------------------------------- - -* Extract the day of the year from a date (`DAY_OF_YEAR` or `DOY`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] --------------------------------------------------- - -* Extract the day of the month from a date (`DAY_OF_MONTH`, `DOM`, or `DAY`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfMonth] --------------------------------------------------- - -* Extract the day of the week from a date (`DAY_OF_WEEK` or `DOW`). -Monday is `1`, Tuesday is `2`, etc. - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfWeek] --------------------------------------------------- - -* Extract the hour of the day from a date (`HOUR_OF_DAY` or `HOUR`). -Monday is `1`, Tuesday is `2`, etc. - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[hourOfDay] --------------------------------------------------- - -* Extract the minute of the day from a date (`MINUTE_OF_DAY`). 
- -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[minuteOfDay] --------------------------------------------------- - -* Extract the minute of the hour from a date (`MINUTE_OF_HOUR`, `MINUTE`). - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[minuteOfHour] --------------------------------------------------- - -* Extract the second of the minute from a date (`SECOND_OF_MINUTE`, `SECOND`). - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[secondOfMinute] --------------------------------------------------- - -* Extract - -As an alternative, one can support `EXTRACT` to extract fields from datetimes. -You can run any <> -with `EXTRACT( FROM )`. So - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[extractDayOfYear] --------------------------------------------------- - -is the equivalent to - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/datetime.csv-spec[dayOfYear] --------------------------------------------------- - - -[[sql-functions-aggregate]] -=== Aggregate Functions - -==== Basic - -* https://en.wikipedia.org/wiki/Arithmetic_mean[Average] (`AVG`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.sql-spec[avg] --------------------------------------------------- - -* Count the number of matching fields (`COUNT`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.sql-spec[countStar] --------------------------------------------------- - -* Count the number of distinct values in matching documents (`COUNT(DISTINCT`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.sql-spec[countDistinct] --------------------------------------------------- - -* Find the maximum value in matching documents (`MAX`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.sql-spec[max] --------------------------------------------------- - -* Find the minimum value in matching documents (`MIN`) - -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.sql-spec[min] --------------------------------------------------- - -* https://en.wikipedia.org/wiki/Kahan_summation_algorithm[Sum] -all values of matching documents (`SUM`). 
- -["source","sql",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{sql-specs}/agg.csv-spec[sum] --------------------------------------------------- +{es-sql} provides a comprehensive set of built-in operators and functions: + +* <> +* <> +* <> +* <> +* <> +* <> +* <> + +include::operators.asciidoc[] +include::aggs.asciidoc[] +include::date-time.asciidoc[] +include::search.asciidoc[] +include::math.asciidoc[] +include::string.asciidoc[] +include::type-conversion.asciidoc[] diff --git a/docs/reference/sql/functions/math.asciidoc b/docs/reference/sql/functions/math.asciidoc new file mode 100644 index 0000000000000..604603f297323 --- /dev/null +++ b/docs/reference/sql/functions/math.asciidoc @@ -0,0 +1,159 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-math]] +=== Math Functions + +All math and trigonometric functions require their input (where applicable) +to be numeric. + +==== Generic + +* `ABS` + +https://en.wikipedia.org/wiki/Absolute_value[Absolute value], returns \[same type as input] + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[abs] +-------------------------------------------------- + +* `CBRT` + +https://en.wikipedia.org/wiki/Cube_root[Cube root], returns `double` + +// TODO make the example in the tests presentable + +* `CEIL` + +https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Ceiling], returns `double` + +* `CEILING` + +Same as `CEIL` + +// TODO make the example in the tests presentable + +* `E` + +https://en.wikipedia.org/wiki/E_%28mathematical_constant%29[Euler's number], returns `2.7182818284590452354` + + +* https://en.wikipedia.org/wiki/Rounding#Round_half_up[Round] (`ROUND`) + +// TODO make the example in the tests presentable + +NOTE: This rounds "half up" meaning that `ROUND(-1.5)` results in `-1`. 
+ + +* https://en.wikipedia.org/wiki/Floor_and_ceiling_functions[Floor] (`FLOOR`) + +// TODO make the example in the tests presentable + +* https://en.wikipedia.org/wiki/Natural_logarithm[Natural logarithm] (`LOG`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[log] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Logarithm[Logarithm] base 10 (`LOG10`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[log10] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Square_root[Square root] (`SQRT`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[sqrt] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Exponential_function[e^x^] (`EXP`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[exp] +-------------------------------------------------- + +* https://docs.oracle.com/javase/8/docs/api/java/lang/Math.html#expm1-double-[e^x^ - 1] (`EXPM1`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[expm1] +-------------------------------------------------- + +==== Trigonometric + +* Convert from https://en.wikipedia.org/wiki/Radian[radians] +to https://en.wikipedia.org/wiki/Degree_(angle)[degrees] (`DEGREES`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[degrees] +-------------------------------------------------- + +* Convert from https://en.wikipedia.org/wiki/Degree_(angle)[degrees] +to https://en.wikipedia.org/wiki/Radian[radians] (`RADIANS`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[degrees] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Trigonometric_functions#sine[Sine] (`SIN`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[sin] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Trigonometric_functions#cosine[Cosine] (`COS`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[cos] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Trigonometric_functions#tangent[Tangent] (`TAN`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[tan] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc sine] (`ASIN`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[asin] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc cosine] (`ACOS`) + 
+["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[acos] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[Arc tangent] (`ATAN`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[atan] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic sine] (`SINH`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[sinh] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Hyperbolic_function[Hyperbolic cosine] (`COSH`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/math.sql-spec[cosh] +-------------------------------------------------- diff --git a/docs/reference/sql/functions/operators.asciidoc b/docs/reference/sql/functions/operators.asciidoc new file mode 100644 index 0000000000000..9c90d12320ed0 --- /dev/null +++ b/docs/reference/sql/functions/operators.asciidoc @@ -0,0 +1,115 @@ +[role="xpack"] +[testenv="basic"] +[[sql-operators]] +=== Comparison Operators + +Boolean operator for comparing one or two expressions. + +* Equality (`=`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldEquality] +-------------------------------------------------- + +* Inequality (`<>` or `!=` or `<=>`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldNonEquality] +-------------------------------------------------- + +* Comparison (`<`, `<=`, `>`, `>=`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldLessThan] +-------------------------------------------------- + +* `BETWEEN` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereBetween] +-------------------------------------------------- + +* `IS NULL`/`IS NOT NULL` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereIsNotNullAndIsNull] +-------------------------------------------------- + +[[sql-operators-logical]] +=== Logical Operators + +Boolean operator for evaluating one or two expressions. 
+ +* `AND` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldAndComparison] +-------------------------------------------------- + +* `OR` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldOrComparison] +-------------------------------------------------- + +* `NOT` + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/filter.sql-spec[whereFieldEqualityNot] +-------------------------------------------------- + +[[sql-operators-math]] +=== Math Operators + +Perform mathematical operations affecting one or two values. +The result is a value of numeric type. + +* Add (`+`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[plus] +-------------------------------------------------- + +* Subtract (infix `-`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[minus] +-------------------------------------------------- + +* Negate (unary `-`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[unaryMinus] +-------------------------------------------------- + +* Multiply (`*`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[multiply] +-------------------------------------------------- + +* Divide (`/`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[divide] +-------------------------------------------------- + +* https://en.wikipedia.org/wiki/Modulo_operation[Modulo] or Remainder (`%`) + +["source","sql",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{sql-specs}/arithmetic.sql-spec[mod] +-------------------------------------------------- diff --git a/docs/reference/sql/functions/search.asciidoc b/docs/reference/sql/functions/search.asciidoc new file mode 100644 index 0000000000000..564f57dcbdde0 --- /dev/null +++ b/docs/reference/sql/functions/search.asciidoc @@ -0,0 +1,35 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-search]] +=== Full-Text Search Functions + +Search functions should be used when performing full-text search, namely +when the `MATCH` or `QUERY` predicates are being used. +Outside a so-called search context, these functions will return default values +such as `0` or `NULL`. + +[[sql-functions-search-score]] +==== `SCORE` + +*Input*: None, *Output*: `double` + +Returns the {defguide}/relevance-intro.html[relevance] of a given input to the executed query. +The higher the score, the more relevant the data. + +NOTE: When doing multiple text queries in the `WHERE` clause, their scores will be +combined using the same rules as {es}'s +<>.
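A sketch of such a multi-predicate query (the `author` match value is made up for illustration; the `library` index and its fields appear in the examples below):

[source, sql]
----
SELECT SCORE(), name FROM library WHERE MATCH(name, 'dune') AND MATCH(author, 'herbert');
----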
+ +Typically `SCORE` is used for ordering the results of a query based on their relevance: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[orderByScore] +---- + +However, it is perfectly fine to return the score without sorting by it: + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[scoreWithMatch] +---- \ No newline at end of file diff --git a/docs/reference/sql/functions/string.asciidoc b/docs/reference/sql/functions/string.asciidoc new file mode 100644 index 0000000000000..ccc11938028d6 --- /dev/null +++ b/docs/reference/sql/functions/string.asciidoc @@ -0,0 +1,240 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-string]] +=== String Functions + +Functions for performing string manipulation. + +[[sql-functions-string-ascii]] +==== `ASCII` + +*Input*: `string`, *Output*: `integer` + +Returns the ASCII code value of the leftmost character of string_exp as an integer. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringAscii] +---- + +[[sql-functions-string-bit-length]] +==== `BIT_LENGTH` + +*Input*: `string`, *Output*: `integer` + +Returns the length in bits of the input. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringBitLength] +---- + +[[sql-functions-string-char]] +==== `CHAR` + +*Input*: `numeric`, *Output*: `string` + +Returns the character that has the ASCII code value specified by the numeric input. The value should be between 0 and 255; otherwise, the return value is data source–dependent. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringChar] +---- + +[[sql-functions-string-char-length]] +==== `CHAR_LENGTH` + +*Input*: `string`, *Output*: `integer` + +Returns the length in characters of the input, if the string expression is of a character data type; otherwise, returns the length in bytes of the string expression (the smallest integer not less than the number of bits divided by 8). + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringCharLength] +---- + +[[sql-functions-string-concat]] +==== `CONCAT` + +*Input*: `string1`, `string2`, *Output*: `string` + +Returns a character string that is the result of concatenating string1 to string2. If one of the strings is `NULL`, +the other string will be returned. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringConcat] +---- + +[[sql-functions-string-insert]] +==== `INSERT` + +*Input*: `string1`, `start`, `length`, `string2`, *Output*: `string` + +Returns a string where length characters have been deleted from string1, beginning at start, and where string2 has been inserted into string1, beginning at start. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringInsert] +---- + +[[sql-functions-string-lcase]] +==== `LCASE` + +*Input*: `string`, *Output*: `string` + +Returns a string equal to that in string, with all uppercase characters converted to lowercase. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLCase] +---- + +[[sql-functions-string-left]] +==== `LEFT` + +*Input*: `string`, `count`, *Output*: `string` + +Returns the leftmost count characters of string.
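A literal sketch (again assuming literal folding, as in the `ROUND` example above); the first three characters come back, i.e. `Ela`:

[source, sql]
----
SELECT LEFT('Elasticsearch', 3) AS prefix;
----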
+ +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLeft] +---- + +[[sql-functions-string-length]] +==== `LENGTH` + +*Input*: `string`, *Output*: `integer` + +Returns the number of characters in string, excluding trailing blanks. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLength] +---- + +[[sql-functions-string-locate]] +==== `LOCATE` + +*Input*: `string1`, `string2`[, `start`]`, *Output*: `integer` + +Returns the starting position of the first occurrence of string1 within string2. The search for the first occurrence of string1 begins with the first character position in string2 unless the optional argument, start, is specified. If start is specified, the search begins with the character position indicated by the value of start. The first character position in string2 is indicated by the value 1. If string1 is not found within string2, the value 0 is returned. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLocateWoStart] +---- + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLocateWithStart] +---- + +[[sql-functions-string-ltrim]] +==== `LTRIM` + +*Input*: `string`, *Output*: `string` + +Returns the characters of string_exp, with leading blanks removed. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringLTrim] +---- + +[[sql-functions-string-position]] +==== `POSITION` + +*Input*: `string1`, `string2`, *Output*: `integer` + +Returns the position of the string1 in string2. The result is an exact numeric. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringPosition] +---- + +[[sql-functions-string-repeat]] +==== `REPEAT` + +*Input*: `string`, `count`, *Output*: `string` + +Returns a character string composed of string1 repeated count times. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringRepeat] +---- + +[[sql-functions-string-replace]] +==== `REPLACE` + +*Input*: `string1`, `string2`, `string3`, *Output*: `string` + +Search string1 for occurrences of string2, and replace with string3. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringReplace] +---- + +[[sql-functions-string-right]] +==== `RIGHT` + +*Input*: `string`, `count`, *Output*: `string` + +Returns the rightmost count characters of string. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringRight] +---- + +[[sql-functions-string-rtrim]] +==== `RTRIM` + +*Input*: `string`, *Output*: `string` + +Returns the characters of string with trailing blanks removed. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringRTrim] +---- + +[[sql-functions-string-space]] +==== `SPACE` + +*Input*: `integer`, *Output*: `string` + +Returns a character string consisting of count spaces. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringSpace] +---- + +[[sql-functions-string-substring]] +==== `SUBSTRING` + +*Input*: `string`, `start`, `length`, *Output*: `integer` + +Returns a character string that is derived from the string, beginning at the character position specified by `start` for `length` characters. 
+ +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringSubString] +---- + +[[sql-functions-string-ucase]] +==== `UCASE` + +*Input*: `string`, *Output*: `string` + +Returns a string equal to that of the input, with all lowercase characters converted to uppercase. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[stringUCase] +---- diff --git a/docs/reference/sql/functions/type-conversion.asciidoc b/docs/reference/sql/functions/type-conversion.asciidoc new file mode 100644 index 0000000000000..549b05d69d86d --- /dev/null +++ b/docs/reference/sql/functions/type-conversion.asciidoc @@ -0,0 +1,39 @@ +[role="xpack"] +[testenv="basic"] +[[sql-functions-type-conversion]] +=== Type Conversion Functions + +Functions for converting an expression of one data type to another. + +[[sql-functions-type-conversion-cast]] +==== `CAST` + +.Synopsis +[source, sql] +---- +CAST ( expression<1> AS data_type<2> ) +---- + +<1> Expression to cast +<2> Target data type to cast to + +.Description + +Casts the result of the given expression to the target type. +If the cast is not possible (for example because of target type is too narrow or because +the value itself cannot be converted), the query fails. + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToIntCast] +---- + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[conversionIntToStringCast] +---- + +["source","sql",subs="attributes,callouts,macros"] +---- +include-tagged::{sql-specs}/docs.csv-spec[conversionStringToDateCast] +---- \ No newline at end of file diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index 7f98add97248b..9cf9c4eed6284 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -17,7 +17,7 @@ Most of {es} <> are available in {es-sql}, as indicat | <> | `tinyint` | 3 | <> | `smallint` | 5 | <> | `integer` | 10 -| <> | `long` | 19 +| <> | `bigint` | 19 | <> | `double` | 15 | <> | `real` | 7 | <> | `float` | 16 diff --git a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java similarity index 89% rename from x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java rename to x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java index 2c99c8b5383c6..017fc4b523818 100644 --- a/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpectIT.java +++ b/x-pack/qa/sql/no-security/src/test/java/org/elasticsearch/xpack/qa/sql/nosecurity/JdbcDocCsvSpecIT.java @@ -11,7 +11,7 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.xpack.qa.sql.jdbc.CsvTestUtils.CsvTestCase; import org.elasticsearch.xpack.qa.sql.jdbc.DataLoader; -import org.elasticsearch.xpack.qa.sql.jdbc.JdbcTestUtils; +import org.elasticsearch.xpack.qa.sql.jdbc.JdbcAssert; import org.elasticsearch.xpack.qa.sql.jdbc.SpecBaseIntegrationTestCase; import org.elasticsearch.xpack.qa.sql.jdbc.SqlSpecTestCase; @@ -36,7 +36,7 @@ * That's not to say the two cannot be merged however that felt like too much of an effort * at this stage and, to not keep 
things stalling, started with this approach. */ -public class JdbcDocCsvSpectIT extends SpecBaseIntegrationTestCase { +public class JdbcDocCsvSpecIT extends SpecBaseIntegrationTestCase { private final CsvTestCase testCase; @@ -56,7 +56,7 @@ public static List readScriptSpec() throws Exception { return readScriptSpec("/docs.csv-spec", parser); } - public JdbcDocCsvSpectIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { + public JdbcDocCsvSpecIT(String fileName, String groupName, String testName, Integer lineNumber, CsvTestCase testCase) { super(fileName, groupName, testName, lineNumber); this.testCase = testCase; } @@ -68,8 +68,8 @@ protected void assertResults(ResultSet expected, ResultSet elastic) throws SQLEx // // uncomment this to printout the result set and create new CSV tests // - JdbcTestUtils.logLikeCLI(elastic, log); - //JdbcAssert.assertResultSets(expected, elastic, log, true); + //JdbcTestUtils.logLikeCLI(elastic, log); + JdbcAssert.assertResultSets(expected, elastic, log, true); } @Override diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java index abf84dc7311d8..a5e8b549bce8f 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java @@ -160,17 +160,28 @@ public static CsvSpecParser specParser() { } private static class CsvSpecParser implements SpecBaseIntegrationTestCase.Parser { + private final StringBuilder query = new StringBuilder(); private final StringBuilder data = new StringBuilder(); private CsvTestCase testCase; @Override public Object parse(String line) { - // beginning of the section + // read the query if (testCase == null) { - // pick up the query - testCase = new CsvTestCase(); - testCase.query = line.endsWith(";") ? 
line.substring(0, line.length() - 1) : line; + if (line.endsWith(";")) { + // pick up the query + testCase = new CsvTestCase(); + query.append(line.substring(0, line.length() - 1).trim()); + testCase.query = query.toString(); + query.setLength(0); + } + // keep reading the query + else { + query.append(line); + query.append("\r\n"); + } } + // read the results else { // read data if (line.startsWith(";")) { diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java index 9ece8d7d1d33c..86cbdec197e72 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SpecBaseIntegrationTestCase.java @@ -85,6 +85,7 @@ public static void wipeTestData() throws IOException { public final void test() throws Throwable { try { + assumeFalse("Test marked as Ignored", testName.endsWith("-Ignore")); doTest(); } catch (AssertionError ae) { throw reworkException(ae); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java index b77820fc77e72..605e19807ed4c 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java @@ -25,13 +25,13 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase { private String query; @ClassRule - public static LocalH2 H2 = new LocalH2((c) -> { + public static LocalH2 H2 = new LocalH2((c) -> { c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"); c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp_with_nulls.sql'"); }); @ParametersFactory(argumentFormatting = PARAM_FORMATTING) - public static List readScriptSpec() throws Exception { + public static List readScriptSpec() throws Exception { Parser parser = specParser(); List tests = new ArrayList<>(); tests.addAll(readScriptSpec("/select.sql-spec", parser)); @@ -47,9 +47,22 @@ public static List readScriptSpec() throws Exception { } private static class SqlSpecParser implements Parser { + private final StringBuilder query = new StringBuilder(); + @Override public Object parse(String line) { - return line.endsWith(";") ? 
line.substring(0, line.length() - 1) : line; + // not initialized + String q = null; + if (line.endsWith(";")) { + query.append(line.substring(0, line.length() - 1)); + q = query.toString(); + query.setLength(0); + } else { + query.append(line); + query.append("\r\n"); + } + + return q; } } diff --git a/x-pack/qa/sql/src/main/resources/agg.csv-spec b/x-pack/qa/sql/src/main/resources/agg.csv-spec index 0d7b0e1476020..1d9592d963d4f 100644 --- a/x-pack/qa/sql/src/main/resources/agg.csv-spec +++ b/x-pack/qa/sql/src/main/resources/agg.csv-spec @@ -66,11 +66,9 @@ F | 10099.1936 | 10098.021 | 26.35135135 M | 10095.6112 | 10090.846 | 23.41269841269841 ; -// Simple sum used in documentation sum -// tag::sum SELECT SUM(salary) FROM test_emp; -// end::sum + SUM(salary) --------------- 4824855 diff --git a/x-pack/qa/sql/src/main/resources/agg.sql-spec b/x-pack/qa/sql/src/main/resources/agg.sql-spec index f1ab9160b1af4..a86b8b65eef96 100644 --- a/x-pack/qa/sql/src/main/resources/agg.sql-spec +++ b/x-pack/qa/sql/src/main/resources/agg.sql-spec @@ -87,9 +87,7 @@ SELECT (emp_no % 3) + 1 AS e, (languages % 3) + 1 AS l FROM test_emp GROUP BY e, // COUNT aggCountImplicit -// tag::countStar SELECT COUNT(*) AS count FROM test_emp; -// end::countStar aggCountImplicitWithCast SELECT CAST(COUNT(*) AS INT) c FROM "test_emp"; aggCountImplicitWithConstant @@ -105,9 +103,7 @@ SELECT gender g, CAST(COUNT(*) AS INT) c FROM "test_emp" WHERE emp_no < 10020 GR aggCountWithAlias SELECT gender g, COUNT(*) c FROM "test_emp" GROUP BY g ORDER BY gender; countDistinct -// tag::countDistinct SELECT COUNT(DISTINCT hire_date) AS count FROM test_emp; -// end::countDistinct aggCountAliasAndWhereClauseMultiGroupBy SELECT gender g, languages l, COUNT(*) c FROM "test_emp" WHERE emp_no < 10020 GROUP BY gender, languages ORDER BY gender, languages; @@ -237,9 +233,7 @@ SELECT gender g, languages l, MIN(emp_no) m FROM "test_emp" GROUP BY g, l HAVING // MAX aggMaxImplicit -// tag::max SELECT MAX(salary) AS max FROM test_emp; -// end::max aggMaxImplicitWithCast SELECT CAST(MAX(emp_no) AS SMALLINT) c FROM "test_emp"; aggMax @@ -310,9 +304,7 @@ SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a FROM "test_emp" GROUP BY gender OR aggAvgWithCastToDouble SELECT gender g, CAST(AVG(emp_no) AS DOUBLE) a FROM "test_emp" GROUP BY gender ORDER BY gender; aggAvg -// tag::avg SELECT AVG(salary) AS avg FROM test_emp; -// end::avg aggAvgWithCastAndCount SELECT gender g, CAST(AVG(emp_no) AS FLOAT) a, COUNT(1) c FROM "test_emp" GROUP BY gender ORDER BY gender; aggAvgWithCastAndCountWithFilter diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index 87b74760027c7..2a4f29fcf5d9a 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -148,7 +148,7 @@ emp |BASE TABLE // end::showTablesLikeMixed ; -showTablesLikeEscape +showTablesLikeEscape-Ignore // tag::showTablesLikeEscape SHOW TABLES LIKE 'emp!%' ESCAPE '!'; @@ -183,88 +183,88 @@ SHOW FUNCTIONS; name | type ----------------+--------------- -AVG |AGGREGATE -COUNT |AGGREGATE -MAX |AGGREGATE -MIN |AGGREGATE -SUM |AGGREGATE -STDDEV_POP |AGGREGATE -VAR_POP |AGGREGATE -PERCENTILE |AGGREGATE -PERCENTILE_RANK |AGGREGATE -SUM_OF_SQUARES |AGGREGATE -SKEWNESS |AGGREGATE -KURTOSIS |AGGREGATE -DAY_OF_MONTH |SCALAR -DAY |SCALAR -DOM |SCALAR -DAY_OF_WEEK |SCALAR -DOW |SCALAR -DAY_OF_YEAR |SCALAR -DOY |SCALAR -HOUR_OF_DAY |SCALAR -HOUR |SCALAR -MINUTE_OF_DAY |SCALAR -MINUTE_OF_HOUR |SCALAR -MINUTE |SCALAR 
-SECOND_OF_MINUTE|SCALAR -SECOND |SCALAR -MONTH_OF_YEAR |SCALAR -MONTH |SCALAR -YEAR |SCALAR -WEEK_OF_YEAR |SCALAR -WEEK |SCALAR -ABS |SCALAR -ACOS |SCALAR -ASIN |SCALAR -ATAN |SCALAR -ATAN2 |SCALAR -CBRT |SCALAR -CEIL |SCALAR -CEILING |SCALAR -COS |SCALAR -COSH |SCALAR -COT |SCALAR -DEGREES |SCALAR -E |SCALAR -EXP |SCALAR -EXPM1 |SCALAR -FLOOR |SCALAR -LOG |SCALAR -LOG10 |SCALAR -MOD |SCALAR -PI |SCALAR -POWER |SCALAR -RADIANS |SCALAR -RANDOM |SCALAR -RAND |SCALAR -ROUND |SCALAR -SIGN |SCALAR -SIGNUM |SCALAR -SIN |SCALAR -SINH |SCALAR -SQRT |SCALAR -TAN |SCALAR -ASCII |SCALAR -CHAR |SCALAR -BIT_LENGTH |SCALAR -CHAR_LENGTH |SCALAR -LCASE |SCALAR -LENGTH |SCALAR -LTRIM |SCALAR -RTRIM |SCALAR -SPACE |SCALAR -UCASE |SCALAR -CONCAT |SCALAR -INSERT |SCALAR -LEFT |SCALAR -LOCATE |SCALAR -POSITION |SCALAR -REPEAT |SCALAR -REPLACE |SCALAR -RIGHT |SCALAR -SUBSTRING |SCALAR -SCORE |SCORE +AVG |AGGREGATE +COUNT |AGGREGATE +MAX |AGGREGATE +MIN |AGGREGATE +SUM |AGGREGATE +STDDEV_POP |AGGREGATE +VAR_POP |AGGREGATE +PERCENTILE |AGGREGATE +PERCENTILE_RANK |AGGREGATE +SUM_OF_SQUARES |AGGREGATE +SKEWNESS |AGGREGATE +KURTOSIS |AGGREGATE +DAY_OF_MONTH |SCALAR +DAY |SCALAR +DOM |SCALAR +DAY_OF_WEEK |SCALAR +DOW |SCALAR +DAY_OF_YEAR |SCALAR +DOY |SCALAR +HOUR_OF_DAY |SCALAR +HOUR |SCALAR +MINUTE_OF_DAY |SCALAR +MINUTE_OF_HOUR |SCALAR +MINUTE |SCALAR +SECOND_OF_MINUTE|SCALAR +SECOND |SCALAR +MONTH_OF_YEAR |SCALAR +MONTH |SCALAR +YEAR |SCALAR +WEEK_OF_YEAR |SCALAR +WEEK |SCALAR +ABS |SCALAR +ACOS |SCALAR +ASIN |SCALAR +ATAN |SCALAR +ATAN2 |SCALAR +CBRT |SCALAR +CEIL |SCALAR +CEILING |SCALAR +COS |SCALAR +COSH |SCALAR +COT |SCALAR +DEGREES |SCALAR +E |SCALAR +EXP |SCALAR +EXPM1 |SCALAR +FLOOR |SCALAR +LOG |SCALAR +LOG10 |SCALAR +MOD |SCALAR +PI |SCALAR +POWER |SCALAR +RADIANS |SCALAR +RANDOM |SCALAR +RAND |SCALAR +ROUND |SCALAR +SIGN |SCALAR +SIGNUM |SCALAR +SIN |SCALAR +SINH |SCALAR +SQRT |SCALAR +TAN |SCALAR +ASCII |SCALAR +CHAR |SCALAR +BIT_LENGTH |SCALAR +CHAR_LENGTH |SCALAR +LCASE |SCALAR +LENGTH |SCALAR +LTRIM |SCALAR +RTRIM |SCALAR +SPACE |SCALAR +CONCAT |SCALAR +INSERT |SCALAR +LEFT |SCALAR +LOCATE |SCALAR +POSITION |SCALAR +REPEAT |SCALAR +REPLACE |SCALAR +RIGHT |SCALAR +SUBSTRING |SCALAR +UCASE |SCALAR +SCORE |SCORE // end::showFunctions ; @@ -331,7 +331,7 @@ MINUTE_OF_DAY |SCALAR selectColumnAlias // tag::selectColumnAlias -SELECT 1 + 1 AS result +SELECT 1 + 1 AS result; result --------------- @@ -503,18 +503,19 @@ groupByMulti // tag::groupByMulti SELECT gender g, languages l, COUNT(*) c FROM "emp" GROUP BY g, l ORDER BY languages ASC, gender DESC; - g | l | c + g | l | c ---------------+---------------+--------------- -F |2 |4 -F |3 |8 -F |4 |7 -F |5 |7 -F |6 |11 -M |2 |12 -M |3 |12 -M |4 |15 -M |5 |11 -M |6 |13 +M |1 |12 +F |1 |4 +M |2 |12 +F |2 |8 +M |3 |15 +F |3 |7 +M |4 |11 +F |4 |7 +M |5 |13 +F |5 |11 + // end::groupByMulti ; @@ -658,7 +659,7 @@ James S.A. 
Corey |Leviathan Wakes |561 |2011-06-02T00:00:00Z orderByScore // tag::orderByScore -SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC; +SELECT SCORE(), * FROM library WHERE MATCH(name, 'dune') ORDER BY SCORE() DESC; SCORE() | author | name | page_count | release_date ---------------+---------------+-------------------+---------------+-------------------- @@ -672,7 +673,7 @@ SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY SCORE() DESC; orderByScoreWithMatch // tag::orderByScoreWithMatch -SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DESC; +SELECT SCORE(), * FROM library WHERE MATCH(name, 'dune') ORDER BY page_count DESC; SCORE() | author | name | page_count | release_date ---------------+---------------+-------------------+---------------+-------------------- @@ -684,6 +685,19 @@ SELECT SCORE(), * FROM library WHERE match(name, 'dune') ORDER BY page_count DES // end::orderByScoreWithMatch ; +scoreWithMatch +// tag::scoreWithMatch +SELECT SCORE() AS score, name, release_date FROM library WHERE QUERY('dune') ORDER BY YEAR(release_date) DESC; + + score | name | release_date +---------------+-------------------+-------------------- +1.4005898 |God Emperor of Dune|1981-05-28T00:00:00Z +1.6086555 |Children of Dune |1976-04-21T00:00:00Z +1.8893257 |Dune Messiah |1969-10-15T00:00:00Z +2.288635 |Dune |1965-06-01T00:00:00Z +// end::scoreWithMatch +; + /////////////////////////////// // @@ -701,3 +715,399 @@ Georgi |Facello |10001 // end::limitBasic ; + +/////////////////////////////// +// +// Aggregations +// +/////////////////////////////// + +aggAvg +// tag::aggAvg +SELECT AVG(salary) AS avg FROM emp; + + avg +--------------- +48248 +// end::aggAvg +; + +aggCountStar +// tag::aggCountStar +SELECT COUNT(*) AS count FROM emp; + + count +--------------- +100 +// end::aggCountStar +; + +aggCountDistinct +// tag::aggCountDistinct + +SELECT COUNT(DISTINCT hire_date) AS hires FROM emp; + + hires +--------------- +99 +// end::aggCountDistinct +; + +aggMax +// tag::aggMax +SELECT MAX(salary) AS max FROM emp; + + max +--------------- +74999 +// end::aggMax +; + +aggMin +// tag::aggMin +SELECT MIN(salary) AS min FROM emp; + + min +--------------- +25324 +// end::aggMin +; + +aggSum +// tag::aggSum +SELECT SUM(salary) AS sum FROM emp; + + sum +--------------- +4824855 +// end::aggSum +; + +aggKurtosis +// tag::aggKurtosis +SELECT MIN(salary) AS min, MAX(salary) AS max, KURTOSIS(salary) AS k FROM emp; + + min | max | k +---------------+---------------+------------------ +25324 |74999 |2.0444718929142986 +// end::aggKurtosis +; + +aggPercentile +// tag::aggPercentile +SELECT languages, PERCENTILE(salary, 95) AS "95th" FROM emp + GROUP BY languages; + + languages | 95th +---------------+----------------- +1 |72605.2 +2 |71741.0 +3 |74981.6 +4 |72115.59999999999 +5 |68927.19999999998 +// end::aggPercentile +; + +aggPercentileRank +// tag::aggPercentileRank +SELECT languages, PERCENTILE_RANK(salary, 65000) AS rank FROM emp GROUP BY languages; + + languages | rank +---------------+----------------- +1 |75.37108985853756 +2 |89.43605326660112 +3 |77.74873333978765 +4 |85.70446389643493 +5 |92.52677973666592 +// end::aggPercentileRank +; + +aggSkewness +// tag::aggSkewness +SELECT MIN(salary) AS min, MAX(salary) AS max, SKEWNESS(salary) AS s FROM emp; + + min | max | s +---------------+---------------+------------------ +25324 |74999 |0.2707722118423227 +// end::aggSkewness +; + +aggStddevPop +// tag::aggStddevPop +SELECT MIN(salary) AS 
min, MAX(salary) AS max, STDDEV_POP(salary) AS stddev + FROM emp; + + min | max | stddev +---------------+---------------+------------------ +25324 |74999 |13765.125502787832 +// end::aggStddevPop +; + + +aggSumOfSquares +// tag::aggSumOfSquares +SELECT MIN(salary) AS min, MAX(salary) AS max, SUM_OF_SQUARES(salary) AS sumsq + FROM emp; + + min | max | sumsq +---------------+---------------+---------------- +25324 |74999 |2.51740125721E11 +// end::aggSumOfSquares +; + + +aggVarPop +// tag::aggVarPop +SELECT MIN(salary) AS min, MAX(salary) AS max, VAR_POP(salary) AS varpop FROM emp; + + min | max | varpop +---------------+---------------+---------------- +25324 |74999 |1.894786801075E8 +// end::aggVarPop +; + + +/////////////////////////////// +// +// String +// +/////////////////////////////// + +stringAscii +// tag::stringAscii +SELECT ASCII('Elastic'); + +ASCII(Elastic) +--------------- +69 +// end::stringAscii +; + +stringBitLength +// tag::stringBitLength +SELECT BIT_LENGTH('Elastic'); + +BIT_LENGTH(Elastic) +------------------- +56 +// end::stringBitLength +; + +stringChar +// tag::stringChar +SELECT CHAR(69); + + CHAR(69) +--------------- +E +// end::stringChar +; + +stringCharLength +// tag::stringCharLength +SELECT CHAR_LENGTH('Elastic'); + +CHAR_LENGTH(Elastic) +-------------------- +7 +// end::stringCharLength +; + +stringConcat +// tag::stringConcat +SELECT CONCAT('Elasticsearch', ' SQL'); + +CONCAT(Elasticsearch, SQL) +-------------------------- +Elasticsearch SQL +// end::stringConcat +; + +stringInsert +// tag::stringInsert +SELECT INSERT('Elastic ', 8, 1, 'search'); + +INSERT(Elastic ,8,1,search) +--------------------------- +Elasticsearch +// end::stringInsert +; + +stringLCase +// tag::stringLCase +SELECT LCASE('Elastic'); + +LCASE(Elastic) +--------------- +elastic +// end::stringLCase +; + +stringLeft +// tag::stringLeft +SELECT LEFT('Elastic',3); + +LEFT(Elastic,3) +--------------- +Ela +// end::stringLeft +; + +stringLength +// tag::stringLength +SELECT LENGTH('Elastic '); + +LENGTH(Elastic ) +------------------ +7 +// end::stringLength +; + +stringLocateWoStart +// tag::stringLocateWoStart +SELECT LOCATE('a', 'Elasticsearch'); + +LOCATE(a,Elasticsearch) +----------------------- +3 +// end::stringLocateWoStart +; + +stringLocateWithStart +// tag::stringLocateWithStart +SELECT LOCATE('a', 'Elasticsearch', 5); + +LOCATE(a,Elasticsearch,5) +------------------------- +10 +// end::stringLocateWithStart +; + +stringLTrim +// tag::stringLTrim +SELECT LTRIM(' Elastic'); + +LTRIM( Elastic) +----------------- +Elastic +// end::stringLTrim +; + +stringPosition +// tag::stringPosition +SELECT POSITION('Elastic', 'Elasticsearch'); + +POSITION(Elastic,Elasticsearch) +------------------------------- +1 +// end::stringPosition +; + +stringRepeat +// tag::stringRepeat +SELECT REPEAT('La', 3); + + REPEAT(La,3) +--------------- +LaLaLa +// end::stringRepeat +; + +stringReplace-Ignore +// tag::stringReplace +SELECT REPLACE('Elastic', 'El', 'Fant'); + + REPLACE(Elastic, El, Fant) +----------------------------- +Fantastic +// end::stringReplace +; + +stringRight +// tag::stringRight +SELECT RIGHT('Elastic',3); + +RIGHT(Elastic,3) +---------------- +tic +// end::stringRight +; + +stringRTrim +// tag::stringRTrim +SELECT RTRIM('Elastic '); + +RTRIM(Elastic ) +----------------- +Elastic +// end::stringRTrim +; + +stringSpace-Ignore +// tag::stringSpace +SELECT SPACE(3); + + SPACE(3) +--------------- + + +// end::stringSpace +; + +stringSubString +// tag::stringSubString +SELECT 
SUBSTRING('Elasticsearch', 0, 7); + +SUBSTRING(Elasticsearch,0,7) +---------------------------- +Elastic +// end::stringSubString +; + +stringUCase +// tag::stringUCase +SELECT UCASE('Elastic'); + +UCASE(Elastic) +--------------- +ELASTIC +// end::stringUCase +; + + +/////////////////////////////// +// +// Cast +// +/////////////////////////////// + +conversionStringToIntCast +// tag::conversionStringToIntCast +SELECT CAST('123' AS INT) AS int; + + int +--------------- +123 +// end::conversionStringToIntCast +; + +conversionIntToStringCast-Ignore +// tag::conversionIntToStringCast +SELECT CAST(123 AS VARCHAR) AS string; + + string +--------------- +123 + +// end::conversionIntToStringCast +; + +conversionStringToDateCast +// tag::conversionStringToDateCast +SELECT YEAR(CAST('2018-05-19T11:23:45Z' AS TIMESTAMP)) AS year; + + year +--------------- +2018 +// end::conversionStringToDateCast +; From 8afe09a7494bb34dec29f3f3cc4391547afdff62 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 6 Sep 2018 11:59:16 -0400 Subject: [PATCH 18/91] Pass TranslogRecoveryRunner to engine from outside (#33449) This commit allows us to use different TranslogRecoveryRunner when recovering an engine from its local translog. This change is a prerequisite for the commit-based rollback PR. Relates #32867 --- .../elasticsearch/index/engine/Engine.java | 10 ++- .../index/engine/EngineConfig.java | 21 +----- .../index/engine/InternalEngine.java | 8 +-- .../elasticsearch/index/shard/IndexShard.java | 4 +- .../index/engine/InternalEngineTests.java | 72 +++++++++---------- .../index/shard/RefreshListenersTests.java | 4 +- .../index/engine/EngineTestCase.java | 18 +++-- .../index/engine/TranslogHandler.java | 2 +- .../index/engine/FollowingEngineTests.java | 6 +- 9 files changed, 67 insertions(+), 78 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 0c54fb916f5ed..a64c3f88eb3d2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -1642,9 +1642,10 @@ public interface Warmer { * Performs recovery from the transaction log up to {@code recoverUpToSeqNo} (inclusive). * This operation will close the engine if the recovery fails. * - * @param recoverUpToSeqNo the upper bound, inclusive, of sequence number to be recovered + * @param translogRecoveryRunner the translog recovery runner + * @param recoverUpToSeqNo the upper bound, inclusive, of sequence number to be recovered */ - public abstract Engine recoverFromTranslog(long recoverUpToSeqNo) throws IOException; + public abstract Engine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException; /** * Do not replay translog operations, but make the engine be ready. @@ -1662,4 +1663,9 @@ public boolean isRecovering() { * Tries to prune buffered deletes from the version map. 
*/ public abstract void maybePruneDeletes(); + + @FunctionalInterface + public interface TranslogRecoveryRunner { + int run(Engine engine, Translog.Snapshot snapshot) throws IOException; + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index 23a90553f60a8..f95ba96d343c9 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/server/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -37,13 +37,11 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; -import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; import org.elasticsearch.indices.IndexingMemoryController; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.threadpool.ThreadPool; -import java.io.IOException; import java.util.List; import java.util.function.LongSupplier; @@ -76,7 +74,6 @@ public final class EngineConfig { private final List internalRefreshListener; @Nullable private final Sort indexSort; - private final TranslogRecoveryRunner translogRecoveryRunner; @Nullable private final CircuitBreakerService circuitBreakerService; private final LongSupplier globalCheckpointSupplier; @@ -127,9 +124,8 @@ public EngineConfig(ShardId shardId, String allocationId, ThreadPool threadPool, TranslogConfig translogConfig, TimeValue flushMergesAfter, List externalRefreshListener, List internalRefreshListener, Sort indexSort, - TranslogRecoveryRunner translogRecoveryRunner, CircuitBreakerService circuitBreakerService, - LongSupplier globalCheckpointSupplier, LongSupplier primaryTermSupplier, - TombstoneDocSupplier tombstoneDocSupplier) { + CircuitBreakerService circuitBreakerService, LongSupplier globalCheckpointSupplier, + LongSupplier primaryTermSupplier, TombstoneDocSupplier tombstoneDocSupplier) { this.shardId = shardId; this.allocationId = allocationId; this.indexSettings = indexSettings; @@ -163,7 +159,6 @@ public EngineConfig(ShardId shardId, String allocationId, ThreadPool threadPool, this.externalRefreshListener = externalRefreshListener; this.internalRefreshListener = internalRefreshListener; this.indexSort = indexSort; - this.translogRecoveryRunner = translogRecoveryRunner; this.circuitBreakerService = circuitBreakerService; this.globalCheckpointSupplier = globalCheckpointSupplier; this.primaryTermSupplier = primaryTermSupplier; @@ -324,18 +319,6 @@ public TranslogConfig getTranslogConfig() { */ public TimeValue getFlushMergesAfter() { return flushMergesAfter; } - @FunctionalInterface - public interface TranslogRecoveryRunner { - int run(Engine engine, Translog.Snapshot snapshot) throws IOException; - } - - /** - * Returns a runner that implements the translog recovery from the given snapshot - */ - public TranslogRecoveryRunner getTranslogRecoveryRunner() { - return translogRecoveryRunner; - } - /** * The refresh listeners to add to Lucene for externally visible refreshes */ diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index f902ce0750245..ea2b53bea8d0a 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -393,7 +393,7 @@ private void 
bootstrapAppendOnlyInfoFromWriter(IndexWriter writer) { } @Override - public InternalEngine recoverFromTranslog(long recoverUpToSeqNo) throws IOException { + public InternalEngine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException { flushLock.lock(); try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); @@ -401,7 +401,7 @@ public InternalEngine recoverFromTranslog(long recoverUpToSeqNo) throws IOExcept throw new IllegalStateException("Engine has already been recovered"); } try { - recoverFromTranslogInternal(recoverUpToSeqNo); + recoverFromTranslogInternal(translogRecoveryRunner, recoverUpToSeqNo); } catch (Exception e) { try { pendingTranslogRecovery.set(true); // just play safe and never allow commits on this see #ensureCanFlush @@ -423,13 +423,13 @@ public void skipTranslogRecovery() { pendingTranslogRecovery.set(false); // we are good - now we can commit } - private void recoverFromTranslogInternal(long recoverUpToSeqNo) throws IOException { + private void recoverFromTranslogInternal(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) throws IOException { Translog.TranslogGeneration translogGeneration = translog.getGeneration(); final int opsRecovered; final long translogFileGen = Long.parseLong(lastCommittedSegmentInfos.getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); try (Translog.Snapshot snapshot = translog.newSnapshotFromGen( new Translog.TranslogGeneration(translog.getTranslogUUID(), translogFileGen), recoverUpToSeqNo)) { - opsRecovered = config().getTranslogRecoveryRunner().run(this, snapshot); + opsRecovered = translogRecoveryRunner.run(this, snapshot); } catch (Exception e) { throw new EngineException(shardId, "failed to recover from translog", e); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index f1e7dec6995a0..bceb106aeef91 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1314,7 +1314,7 @@ int runTranslogRecovery(Engine engine, Translog.Snapshot snapshot) throws IOExce **/ public void openEngineAndRecoverFromTranslog() throws IOException { innerOpenEngineAndTranslog(); - getEngine().recoverFromTranslog(Long.MAX_VALUE); + getEngine().recoverFromTranslog(this::runTranslogRecovery, Long.MAX_VALUE); } /** @@ -2233,7 +2233,7 @@ private EngineConfig newEngineConfig() { IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), Collections.singletonList(refreshListeners), Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), - indexSort, this::runTranslogRecovery, circuitBreakerService, replicationTracker, () -> operationPrimaryTerm, tombstoneDocSupplier()); + indexSort, circuitBreakerService, replicationTracker, () -> operationPrimaryTerm, tombstoneDocSupplier()); } /** diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index a44829890d5a9..6d9cdd0f225d7 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -661,7 +661,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { trimUnsafeCommits(engine.config()); engine = new InternalEngine(engine.config()); 
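The test hunks that follow switch every call site to the new two-argument form. As a quick orientation, here is a minimal, hypothetical sketch of a runner supplied at call time; the counting runner and class name below are invented for illustration (the real production runner is IndexShard's runTranslogRecovery, as the IndexShard hunk above shows):

import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.translog.Translog;

final class RecoveryRunnerSketch {
    // Builds a trivial runner that replays nothing and merely counts the
    // operations in the translog snapshot. A real runner applies each
    // operation to the engine and returns how many it replayed.
    static Engine.TranslogRecoveryRunner countingRunner() {
        return (engine, snapshot) -> {
            int opsRecovered = 0;
            Translog.Operation op;
            while ((op = snapshot.next()) != null) {
                opsRecovered++; // replay of op would happen here
            }
            return opsRecovered;
        };
    }
}

Recovery is then invoked as engine.recoverFromTranslog(RecoveryRunnerSketch.countingRunner(), Long.MAX_VALUE), mirroring how the tests below pass translogHandler instead of reading the runner out of EngineConfig.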
assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); Engine.Searcher searcher = wrapper.wrap(engine.acquireSearcher("test")); assertThat(counter.get(), equalTo(2)); searcher.close(); @@ -678,7 +678,7 @@ public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { engine = new InternalEngine(engine.config()); expectThrows(IllegalStateException.class, () -> engine.flush(true, true)); assertTrue(engine.isRecovering()); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertFalse(engine.isRecovering()); doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc)); @@ -708,7 +708,7 @@ public void testTranslogMultipleOperationsSameDocument() throws IOException { } trimUnsafeCommits(engine.config()); try (Engine recoveringEngine = new InternalEngine(engine.config())){ - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { final TotalHitCountCollector collector = new TotalHitCountCollector(); searcher.searcher().search(new MatchAllDocsQuery(), collector); @@ -744,7 +744,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s } }; assertThat(getTranslog(recoveringEngine).stats().getUncommittedOperations(), equalTo(docs)); - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertTrue(committed.get()); } finally { IOUtils.close(recoveringEngine); @@ -778,7 +778,7 @@ public void testTranslogRecoveryWithMultipleGenerations() throws IOException { initialEngine.close(); trimUnsafeCommits(initialEngine.config()); recoveringEngine = new InternalEngine(initialEngine.config()); - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), docs); assertEquals(docs, topDocs.totalHits.value); @@ -811,14 +811,14 @@ public void testRecoveryFromTranslogUpToSeqNo() throws IOException { } trimUnsafeCommits(config); try (InternalEngine engine = new InternalEngine(config)) { - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertThat(engine.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(maxSeqNo)); } trimUnsafeCommits(config); try (InternalEngine engine = new InternalEngine(config)) { long upToSeqNo = randomLongBetween(globalCheckpoint.get(), maxSeqNo); - engine.recoverFromTranslog(upToSeqNo); + engine.recoverFromTranslog(translogHandler, upToSeqNo); assertThat(engine.getLocalCheckpoint(), equalTo(upToSeqNo)); assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(upToSeqNo)); } @@ -1202,7 +1202,7 @@ public void testSyncedFlushSurvivesEngineRestart() throws IOException { } trimUnsafeCommits(config); engine = new InternalEngine(config); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); } @@ -1221,7 +1221,7 @@ public void 
testSyncedFlushVanishesOnReplay() throws IOException { engine.close(); trimUnsafeCommits(config); engine = new InternalEngine(config); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertNull("Sync ID must be gone since we have a document to replay", engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID)); } @@ -2187,7 +2187,7 @@ public void testSeqNoAndCheckpoints() throws IOException { trimUnsafeCommits(initialEngine.engineConfig); try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())){ - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertThat( @@ -2508,7 +2508,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { try (InternalEngine engine = createEngine(config)) { engine.index(firstIndexRequest); globalCheckpoint.set(engine.getLocalCheckpoint()); - expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog(Long.MAX_VALUE)); + expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE)); Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2530,7 +2530,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY)); } assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("3", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2547,7 +2547,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(2, engine.getTranslog().currentFileGeneration()); assertEquals(0L, engine.getTranslog().stats().getUncommittedOperations()); } @@ -2561,7 +2561,7 @@ public void testCurrentTranslogIDisCommitted() throws IOException { Map userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); userData = engine.getLastCommittedSegmentInfos().getUserData(); assertEquals("no changes - nothing to commit", "1", userData.get(Translog.TRANSLOG_GENERATION_KEY)); assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY)); @@ -2667,7 +2667,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s } } }) { - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, 
Long.MAX_VALUE); final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null); engine.index(indexForDoc(doc1)); globalCheckpoint.set(engine.getLocalCheckpoint()); @@ -2678,7 +2678,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s try (InternalEngine engine = new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpointSupplier))) { - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertVisibleCount(engine, 1); final long committedGen = Long.valueOf( engine.getLastCommittedSegmentInfos().getUserData().get(Translog.TRANSLOG_GENERATION_KEY)); @@ -2739,30 +2739,28 @@ public void testTranslogReplay() throws IOException { assertThat(indexResult.getVersion(), equalTo(1L)); } assertVisibleCount(engine, numDocs); - - TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); - parser.mappingUpdate = dynamicUpdate(); + translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); + translogHandler.mappingUpdate = dynamicUpdate(); engine.close(); trimUnsafeCommits(copy(engine.config(), inSyncGlobalCheckpointSupplier)); engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier)); // we need to reuse the engine config unless the parser.mappingModified won't work - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertVisibleCount(engine, numDocs, false); - parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); - assertEquals(numDocs, parser.appliedOperations()); - if (parser.mappingUpdate != null) { - assertEquals(1, parser.getRecoveredTypes().size()); - assertTrue(parser.getRecoveredTypes().containsKey("test")); + assertEquals(numDocs, translogHandler.appliedOperations()); + if (translogHandler.mappingUpdate != null) { + assertEquals(1, translogHandler.getRecoveredTypes().size()); + assertTrue(translogHandler.getRecoveredTypes().containsKey("test")); } else { - assertEquals(0, parser.getRecoveredTypes().size()); + assertEquals(0, translogHandler.getRecoveredTypes().size()); } engine.close(); + translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier); assertVisibleCount(engine, numDocs, false); - parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); - assertEquals(0, parser.appliedOperations()); + assertEquals(0, translogHandler.appliedOperations()); final boolean flush = randomBoolean(); int randomId = randomIntBetween(numDocs + 1, numDocs + 10); @@ -2786,13 +2784,13 @@ public void testTranslogReplay() throws IOException { } engine.close(); + translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings()); engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1); assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); } - parser = (TranslogHandler) engine.config().getTranslogRecoveryRunner(); - assertEquals(flush ? 1 : 2, parser.appliedOperations()); + assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(doc), primaryTerm.get())); if (randomBoolean()) { engine.refresh("test"); @@ -2836,7 +2834,7 @@ public void testRecoverFromForeignTranslog() throws IOException { threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), - config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), + config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, new NoneCircuitBreakerService(), () -> SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm::get, tombstoneDocSupplier()); try { InternalEngine internalEngine = new InternalEngine(brokenConfig); @@ -3455,7 +3453,7 @@ public void testEngineMaxTimestampIsInitialized() throws IOException { } try (Store store = createStore(newFSDirectory(storeDir)); Engine engine = new InternalEngine(configSupplier.apply(store))) { assertEquals(IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); @@ -3738,7 +3736,7 @@ public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws Bro } trimUnsafeCommits(initialEngine.config()); try (Engine recoveringEngine = new InternalEngine(initialEngine.config())) { - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); recoveringEngine.fillSeqNoGaps(2); assertThat(recoveringEngine.getLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1))); } @@ -3849,7 +3847,7 @@ protected long doGenerateSeqNoForOperation(Operation operation) { throw new UnsupportedOperationException(); } }; - noOpEngine.recoverFromTranslog(Long.MAX_VALUE); + noOpEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get()); final String reason = "filling gaps"; noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason)); @@ -4127,7 +4125,7 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { trimUnsafeCommits(copy(replicaEngine.config(), globalCheckpoint::get)); recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get)); assertEquals(numDocsOnReplica, getTranslog(recoveringEngine).stats().getUncommittedOperations()); - recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertEquals(checkpointOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2)); @@ -4163,7 +4161,7 @@ public void testFillUpSequenceIdGapsOnRecovery() throws IOException { if (flushed) { assertThat(recoveringEngine.getTranslogStats().getUncommittedOperations(), equalTo(0)); } - 
recoveringEngine.recoverFromTranslog(Long.MAX_VALUE); + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo()); assertEquals(maxSeqIDOnReplica, recoveringEngine.getLocalCheckpoint()); assertEquals(0, recoveringEngine.fillSeqNoGaps(3)); @@ -4356,7 +4354,7 @@ protected void commitIndexWriter(IndexWriter writer, Translog translog, String s super.commitIndexWriter(writer, translog, syncId); } }) { - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); int numDocs = scaledRandomIntBetween(10, 100); for (int docId = 0; docId < numDocs; docId++) { ParseContext.Document document = testDocumentWithTextField(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index b93f170174c3c..a43c7c214aeb3 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -131,10 +131,10 @@ public void onFailedEngine(String reason, @Nullable Exception e) { indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, - (e, s) -> 0, new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> primaryTerm, + new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> primaryTerm, EngineTestCase.tombstoneDocSupplier()); engine = new InternalEngine(config); - engine.recoverFromTranslog(Long.MAX_VALUE); + engine.recoverFromTranslog((e, s) -> 0, Long.MAX_VALUE); listeners.setCurrentRefreshLocationSupplier(engine::getTranslogLastWriteLocation); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index b558cd1ba9000..0e22d0a7eda2a 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -125,6 +125,7 @@ public abstract class EngineTestCase extends ESTestCase { protected static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.EMPTY); protected ThreadPool threadPool; + protected TranslogHandler translogHandler; protected Store store; protected Store storeReplica; @@ -189,6 +190,7 @@ public void setUp() throws Exception { Lucene.cleanLuceneIndex(store.directory()); Lucene.cleanLuceneIndex(storeReplica.directory()); primaryTranslogDir = createTempDir("translog-primary"); + translogHandler = createTranslogHandler(defaultSettings); engine = createEngine(store, primaryTranslogDir); LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig(); @@ -213,7 +215,7 @@ public EngineConfig copy(EngineConfig config, LongSupplier globalCheckpointSuppl config.getWarmer(), config.getStore(), config.getMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), - 
config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getTranslogRecoveryRunner(), + config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getCircuitBreakerService(), globalCheckpointSupplier, config.getPrimaryTermSupplier(), tombstoneDocSupplier()); } @@ -222,7 +224,7 @@ public EngineConfig copy(EngineConfig config, Analyzer analyzer) { config.getWarmer(), config.getStore(), config.getMergePolicy(), analyzer, config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), - config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getTranslogRecoveryRunner(), + config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.getPrimaryTermSupplier(), config.getTombstoneDocSupplier()); } @@ -232,7 +234,7 @@ public EngineConfig copy(EngineConfig config, MergePolicy mergePolicy) { config.getWarmer(), config.getStore(), mergePolicy, config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), config.getQueryCache(), config.getQueryCachingPolicy(), config.getTranslogConfig(), config.getFlushMergesAfter(), - config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getTranslogRecoveryRunner(), + config.getExternalRefreshListener(), Collections.emptyList(), config.getIndexSort(), config.getCircuitBreakerService(), config.getGlobalCheckpointSupplier(), config.getPrimaryTermSupplier(), config.getTombstoneDocSupplier()); } @@ -377,6 +379,10 @@ protected Translog createTranslog(Path translogPath, LongSupplier primaryTermSup () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTermSupplier); } + protected TranslogHandler createTranslogHandler(IndexSettings indexSettings) { + return new TranslogHandler(xContentRegistry(), indexSettings); + } + protected InternalEngine createEngine(Store store, Path translogPath) throws IOException { return createEngine(defaultSettings, store, translogPath, newMergePolicy(), null); } @@ -478,7 +484,7 @@ private InternalEngine createEngine(@Nullable IndexWriterFactory indexWriterFact } InternalEngine internalEngine = createInternalEngine(indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config); - internalEngine.recoverFromTranslog(Long.MAX_VALUE); + internalEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); return internalEngine; } @@ -553,14 +559,12 @@ public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; - final TranslogHandler handler = new TranslogHandler(xContentRegistry(), IndexSettingsModule.newIndexSettings(shardId.getIndexName(), - indexSettings.getSettings())); final List refreshListenerList = refreshListener == null ? 
emptyList() : Collections.singletonList(refreshListener); EngineConfig config = new EngineConfig(shardId, allocationId.getId(), threadPool, indexSettings, null, store, mergePolicy, iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), listener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), translogConfig, - TimeValue.timeValueMinutes(5), refreshListenerList, Collections.emptyList(), indexSort, handler, + TimeValue.timeValueMinutes(5), refreshListenerList, Collections.emptyList(), indexSort, new NoneCircuitBreakerService(), globalCheckpointSupplier == null ? new ReplicationTracker(shardId, allocationId.getId(), indexSettings, SequenceNumbers.NO_OPS_PERFORMED, update -> {}) : diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 9999a3b3748f1..12785841ef2d0 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -46,7 +46,7 @@ import static java.util.Collections.emptyMap; import static org.elasticsearch.index.mapper.SourceToParse.source; -public class TranslogHandler implements EngineConfig.TranslogRecoveryRunner { +public class TranslogHandler implements Engine.TranslogRecoveryRunner { private final MapperService mapperService; public Mapping mappingUpdate = null; diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index 677b8955490da..6897e3bf3f73e 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -44,7 +44,6 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -254,8 +253,6 @@ public void onFailedEngine(String reason, Exception e) { Collections.emptyList(), Collections.emptyList(), null, - new TranslogHandler( - xContentRegistry, IndexSettingsModule.newIndexSettings(shardId.getIndexName(), indexSettings.getSettings())), new NoneCircuitBreakerService(), () -> SequenceNumbers.NO_OPS_PERFORMED, () -> primaryTerm.get(), @@ -280,7 +277,8 @@ private FollowingEngine createEngine(Store store, EngineConfig config) throws IO SequenceNumbers.NO_OPS_PERFORMED, shardId, 1L); store.associateIndexWithNewTranslog(translogUuid); FollowingEngine followingEngine = new FollowingEngine(config); - followingEngine.recoverFromTranslog(Long.MAX_VALUE); + TranslogHandler translogHandler = new TranslogHandler(xContentRegistry(), config.getIndexSettings()); + followingEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); return followingEngine; } From 7441c0376e8ac8cf437bec75e41c112e21f4934a Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 6 Sep 2018 09:20:42 -0700 Subject: [PATCH 19/91] [DOCS] Adds delete forecast API (#33401) --- .../ml/apis/delete-forecast.asciidoc | 78 +++++++++++++++++++ docs/reference/ml/apis/ml-api.asciidoc | 3 +- 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 
docs/reference/ml/apis/delete-forecast.asciidoc diff --git a/docs/reference/ml/apis/delete-forecast.asciidoc b/docs/reference/ml/apis/delete-forecast.asciidoc new file mode 100644 index 0000000000000..159dafefb0efc --- /dev/null +++ b/docs/reference/ml/apis/delete-forecast.asciidoc @@ -0,0 +1,78 @@ +[role="xpack"] +[testenv="platinum"] +[[ml-delete-forecast]] +=== Delete Forecast API +++++ +Delete Forecast +++++ + +Deletes forecasts from a {ml} job. + +==== Request + +`DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast` + + +`DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast/<forecast_id>` + + +`DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast/_all` + + +==== Description + +By default, forecasts are retained for 14 days. You can specify a different +retention period with the `expires_in` parameter in the <>. The delete forecast API enables you to delete one or more forecasts before they expire. + +NOTE: When you delete a job, its associated forecasts are deleted. + +For more information, see {stack-ov}/ml-overview.html#ml-forecasting[Forecasting the Future]. + + +==== Path Parameters + +`job_id` (required):: + (string) Identifier for the job. + +`forecast_id`:: + (string) A comma-separated list of forecast identifiers. + If you do not specify this optional parameter or if you specify `_all`, the + API deletes all forecasts from the job. + +==== Request Parameters + +`allow_no_forecasts`:: + (boolean) Specifies whether an error occurs when there are no forecasts. In + particular, if this parameter is set to `false` and there are no forecasts + associated with the job, attempts to delete all forecasts return an error. + The default value is `true`. + +`timeout`:: + (time units) Specifies the period of time to wait for the completion of the + delete operation. When this period of time elapses, the API fails and returns + an error. The default value is `30s`. For more information about time units, + see <>. + + +==== Authorization + +You must have `manage_ml` or `manage` cluster privileges to use this API. +For more information, see {stack-ov}/security-privileges.html[Security Privileges]. + +==== Examples + +The following example deletes all forecasts from the `total-requests` job: + +[source,js] +-------------------------------------------------- +DELETE _xpack/ml/anomaly_detectors/total-requests/_forecast/_all +-------------------------------------------------- +// CONSOLE +// TEST[skip:setup:server_metrics_openjob] + +If the request does not encounter errors, you receive the following result: +[source,js] +---- +{ + "acknowledged": true +} +---- +// NOTCONSOLE diff --git a/docs/reference/ml/apis/ml-api.asciidoc b/docs/reference/ml/apis/ml-api.asciidoc index b8509f221524c..961eb37e9d7e0 100644 --- a/docs/reference/ml/apis/ml-api.asciidoc +++ b/docs/reference/ml/apis/ml-api.asciidoc @@ -48,7 +48,7 @@ machine learning APIs and in advanced job configuration options in Kibana. 
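For readers who want to call the new endpoint programmatically rather than from the console, a hedged sketch using the low-level REST client follows. The job name reuses the page's total-requests example; the forecast identifiers, host, and port are invented for illustration:

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class DeleteForecastExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(
                new HttpHost("localhost", 9200, "http")).build()) {
            // Delete two specific forecasts and fail, rather than no-op,
            // if neither exists; wait at most 60s for completion.
            Request request = new Request("DELETE",
                "/_xpack/ml/anomaly_detectors/total-requests/_forecast/forecast-a,forecast-b");
            request.addParameter("allow_no_forecasts", "false");
            request.addParameter("timeout", "60s");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}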
* <> * <> * <> -* <> +* <>, <> [float] [[ml-api-snapshot-endpoint]] @@ -85,6 +85,7 @@ include::delete-calendar.asciidoc[] include::delete-datafeed.asciidoc[] include::delete-calendar-event.asciidoc[] include::delete-filter.asciidoc[] +include::delete-forecast.asciidoc[] include::delete-job.asciidoc[] include::delete-calendar-job.asciidoc[] include::delete-snapshot.asciidoc[] From 5251300fb5124b4a47d91302dd6ac2dd58c9ee50 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Thu, 6 Sep 2018 11:33:26 -0500 Subject: [PATCH 20/91] HLRC: split graph request converters (#33429) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the GraphClient request converters. --- .../org/elasticsearch/client/GraphClient.java | 4 +- .../client/GraphRequestConverters.java | 35 ++++++++++ .../client/RequestConverters.java | 8 --- .../client/GrapRequestConvertersTests.java | 67 +++++++++++++++++++ .../client/RequestConvertersTests.java | 32 --------- 5 files changed, 104 insertions(+), 42 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java index 293105f5abeb8..5099bf8d51d32 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphClient.java @@ -43,7 +43,7 @@ public class GraphClient { */ public final GraphExploreResponse explore(GraphExploreRequest graphExploreRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore, + return restHighLevelClient.performRequestAndParseEntity(graphExploreRequest, GraphRequestConverters::explore, options, GraphExploreResponse::fromXContext, emptySet()); } @@ -56,7 +56,7 @@ public final GraphExploreResponse explore(GraphExploreRequest graphExploreReques public final void exploreAsync(GraphExploreRequest graphExploreRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, RequestConverters::xPackGraphExplore, + restHighLevelClient.performRequestAsyncAndParseEntity(graphExploreRequest, GraphRequestConverters::explore, options, GraphExploreResponse::fromXContext, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java new file mode 100644 index 0000000000000..c1f1e1d115f15 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/GraphRequestConverters.java @@ -0,0 +1,35 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; + +import java.io.IOException; + +public class GraphRequestConverters { + + static Request explore(GraphExploreRequest exploreRequest) throws IOException { + String endpoint = RequestConverters.endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore"); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(RequestConverters.createEntity(exploreRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 89521b5e9b06f..cfd3b71c698b8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -117,7 +117,6 @@ import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; -import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -1152,13 +1151,6 @@ static Request xPackInfo(XPackInfoRequest infoRequest) { return request; } - static Request xPackGraphExplore(GraphExploreRequest exploreRequest) throws IOException { - String endpoint = endpoint(exploreRequest.indices(), exploreRequest.types(), "_xpack/graph/_explore"); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(exploreRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java new file mode 100644 index 0000000000000..6598800d76edb --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/GrapRequestConvertersTests.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpGet;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
+import org.elasticsearch.protocol.xpack.graph.Hop;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Assert;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.is;
+
+public class GrapRequestConvertersTests extends ESTestCase{
+
+    public void testGraphExplore() throws Exception {
+        Map<String, String> expectedParams = new HashMap<>();
+
+        GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
+        graphExploreRequest.sampleDiversityField("diversity");
+        graphExploreRequest.indices("index1", "index2");
+        graphExploreRequest.types("type1", "type2");
+        int timeout = ESTestCase.randomIntBetween(10000, 20000);
+        graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout));
+        graphExploreRequest.useSignificance(ESTestCase.randomBoolean());
+        int numHops = ESTestCase.randomIntBetween(1, 5);
+        for (int i = 0; i < numHops; i++) {
+            int hopNumber = i + 1;
+            QueryBuilder guidingQuery = null;
+            if (ESTestCase.randomBoolean()) {
+                guidingQuery = new TermQueryBuilder("field" + hopNumber, "value" + hopNumber);
+            }
+            Hop hop = graphExploreRequest.createNextHop(guidingQuery);
+            hop.addVertexRequest("field" + hopNumber);
+            hop.getVertexRequest(0).addInclude("value" + hopNumber, hopNumber);
+        }
+        Request request = GraphRequestConverters.explore(graphExploreRequest);
+        Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod());
+        Assert.assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint());
+        Assert.assertEquals(expectedParams, request.getParameters());
+        Assert.assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
+        RequestConvertersTests.assertToXContentBody(graphExploreRequest, request.getEntity());
+    }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index a6845448b8299..32c374501073c 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -114,7 +114,6 @@
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.RandomCreateIndexGenerator;
 import org.elasticsearch.index.VersionType;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.index.rankeval.PrecisionAtK;
@@ -129,8 +128,6 @@
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
 import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
-import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
-import org.elasticsearch.protocol.xpack.graph.Hop;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.rest.action.search.RestSearchAction;
@@ -2657,35 +2654,6 @@ public void testXPackPutWatch() throws Exception {
         assertThat(bos.toString("UTF-8"), is(body));
     }
 
-    public void testGraphExplore() throws Exception {
-        Map<String, String> expectedParams = new HashMap<>();
-
-        GraphExploreRequest graphExploreRequest = new GraphExploreRequest();
-        graphExploreRequest.sampleDiversityField("diversity");
-        graphExploreRequest.indices("index1", "index2");
-        graphExploreRequest.types("type1", "type2");
-        int timeout = randomIntBetween(10000, 20000);
-        graphExploreRequest.timeout(TimeValue.timeValueMillis(timeout));
-        graphExploreRequest.useSignificance(randomBoolean());
-        int numHops = randomIntBetween(1, 5);
-        for (int i = 0; i < numHops; i++) {
-            int hopNumber = i + 1;
-            QueryBuilder guidingQuery = null;
-            if (randomBoolean()) {
-                guidingQuery = new TermQueryBuilder("field" + hopNumber, "value" + hopNumber);
-            }
-            Hop hop = graphExploreRequest.createNextHop(guidingQuery);
-            hop.addVertexRequest("field" + hopNumber);
-            hop.getVertexRequest(0).addInclude("value" + hopNumber, hopNumber);
-        }
-        Request request = RequestConverters.xPackGraphExplore(graphExploreRequest);
-        assertEquals(HttpGet.METHOD_NAME, request.getMethod());
-        assertEquals("/index1,index2/type1,type2/_xpack/graph/_explore", request.getEndpoint());
-        assertEquals(expectedParams, request.getParameters());
-        assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters()));
-        assertToXContentBody(graphExploreRequest, request.getEntity());
-    }
-
     public void testXPackDeleteWatch() {
         DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest();
         String watchId = randomAlphaOfLength(10);
From 8ce4ceb59e64e6d238137582dfaa09a7cb9ed07a Mon Sep 17 00:00:00 2001
From: Michael Basnight
Date: Thu, 6 Sep 2018 11:37:27 -0500
Subject: [PATCH 21/91] HLRC: split license request converters (#33438)

In an effort to encapsulate the different clients, the request converters
are being shuffled around. This splits the LicenseClient request
converters.
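Each per-client converter class has the same shape: a plain class whose
stateless static methods translate a high-level request object into a
low-level `Request`, which is what lets the client classes consume them as
method references. The sketch below is illustrative only; the
`NotesRequestConverters` class and the `GetNoteRequest` type are hypothetical
stand-ins for this pattern, not part of this change:

    import org.apache.http.client.methods.HttpGet;

    // Hypothetical converter class following the same pattern as the
    // LicenseRequestConverters class introduced below: stateless static
    // factory methods only, one per API call, in package org.elasticsearch.client.
    final class NotesRequestConverters {

        private NotesRequestConverters() {
            // no instances; converters are pure functions over the request
        }

        static Request getNote(GetNoteRequest getNoteRequest) {
            // Builds "/_xpack/notes/{id}"; addPathPart URL-encodes the id.
            String endpoint = new RequestConverters.EndpointBuilder()
                .addPathPartAsIs("_xpack")
                .addPathPartAsIs("notes")
                .addPathPart(getNoteRequest.id())
                .build();
            Request request = new Request(HttpGet.METHOD_NAME, endpoint);
            RequestConverters.Params parameters = new RequestConverters.Params(request);
            parameters.withMasterTimeout(getNoteRequest.masterNodeTimeout());
            return request;
        }
    }

A client class then only has to reference `NotesRequestConverters::getNote`
at its `performRequest*` call sites, which is exactly the shape of the
LicenseClient changes below.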
---
 .../elasticsearch/client/LicenseClient.java | 12 ++--
 .../client/LicenseRequestConverters.java | 64 +++++++++++++++++++
 .../client/RequestConverters.java | 39 +----------
 3 files changed, 71 insertions(+), 44 deletions(-)
 create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
index ca6539daa0432..bf8abc21fe135 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseClient.java
@@ -65,7 +65,7 @@ public final class LicenseClient {
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public PutLicenseResponse putLicense(PutLicenseRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::putLicense, options,
+        return restHighLevelClient.performRequestAndParseEntity(request, LicenseRequestConverters::putLicense, options,
             PutLicenseResponse::fromXContent, emptySet());
     }
 
@@ -75,7 +75,7 @@ public PutLicenseResponse putLicense(PutLicenseRequest request, RequestOptions o
      * @param listener the listener to be notified upon request completion
      */
     public void putLicenseAsync(PutLicenseRequest request, RequestOptions options, ActionListener<PutLicenseResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::putLicense, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, LicenseRequestConverters::putLicense, options,
             PutLicenseResponse::fromXContent, listener, emptySet());
     }
 
@@ -86,7 +86,7 @@ public void putLicenseAsync(PutLicenseRequest request, RequestOptions options, A
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public GetLicenseResponse getLicense(GetLicenseRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequest(request, RequestConverters::getLicense, options,
+        return restHighLevelClient.performRequest(request, LicenseRequestConverters::getLicense, options,
             response -> new GetLicenseResponse(convertResponseToJson(response)), emptySet());
     }
 
@@ -96,7 +96,7 @@ public GetLicenseResponse getLicense(GetLicenseRequest request, RequestOptions o
      * @param listener the listener to be notified upon request completion
      */
     public void getLicenseAsync(GetLicenseRequest request, RequestOptions options, ActionListener<GetLicenseResponse> listener) {
-        restHighLevelClient.performRequestAsync(request, RequestConverters::getLicense, options,
+        restHighLevelClient.performRequestAsync(request, LicenseRequestConverters::getLicense, options,
             response -> new GetLicenseResponse(convertResponseToJson(response)), listener, emptySet());
     }
 
@@ -107,7 +107,7 @@ public void getLicenseAsync(GetLicenseRequest request, RequestOptions options, A
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public AcknowledgedResponse deleteLicense(DeleteLicenseRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::deleteLicense, options,
+        return restHighLevelClient.performRequestAndParseEntity(request, LicenseRequestConverters::deleteLicense, options,
             AcknowledgedResponse::fromXContent, emptySet());
     }
 
@@ -117,7 +117,7 @@ public AcknowledgedResponse deleteLicense(DeleteLicenseRequest request, RequestO
      * @param listener the listener to be notified upon request completion
      */
     public void deleteLicenseAsync(DeleteLicenseRequest request, RequestOptions options, ActionListener<AcknowledgedResponse> listener) {
-        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::deleteLicense, options,
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, LicenseRequestConverters::deleteLicense, options,
             AcknowledgedResponse::fromXContent, listener, emptySet());
     }
 
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java
new file mode 100644
index 0000000000000..7c2c049324eab
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/LicenseRequestConverters.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
+import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
+
+public class LicenseRequestConverters {
+    static Request putLicense(PutLicenseRequest putLicenseRequest) {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("license")
+            .build();
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(putLicenseRequest.timeout());
+        parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
+        if (putLicenseRequest.isAcknowledge()) {
+            parameters.putParam("acknowledge", "true");
+        }
+        request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
+        return request;
+    }
+
+    static Request getLicense(GetLicenseRequest getLicenseRequest) {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("license")
+            .build();
+        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withLocal(getLicenseRequest.local());
+        return request;
+    }
+
+    static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
+        Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license");
+        RequestConverters.Params parameters = new RequestConverters.Params(request);
+        parameters.withTimeout(deleteLicenseRequest.timeout());
+        parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
+        return request;
+    }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
index cfd3b71c698b8..9cb853c5e5091 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java
@@ -111,9 +111,7 @@
 import org.elasticsearch.index.reindex.UpdateByQueryRequest;
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
-import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest;
-import org.elasticsearch.protocol.xpack.license.GetLicenseRequest;
-import org.elasticsearch.protocol.xpack.license.PutLicenseRequest;
+import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest;
 import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
@@ -1189,41 +1187,6 @@ static Request xpackUsage(XPackUsageRequest usageRequest) {
         return request;
     }
 
-    static Request putLicense(PutLicenseRequest putLicenseRequest) {
-        String endpoint = new EndpointBuilder()
-            .addPathPartAsIs("_xpack")
-            .addPathPartAsIs("license")
-            .build();
-        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
-        Params parameters = new Params(request);
-        parameters.withTimeout(putLicenseRequest.timeout());
-        parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout());
-        if (putLicenseRequest.isAcknowledge()) {
-            parameters.putParam("acknowledge", "true");
-        }
-        request.setJsonEntity(putLicenseRequest.getLicenseDefinition());
-        return request;
-    }
-
-    static Request getLicense(GetLicenseRequest getLicenseRequest) {
-        String endpoint = new EndpointBuilder()
-            .addPathPartAsIs("_xpack")
-            .addPathPartAsIs("license")
-            .build();
-        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
-        Params parameters = new Params(request);
-        parameters.withLocal(getLicenseRequest.local());
-        return request;
-    }
-
-    static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) {
-        Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license");
-        Params parameters = new Params(request);
-        parameters.withTimeout(deleteLicenseRequest.timeout());
-        parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout());
-        return request;
-    }
-
     static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) {
         EndpointBuilder endpointBuilder = new EndpointBuilder()
             .addPathPartAsIs("_xpack/migration/assistance")
From c6c456e8cbd102a2aee16097c8c07f60f77466e1 Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Thu, 6 Sep 2018 18:48:05 +0200
Subject: [PATCH 22/91] Move up acquireSearcher logic to Engine (#33453)

By moving the logic to acquire the searcher up to the engine, it becomes
simpler to build new engines that are, for instance, read-only.
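This turns acquireSearcher into a template method: the store ref-counting,
failure handling, and EngineSearcher construction now live once in Engine,
and a concrete engine only supplies the ReferenceManager hook. As an
illustrative sketch (not part of this change), a hypothetical read-only
engine backed by a single Lucene SearcherManager over a static index would
only need:

    // Hypothetical hook implementation; `searcherManager` is assumed to be
    // a field of type org.apache.lucene.search.SearcherManager.
    @Override
    protected ReferenceManager<IndexSearcher> getReferenceManager(SearcherScope scope) {
        // one manager can serve both scopes when the index never changes,
        // so there is nothing to refresh and no caches to warm
        return searcherManager;
    }

Call sites are unaffected either way, since they go through the shared base
implementation, for example:

    // The returned Searcher must be released to give back the store
    // reference that acquireSearcher took, hence try-with-resources.
    try (Engine.Searcher searcher = engine.acquireSearcher("stats", Engine.SearcherScope.INTERNAL)) {
        int docCount = searcher.reader().numDocs(); // point-in-time view
    }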
---
 .../elasticsearch/index/engine/Engine.java | 27 ++++++++-
 .../index/engine/InternalEngine.java | 59 ++++---------------
 .../blobstore/BlobStoreRepository.java | 2 +-
 3 files changed, 40 insertions(+), 48 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
index a64c3f88eb3d2..fe27aea805eef 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -33,6 +33,7 @@
 import org.apache.lucene.index.SegmentReader;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ReferenceManager;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -569,7 +570,31 @@ public final Searcher acquireSearcher(String source) throws EngineException {
      *
      * @see Searcher#close()
      */
-    public abstract Searcher acquireSearcher(String source, SearcherScope scope) throws EngineException;
+    public Searcher acquireSearcher(String source, SearcherScope scope) throws EngineException {
+        /* Acquire order here is store -> manager since we need
+         * to make sure that the store is not closed before
+         * the searcher is acquired. */
+        if (store.tryIncRef() == false) {
+            throw new AlreadyClosedException(shardId + " store is closed", failedEngine.get());
+        }
+        Releasable releasable = store::decRef;
+        try {
+            EngineSearcher engineSearcher = new EngineSearcher(source, getReferenceManager(scope), store, logger);
+            releasable = null; // success - hand over the reference to the engine searcher
+            return engineSearcher;
+        } catch (AlreadyClosedException ex) {
+            throw ex;
+        } catch (Exception ex) {
+            maybeFailEngine("acquire_searcher", ex);
+            ensureOpen(ex); // throw EngineCloseException here if we are already closed
+            logger.error(() -> new ParameterizedMessage("failed to acquire searcher, source {}", source), ex);
+            throw new EngineException(shardId, "failed to acquire searcher, source " + source, ex);
+        } finally {
+            Releasables.close(releasable);
+        }
+    }
+
+    protected abstract ReferenceManager<IndexSearcher> getReferenceManager(SearcherScope scope);
 
     public enum SearcherScope {
         EXTERNAL, INTERNAL
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index ea2b53bea8d0a..d9b03777f1b1b 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.index.engine;
 
 import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.NumericDocValuesField;
 import org.apache.lucene.index.DirectoryReader;
@@ -52,7 +51,6 @@
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.lease.Releasable;
-import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.lucene.LoggerInfoStream;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -1447,19 +1445,11 @@ final void refresh(String source, SearcherScope scope) throws EngineException {
         if (store.tryIncRef()) { // increment the ref just to ensure nobody closes the store during a refresh
             try {
-                switch (scope) {
-                    case EXTERNAL:
-                        // even though we maintain 2 managers we really do the heavy-lifting only once.
-                        // the second refresh will only do the extra work we have to do for warming caches etc.
-                        externalSearcherManager.maybeRefreshBlocking();
-                        // the break here is intentional we never refresh both internal / external together
-                        break;
-                    case INTERNAL:
-                        internalSearcherManager.maybeRefreshBlocking();
-                        break;
-                    default:
-                        throw new IllegalArgumentException("unknown scope: " + scope);
-                }
+                // even though we maintain 2 managers we really do the heavy-lifting only once.
+                // the second refresh will only do the extra work we have to do for warming caches etc.
+                ReferenceManager<IndexSearcher> referenceManager = getReferenceManager(scope);
+                // it is intentional that we never refresh both internal / external together
+                referenceManager.maybeRefreshBlocking();
             } finally {
                 store.decRef();
             }
@@ -2010,37 +2000,14 @@ protected final void closeNoLock(String reason, CountDownLatch closedLatch) {
     }
 
     @Override
-    public Searcher acquireSearcher(String source, SearcherScope scope) {
-        /* Acquire order here is store -> manager since we need
-         * to make sure that the store is not closed before
-         * the searcher is acquired. */
-        if (store.tryIncRef() == false) {
-            throw new AlreadyClosedException(shardId + " store is closed", failedEngine.get());
-        }
-        Releasable releasable = store::decRef;
-        try {
-            final ReferenceManager<IndexSearcher> referenceManager;
-            switch (scope) {
-                case INTERNAL:
-                    referenceManager = internalSearcherManager;
-                    break;
-                case EXTERNAL:
-                    referenceManager = externalSearcherManager;
-                    break;
-                default:
-                    throw new IllegalStateException("unknown scope: " + scope);
-            }
-            EngineSearcher engineSearcher = new EngineSearcher(source, referenceManager, store, logger);
-            releasable = null; // success - hand over the reference to the engine searcher
-            return engineSearcher;
-        } catch (AlreadyClosedException ex) {
-            throw ex;
-        } catch (Exception ex) {
-            ensureOpen(ex); // throw EngineCloseException here if we are already closed
-            logger.error(() -> new ParameterizedMessage("failed to acquire searcher, source {}", source), ex);
-            throw new EngineException(shardId, "failed to acquire searcher, source " + source, ex);
-        } finally {
-            Releasables.close(releasable);
+    protected final ReferenceManager<IndexSearcher> getReferenceManager(SearcherScope scope) {
+        switch (scope) {
+            case INTERNAL:
+                return internalSearcherManager;
+            case EXTERNAL:
+                return externalSearcherManager;
+            default:
+                throw new IllegalStateException("unknown scope: " + scope);
         }
     }
 
diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index 9469f657c96bd..4c36cc5eed802 100644
--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -1337,7 +1337,7 @@ private void snapshotFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo) t
     }
 
     private void failStoreIfCorrupted(Exception e) {
-        if (e instanceof CorruptIndexException || e instanceof IndexFormatTooOldException || e instanceof IndexFormatTooNewException) {
+        if (Lucene.isCorruptionException(e)) {
             try {
                 store.markStoreCorrupted((IOException) e);
             } catch (IOException inner) {
From cd4bdde328b927762eeb98a19a44377166cb305f Mon Sep 17 00:00:00 2001
From: lcawl
Date: Thu, 6 Sep 2018 09:53:03 -0700
Subject: [PATCH 23/91]
[DOCS] Moves troubleshooting and limitations to stack-docs --- x-pack/docs/en/security/limitations.asciidoc | 87 ---- .../docs/en/security/troubleshooting.asciidoc | 490 ------------------ .../watcher/images/watcher-ui-edit-watch.png | Bin 250698 -> 0 bytes x-pack/docs/en/watcher/limitations.asciidoc | 28 - .../docs/en/watcher/troubleshooting.asciidoc | 63 --- 5 files changed, 668 deletions(-) delete mode 100644 x-pack/docs/en/security/limitations.asciidoc delete mode 100644 x-pack/docs/en/security/troubleshooting.asciidoc delete mode 100644 x-pack/docs/en/watcher/images/watcher-ui-edit-watch.png delete mode 100644 x-pack/docs/en/watcher/limitations.asciidoc delete mode 100644 x-pack/docs/en/watcher/troubleshooting.asciidoc diff --git a/x-pack/docs/en/security/limitations.asciidoc b/x-pack/docs/en/security/limitations.asciidoc deleted file mode 100644 index fb8b826d5dd58..0000000000000 --- a/x-pack/docs/en/security/limitations.asciidoc +++ /dev/null @@ -1,87 +0,0 @@ -[role="xpack"] -[[security-limitations]] -== Security Limitations - -[float] -=== Plugins - -Elasticsearch's plugin infrastructure is extremely flexible in terms of what can -be extended. While it opens up Elasticsearch to a wide variety of (often custom) -additional functionality, when it comes to security, this high extensibility level -comes at a cost. We have no control over the third-party plugins' code (open -source or not) and therefore we cannot guarantee their compliance with {security}. -For this reason, third-party plugins are not officially supported on clusters -with {security} enabled. - -[float] -=== Changes in Index Wildcard Behavior - -Elasticsearch clusters with {security} enabled apply the `/_all` wildcard, and -all other wildcards, to the indices that the current user has privileges for, not -the set of all indices on the cluster. - -[float] -=== Multi Document APIs - -Multi get and multi term vectors API throw IndexNotFoundException when trying to access non existing indices that the user is -not authorized for. By doing that they leak information regarding the fact that the index doesn't exist, while the user is not -authorized to know anything about those indices. - -[float] -=== Filtered Index Aliases - -Aliases containing filters are not a secure way to restrict access to individual -documents, due to the limitations described in <>. -{security} provides a secure way to restrict access to documents through the -<> feature. - -[float] -=== Field and Document Level Security Limitations - -When a user's role enables document or field level security for an index: - -* The user cannot perform write operations: -** The update API isn't supported. -** Update requests included in bulk requests aren't supported. -* The request cache is disabled for search requests. - -When a user's role enables document level security for an index: - -* Document level security isn't applied for APIs that aren't document based. - An example is the field stats API. -* Document level security doesn't affect global index statistics that relevancy - scoring uses. So this means that scores are computed without taking the role - query into account. Note that documents not matching with the role query are - never returned. -* The `has_child` and `has_parent` queries aren't supported as query in the - role definition. The `has_child` and `has_parent` queries can be used in the - search API with document level security enabled. -* Any query that makes remote calls to fetch data to query by isn't supported. 
- The following queries aren't supported: -** The `terms` query with terms lookup isn't supported. -** The `geo_shape` query with indexed shapes isn't supported. -** The `percolate` query isn't supported. -* If suggesters are specified and document level security is enabled then - the specified suggesters are ignored. -* A search request cannot be profiled if document level security is enabled. - -[float] -[[alias-limitations]] -=== Index and Field Names Can Be Leaked When Using Aliases - -Calling certain Elasticsearch APIs on an alias can potentially leak information -about indices that the user isn't authorized to access. For example, when you get -the mappings for an alias with the `_mapping` API, the response includes the -index name and mappings for each index that the alias applies to. - -Until this limitation is addressed, avoid index and field names that contain -confidential or sensitive information. - -[float] -=== LDAP Realm - -The <> does not currently support the discovery of nested -LDAP Groups. For example, if a user is a member of `group_1` and `group_1` is a -member of `group_2`, only `group_1` will be discovered. However, the -<> *does* support transitive -group membership. diff --git a/x-pack/docs/en/security/troubleshooting.asciidoc b/x-pack/docs/en/security/troubleshooting.asciidoc deleted file mode 100644 index 72a05ada29958..0000000000000 --- a/x-pack/docs/en/security/troubleshooting.asciidoc +++ /dev/null @@ -1,490 +0,0 @@ -[role="xpack"] -[[security-troubleshooting]] -== {security} Troubleshooting -++++ -{security} -++++ - -Use the information in this section to troubleshoot common problems and find -answers for frequently asked questions. - -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> - - -To get help, see <>. - -[[security-trb-settings]] -=== Some settings are not returned via the nodes settings API - -*Symptoms:* - -* When you use the {ref}/cluster-nodes-info.html[nodes info API] to retrieve -settings for a node, some information is missing. - -*Resolution:* - -This is intentional. Some of the settings are considered to be highly -sensitive: all `ssl` settings, ldap `bind_dn`, and `bind_password`. -For this reason, we filter these settings and do not expose them via -the nodes info API rest endpoint. You can also define additional -sensitive settings that should be hidden using the -`xpack.security.hide_settings` setting. For example, this snippet -hides the `url` settings of the `ldap1` realm and all settings of the -`ad1` realm. - -[source, yaml] ------------------------------------------- -xpack.security.hide_settings: xpack.security.authc.realms.ldap1.url, -xpack.security.authc.realms.ad1.* ------------------------------------------- - -[[security-trb-roles]] -=== Authorization exceptions - -*Symptoms:* - -* I configured the appropriate roles and the users, but I still get an -authorization exception. -* I can authenticate to LDAP, but I still get an authorization exception. - - -*Resolution:* - -. Verify that the role names associated with the users match the roles defined -in the `roles.yml` file. You can use the `elasticsearch-users` tool to list all -the users. Any unknown roles are marked with `*`. 
-+ --- -[source, shell] ------------------------------------------- -bin/elasticsearch-users list -rdeniro : admin -alpacino : power_user -jacknich : monitoring,unknown_role* <1> ------------------------------------------- -<1> `unknown_role` was not found in `roles.yml` - -For more information about this command, see the -{ref}/users-command.html[`elasticsearch-users` command]. --- - -. If you are authenticating to LDAP, a number of configuration options can cause -this error. -+ --- -|====================== -|_group identification_ | - -Groups are located by either an LDAP search or by the "memberOf" attribute on -the user. Also, If subtree search is turned off, it will search only one -level deep. See the <> for all the options. -There are many options here and sticking to the defaults will not work for all -scenarios. - -| _group to role mapping_| - -Either the `role_mapping.yml` file or the location for this file could be -misconfigured. See <> for more. - -|_role definition_| - -The role definition might be missing or invalid. - -|====================== - -To help track down these possibilities, add the following lines to the end of -the `log4j2.properties` configuration file in the `ES_PATH_CONF`: - -[source,properties] ----------------- -logger.authc.name = org.elasticsearch.xpack.security.authc -logger.authc.level = DEBUG ----------------- - -A successful authentication should produce debug statements that list groups and -role mappings. --- - -[[security-trb-extraargs]] -=== Users command fails due to extra arguments - -*Symptoms:* - -* The `elasticsearch-users` command fails with the following message: -`ERROR: extra arguments [...] were provided`. - -*Resolution:* - -This error occurs when the `elasticsearch-users` tool is parsing the input and -finds unexpected arguments. This can happen when there are special characters -used in some of the arguments. For example, on Windows systems the `,` character -is considered a parameter separator; in other words `-r role1,role2` is -translated to `-r role1 role2` and the `elasticsearch-users` tool only -recognizes `role1` as an expected parameter. The solution here is to quote the -parameter: `-r "role1,role2"`. - -For more information about this command, see -{ref}/users-command.html[`elasticsearch-users` command]. - -[[trouble-shoot-active-directory]] -=== Users are frequently locked out of Active Directory - -*Symptoms:* - -* Certain users are being frequently locked out of Active Directory. - -*Resolution:* - -Check your realm configuration; realms are checked serially, one after another. -If your Active Directory realm is being checked before other realms and there -are usernames that appear in both Active Directory and another realm, a valid -login for one realm might be causing failed login attempts in another realm. - -For example, if `UserA` exists in both Active Directory and a file realm, and -the Active Directory realm is checked first and file is checked second, an -attempt to authenticate as `UserA` in the file realm would first attempt to -authenticate against Active Directory and fail, before successfully -authenticating against the `file` realm. Because authentication is verified on -each request, the Active Directory realm would be checked - and fail - on each -request for `UserA` in the `file` realm. In this case, while the authentication -request completed successfully, the account on Active Directory would have -received several failed login attempts, and that account might become -temporarily locked out. 
Plan the order of your realms accordingly. - -Also note that it is not typically necessary to define multiple Active Directory -realms to handle domain controller failures. When using Microsoft DNS, the DNS -entry for the domain should always point to an available domain controller. - - -[[trb-security-maccurl]] -=== Certificate verification fails for curl on Mac - -*Symptoms:* - -* `curl` on the Mac returns a certificate verification error even when the -`--cacert` option is used. - - -*Resolution:* - -Apple's integration of `curl` with their keychain technology disables the -`--cacert` option. -See http://curl.haxx.se/mail/archive-2013-10/0036.html for more information. - -You can use another tool, such as `wget`, to test certificates. Alternately, you -can add the certificate for the signing certificate authority MacOS system -keychain, using a procedure similar to the one detailed at the -http://support.apple.com/kb/PH14003[Apple knowledge base]. Be sure to add the -signing CA's certificate and not the server's certificate. - - -[[trb-security-sslhandshake]] -=== SSLHandshakeException causes connections to fail - -*Symptoms:* - -* A `SSLHandshakeException` causes a connection to a node to fail and indicates -that there is a configuration issue. Some of the common exceptions are shown -below with tips on how to resolve these issues. - - -*Resolution:* - -`java.security.cert.CertificateException: No name matching node01.example.com found`:: -+ --- -Indicates that a client connection was made to `node01.example.com` but the -certificate returned did not contain the name `node01.example.com`. In most -cases, the issue can be resolved by ensuring the name is specified during -certificate creation. For more information, see <>. Another scenario is -when the environment does not wish to use DNS names in certificates at all. In -this scenario, all settings in `elasticsearch.yml` should only use IP addresses -including the `network.publish_host` setting. --- - -`java.security.cert.CertificateException: No subject alternative names present`:: -+ --- -Indicates that a client connection was made to an IP address but the returned -certificate did not contain any `SubjectAlternativeName` entries. IP addresses -are only used for hostname verification if they are specified as a -`SubjectAlternativeName` during certificate creation. If the intent was to use -IP addresses for hostname verification, then the certificate will need to be -regenerated with the appropriate IP address. See <>. --- - -`javax.net.ssl.SSLHandshakeException: null cert chain` and `javax.net.ssl.SSLException: Received fatal alert: bad_certificate`:: -+ --- -The `SSLHandshakeException` indicates that a self-signed certificate was -returned by the client that is not trusted as it cannot be found in the -`truststore` or `keystore`. This `SSLException` is seen on the client side of -the connection. --- - -`sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target` and `javax.net.ssl.SSLException: Received fatal alert: certificate_unknown`:: -+ --- -This `SunCertPathBuilderException` indicates that a certificate was returned -during the handshake that is not trusted. This message is seen on the client -side of the connection. The `SSLException` is seen on the server side of the -connection. The CA certificate that signed the returned certificate was not -found in the `keystore` or `truststore` and needs to be added to trust this -certificate. 
--- - -[[trb-security-ssl]] -=== Common SSL/TLS exceptions - -*Symptoms:* - -* You might see some exceptions related to SSL/TLS in your logs. Some of the -common exceptions are shown below with tips on how to resolve these issues. + - - - -*Resolution:* - -`WARN: received plaintext http traffic on a https channel, closing connection`:: -+ --- -Indicates that there was an incoming plaintext http request. This typically -occurs when an external applications attempts to make an unencrypted call to the -REST interface. Please ensure that all applications are using `https` when -calling the REST interface with SSL enabled. --- - -`org.elasticsearch.common.netty.handler.ssl.NotSslRecordException: not an SSL/TLS record:`:: -+ --- -Indicates that there was incoming plaintext traffic on an SSL connection. This -typically occurs when a node is not configured to use encrypted communication -and tries to connect to nodes that are using encrypted communication. Please -verify that all nodes are using the same setting for -`xpack.security.transport.ssl.enabled`. - -For more information about this setting, see -{ref}/security-settings.html[Security Settings in {es}]. --- - -`java.io.StreamCorruptedException: invalid internal transport message format, got`:: -+ --- -Indicates an issue with data received on the transport interface in an unknown -format. This can happen when a node with encrypted communication enabled -connects to a node that has encrypted communication disabled. Please verify that -all nodes are using the same setting for `xpack.security.transport.ssl.enabled`. - -For more information about this setting, see -{ref}/security-settings.html[Security Settings in {es}]. --- - -`java.lang.IllegalArgumentException: empty text`:: -+ --- -This exception is typically seen when a `https` request is made to a node that -is not using `https`. If `https` is desired, please ensure the following setting -is in `elasticsearch.yml`: - -[source,yaml] ----------------- -xpack.security.http.ssl.enabled: true ----------------- - -For more information about this setting, see -{ref}/security-settings.html[Security Settings in {es}]. --- - -`ERROR: unsupported ciphers [...] were requested but cannot be used in this JVM`:: -+ --- -This error occurs when a SSL/TLS cipher suite is specified that cannot supported -by the JVM that {es} is running in. Security tries to use the specified cipher -suites that are supported by this JVM. This error can occur when using the -Security defaults as some distributions of OpenJDK do not enable the PKCS11 -provider by default. In this case, we recommend consulting your JVM -documentation for details on how to enable the PKCS11 provider. - -Another common source of this error is requesting cipher suites that use -encrypting with a key length greater than 128 bits when running on an Oracle JDK. -In this case, you must install the -<>. --- - -[[trb-security-kerberos]] -=== Common Kerberos exceptions - -*Symptoms:* - -* User authentication fails due to either GSS negotiation failure -or a service login failure (either on the server or in the {es} http client). -Some of the common exceptions are listed below with some tips to help resolve -them. - -*Resolution:* - -`Failure unspecified at GSS-API level (Mechanism level: Checksum failed)`:: -+ --- - -When you see this error message on the HTTP client side, then it may be -related to an incorrect password. - -When you see this error message in the {es} server logs, then it may be -related to the {es} service keytab. 
The keytab file is present but it failed -to log in as the user. Please check the keytab expiry. Also check whether the -keytab contain up-to-date credentials; if not, replace them. - -You can use tools like `klist` or `ktab` to list principals inside -the keytab and validate them. You can use `kinit` to see if you can acquire -initial tickets using the keytab. Please check the tools and their documentation -in your Kerberos environment. - -Kerberos depends on proper hostname resolution, so please check your DNS infrastructure. -Incorrect DNS setup, DNS SRV records or configuration for KDC servers in `krb5.conf` -can cause problems with hostname resolution. - --- - -`Failure unspecified at GSS-API level (Mechanism level: Request is a replay (34))`:: - -`Failure unspecified at GSS-API level (Mechanism level: Clock skew too great (37))`:: -+ --- - -To prevent replay attacks, Kerberos V5 sets a maximum tolerance for computer -clock synchronization and it is typically 5 minutes. Please check whether -the time on the machines within the domain is in sync. - --- - -As Kerberos logs are often cryptic in nature and many things can go wrong -as it depends on external services like DNS and NTP. You might -have to enable additional debug logs to determine the root cause of the issue. - -{es} uses a JAAS (Java Authentication and Authorization Service) Kerberos login -module to provide Kerberos support. To enable debug logs on {es} for the login -module use following Kerberos realm setting: -[source,yaml] ----------------- -xpack.security.authc.realms..krb.debug: true ----------------- - -For detailed information, see {ref}/security-settings.html#ref-kerberos-settings[Kerberos realm settings]. - -Sometimes you may need to go deeper to understand the problem during SPNEGO -GSS context negotiation or look at the Kerberos message exchange. To enable -Kerberos/SPNEGO debug logging on JVM, add following JVM system properties: - -`-Dsun.security.krb5.debug=true` - -`-Dsun.security.spnego.debug=true` - -For more information about JVM system properties, see {ref}/jvm-options.html[configuring JVM options]. - -[[trb-security-internalserver]] -=== Internal Server Error in Kibana - -*Symptoms:* - -* In 5.1.1, an `UnhandledPromiseRejectionWarning` occurs and {kib} displays an -Internal Server Error. -//TBD: Is the same true for later releases? - -*Resolution:* - -If the Security plugin is enabled in {es} but disabled in {kib}, you must -still set `elasticsearch.username` and `elasticsearch.password` in `kibana.yml`. -Otherwise, {kib} cannot connect to {es}. - - -[[trb-security-setup]] -=== Setup-passwords command fails due to connection failure - -The {ref}/setup-passwords.html[elasticsearch-setup-passwords command] sets -passwords for the built-in users by sending user management API requests. If -your cluster uses SSL/TLS for the HTTP (REST) interface, the command attempts to -establish a connection with the HTTPS protocol. If the connection attempt fails, -the command fails. - -*Symptoms:* - -. {es} is running HTTPS, but the command fails to detect it and returns the -following errors: -+ --- -[source, shell] ------------------------------------------- -Cannot connect to elasticsearch node. -java.net.SocketException: Unexpected end of file from server -... -ERROR: Failed to connect to elasticsearch at -http://127.0.0.1:9200/_xpack/security/_authenticate?pretty. -Is the URL correct and elasticsearch running? ------------------------------------------- --- - -. 
SSL/TLS is configured, but trust cannot be established. The command returns
-the following errors:
-+
---
-[source, shell]
-------------------------------------------
-SSL connection to
-https://127.0.0.1:9200/_xpack/security/_authenticate?pretty
-failed: sun.security.validator.ValidatorException:
-PKIX path building failed:
-sun.security.provider.certpath.SunCertPathBuilderException:
-unable to find valid certification path to requested target
-Please check the elasticsearch SSL settings under
-xpack.security.http.ssl.
-...
-ERROR: Failed to establish SSL connection to elasticsearch at
-https://127.0.0.1:9200/_xpack/security/_authenticate?pretty.
-------------------------------------------
---
-
-. The command fails because hostname verification fails, which results in the
-following errors:
-+
---
-[source, shell]
-------------------------------------------
-SSL connection to
-https://idp.localhost.test:9200/_xpack/security/_authenticate?pretty
-failed: java.security.cert.CertificateException:
-No subject alternative DNS name matching
-elasticsearch.example.com found.
-Please check the elasticsearch SSL settings under
-xpack.security.http.ssl.
-...
-ERROR: Failed to establish SSL connection to elasticsearch at
-https://elasticsearch.example.com:9200/_xpack/security/_authenticate?pretty.
-------------------------------------------
---
-
-*Resolution:*
-
-. If your cluster uses TLS/SSL for the HTTP interface but the
-`elasticsearch-setup-passwords` command attempts to establish a non-secure
-connection, use the `--url` command option to explicitly specify an HTTPS URL.
-Alternatively, set the `xpack.security.http.ssl.enabled` setting to `true`.
-
-. If the command does not trust the {es} server, verify that you configured the
-`xpack.security.http.ssl.certificate_authorities` setting or the
-`xpack.security.http.ssl.truststore.path` setting.
-
-. If hostname verification fails, you can disable this verification by setting
-`xpack.security.http.ssl.verification_mode` to `certificate`.
-
-For more information about these settings, see
-{ref}/security-settings.html[Security Settings in {es}].
diff --git a/x-pack/docs/en/watcher/images/watcher-ui-edit-watch.png b/x-pack/docs/en/watcher/images/watcher-ui-edit-watch.png
deleted file mode 100644
index f6a3ab4354a214a6cf1af8c37709f454a076888b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 250698
[250,698 bytes of base85-encoded PNG data omitted]
z=RV3@hQ@GTp4L8=74di9BuvSt)rQAJzq4#cX=d%NKmQWHmZP!$%$L$S%sdA?rg?BT zn5H$%|C`7-=@my^CI<0t4^e4Bl9(Rj$}9R2!#{N^d`g&$d##tSA#~`@(+VS*>n=-z z3%8D5+|PIg+6f5}ZeuO)2JC2_NkHkaJ@{Q9g`8@Vqrnbm#Pv@!A)UUDM8Bm~eD;p^ zrh-Z;Pk)2!9cBj_mfnA%8JH2-bx+H+-DP1 z6YLW&C(MlLH+ZbxdZ_r{rT%Qv|7h=#k8F_i3DXJx$(^7N*}9^6fseBv*be9q=nh=f zy}U}B7bT``mXl)A`h9mwaFfiPvNj_)GC1?pTt}y0yWh_3^(Pg#Hsd!FN_qRbi@KSv zn-T)nB%U`69@(hEHRM&B)xdfMlf&z62d2TK5X#m>=)3a|=TDNqc@(hb51C~Y*yb&K z4>3DQpw;+k`qRkyo$g27#sSg<0e(o)O*8Rn60^EZ+iCvkuA+KV>M17otM23OjqZcf z{>65W!DWWV(kArAs3H+_WEoVKAWH7UBu>V}+QZ#*wdamzJk^sfTONHLy&O}ga?NJV z_WY(b%VG85J1J%4@sOX zs{|vsxZUL0L>_z^%oQ9I%yMA3akwFV2sx}ifNgS2VkI4!0$DCynPFy?4(ynCep2aU z$yhR30#&o?daU;LmsFR3mnsh^ha%^VCY>hz`@$Zto;y8&Sesb9!4>_U)FG)t@kW5I zsGs<-gdH$=0AXzcrlD`YokfC%2|=!Gzu>dL_;!rV7fJ zJQi>%)-UES7MS^78&mru__JRDT}RKNrmKl3XfW{w+Qr7z#s%)#TN6`b;|hC@bAR9e zbxO3jH;rX~7%`{R3ymTuQu{@DN4>c!bQQ>2$|@UY76(gTH|#LnPfJO+kFRAPWN8lC z@^p#Z4@BJ?DIIaJv$azfJ``>(<1KS5W3dA^x;6Z45DJjT8hYabGEvZO!snBH8ChL)a|p$&lJw4KT8HPH>x1JPO^%|B-MCGM^t@9YA4knv)%H#8+R z!q+R-CCBNX4LR*OoqttHyC(k4y>rqsYId#?^_%IX$+CZMYdkLA;ZCSX2e;~{v6Szh zWFAyX?W!3&8e17#?0sF4?N2iHHKuqlT$nr6J(XTa&JfO9mO3lmA7s3noBDZ}Z}e-( z(|03Z(jI6P6zMw~DSpt@*3*TjAH884E_*?r8Xa>>hqEsh==8XKw!%SsF3DjwT?6;e<% z2%UfLmL4<{^fSorg#84)hu65Co&HQ+!D_)ai!@%=^Ty?Vkx!UM*v_KP_(_HWj<4UO z)MRoz^QVWL#WNWxZxf_0XZ}o{mVx{iC7`cu3L?_bUg1WqI2LE7XX@`SV($IEjD;8M;R;_SV-f&%8=Rh zdA@_F($}T+^=~`?B~r!0NW%sD2Es;MHya8H!k79NjEqdAI|R@K_)T}uq?P%VmM)ey zgPqx|`3ljC;%-L0h##%PhjoW==C5>l_#aCBZMWMC2vAUH_xTLD7xgB63j0D}^vmzL z*WJ?QW|3d!rl!*-7J_8Xya6#TgWr_5&S(9)ceJOU4Ce^MKZzGolJad?KmPHyg#p5M z!pBi>=-C>;e=>DA$zWKY`BnyW?y)mxf;t;vQ^xw8_ff`QjuL6I^JrS}DXbiB+#YjA z4Mxq|&9js@*t=o8v924eu%NA?(do}Hvhws+c7MV|?01%QX$CJuo8*Q$%oDSX3h=?S zwAlm(>>$qY5%_!brW{|Cry!OD`oT~M-$q}y?WxYc*i(ywC9dI z8-OkPPT;W;j<6K?>gx5vw@y@f}KHGBOVp{wnbQyY;`X{I8-`|65d4Oj7K>i~iTG{}fda`O}5}>eAo#`s*su zU`n(KBL8E2CEA5}`*h-Xu)3)mnG%1={tOQB$A@^h|MxHPoV5I-B(zh2ghYizM_tu4 zkaTO2I?dMXv0U{oVMh;CNSYw*(wea( zWZT+5=u`lGvQtdZvdh!*zfeuyU4T{^O|(L+QZwm#$3&!TyiN=%zCh0JA2rfM=5aZ2zkLq@ko_W9P#9 zf7&#`n?<_*Au@Fs0O_O1GB~zU8x8BRD4?xL#+XO@^NjIiwsCnnW%2yGL6Cb#Lb@L3 zb9auO;#!|2DUf8IPKo-FstKi>YmsVH+C`0Ajg63Xz}iKJ@4WCW?X_2irtemX zZ3??d`HV74lA@HC<_!{P*pPP5Ezb99B2LQMS? ziEkw9rO2><^NEac5xpWHx~tES^OXwkq$)ac+5BOj8^ z4EuNeuq7HW^>eiNFrJ}42bw^5zT zM>O6Ki&rEyB9|HdRRgYR>kwO@>roOx^nts=DkO3*1rpW&9Zbj)+b~Qc%0WtT&Gj2` zWN(pG$^AQTPb49Ay&PUo#Ay}4rE9Oe=*&+4RU6hz{#YOV(WU*mO4jfn)Vd?d^6206 zVx5AlkTR>f`mO+9Gs8}<%k8dT722&QMhjKL${VFq{ka? 
zUrsR7JB*3R^*S6Gq3b~KjJ>546C!C1F-mT!b&FqdmNwnvn9nIr13mvuU%ORSl5kNr z%iFjHBdIwQ;{Tu*$IeNYDYqN;0w!*qRaRz*2tu*-i-k=<38(#EMV+h_?6I{|wow*w zijSJsR%K#@5N7=Kt&KxLpD_V z10M0GmZeIO5d2a9Ys$-M+wa-r%};Sg+cF#Un+hktZWWeX)~!FYJ=yxIIucF>m3I_j zkwB#j!flpjg5J^aQ73@s$^Sy#(Cy)nn7G*bNIPa|qCa}AyaZA1VC+ubW12O=1 z$Nh_}@{~FivddDnYv$`LMEJJloFp3yO>MW&FF#rTc7;w%+3)Aw{!zC}>e9U@0oCVv zGpEe!A-}6W-z^ZmwzzOQq_y<;lAuGI?fJ=Qs9dVj>KBuh2|W?d-G?nO(1rzFhP25U(=R6x#lI&D5QJMHdcq3l5J8Mf>+dKrq>A&4W$ecEZ$4XpHla zL@5bj6@`*Bo02yn2_E?JKnyS=h?24On>Kr(9Ptdl0Xb~jk55v)_ol5kz$U0R1fTcj zV~~R!=F-W~y6VSKg!HL*l+wz#Z4j?uDShd4A3!;hC)s1*A7`R02n`WoelDBs&x;kF zIF^3h^DAv&Y|C+=t}6Fw{sOWCI_ZTa(=e}%y0fx8 zONjg9(W_N2o_Py>bb1f5xcvD;R3%)QYe z2lopVuq`*sv(TIO`-zCW8=8y)u(uzmKMDliwnF-o~Z>iAji1 z|9>39e2E}#7?o|r;m6@6Ih}c1x1sJ^aQY~>{v%?n#Qotd={m~7>vm~hp+UG(4!yuf zhn~WUxr$sFE^)fl4Y||Fy`Z@{-Y+1 zcgrsnBzY$AC3?lBgupj)R=Pm z#JWws>kR4jLWAHO(<>}LjypRjgDtoyYPHMxWw3rP{LC^$&f}6otu-+%whO*%hu2j8 z$90VgH*sEMc%ti*eCkP^`+FRxf7~eCFKNX}N+Fl6!b_EvDNgFzy!!RaB2pxU6B0wa z)&B%5sqZ_;!Z8rlJeZvi_z+9tdy|Ia#?3LuS=5DmZ{dAJ3S$-$g0lV-5e_hd7Ky3H9Brv7&vu=bfd@KAvuXVf43Z%Br2+H)kIDLDAkol##t7g5$k1^o1fP`qlPH2I{Z9k%Sm$F$t*Ndl1kg+7XHdh||lt0&!l>kH@+v}fMVXUsHo zDxVS5i^Fm`Fv(lK+Xtn)9Vw6FLmRH}KllQJVH06eTZxN3Ch(^(t`uCs2;F8;Q;eIJ z`if0=1NisxS7%Q5Si#5G?N%>(t|Wf!;kPfQnUTkzoF_s)`sva7q^n@>X>kb8#lkD_ zWBb1v6X0n&4oY6pCX8nZe$CGLl?cEBt4nn`-`Aqjqo0%B509O6F$;3H%-=Q+Qc9WG8`RY0Ut!REeD{Q~#TpFe5J*#7pxpS06I zH@QA8rC19t1dW;YAy8@f)!26a@!{DtiLr*1eQERA>Uq}5q~V}~-Y!L4)7Sm;9WT3; z2I%7IifStl3M$sysfY>AfKHgcit{6EOQU*26M>R2j20{|&ZZn5pZuPEcM|j(?n}=b&2`KbXJao%`?nXlcB~2M4CxsepUTZF?<~4P#zX2lzOEzpUz*3s|Q?B zIwBz8^jd~Z@P?xQ#@iQ}RYSq>bo9?sF?)})CyG>=3RS?Ea)y8q3;{fb%EPid|Ac>)QK@O}`xT6r9_@0p?RF`~7DF;d2w) zF7N%@mQy_>-qLK8-7QOtE|A37@5F6do<@`lt?Z3yy%W;!)J#XFGRr9JJ= zjB-Fr>!UTS%#wbi*Tj>NTIb&3Kd8lZCPG{)4u4$luE4a)>G*u zsDo|r0SO8`-7JK!J|8~n{xn<_0!#+6Zz>SL%ad#JwgR5L?)@Ri?#=Lxe})HsBBq)= ziK#T~JF5wpo}#?5P{G%8L!THxjNJcFN$UF9`?Qi~Xki~w6#shYWs!?gt4FayrEfLp zQn?cJyF#OHj7cL*grqcVDb>Mdlj6oQ+x}#njJ}tG^K(;I?eI|tQRUm^zcxWpxX%Xp zH@-(EIZ4v7hvFlCJ%OxOH6iF7thnV81GnLPtZuLC38{kFTpk3G@uNkCDEVc-B-pEC zx)*q_vGH_Fo4faB&u6W7%(KJI7uGLr1hh|UI~Q0BPLFbUwM)JgSQ>;+Aiq8w3aE?J z3OzdV>4gJH>3A>J(kczTXEMq=R^h|J%*UsD4P1K%RYHcGL-sw8@BSt<7K)Gqf5PHzjO2S1u6*G6vj7NalOI>Qg?R}Jf@Mh9 zj>uMJHe0TkaWEglejEG={(_YhykDxBIW4v-k67~nJ>Fl`f5xD@J{v{TP81K@@h-`@ zz@b=P?o7!a7l+GH8cNMTYoAbWx&vlt_k#GLOrt8aK9o)lfiGO<#^sUoUOWFRo%pjQ z(NB?%X;XBZ_?a~!o9IW&=h^pd*Pq4x;uvLttuXt^>F#FEGRC4l5Ep7E!}#KKkeV|7&Bo$MFlJo|Asvv;`tY0M zVmV*=*PDmeKHKful%uH)ru}fYMGx2y3+4bC*#6yll6^-QR!LHLFRQ03MrvR`!vVfi z26Wh$6Pd$DK-xBg_KJd*bi?PM2Dm{i@M^%^+Q=`_wXoXEOiAm#AvIoqy)`_qee^u$ zGwz>1gX}+{MS`wlW6{OP=T|fo6MrH?8%`(kF}#q92@TAs`gCqKm0&rRQw zJln&?#3ccSjG6!qvqb;z=q_)2XjH?UFIDOzR~1lcvf*1-IPiU%Y;ldC6DZHRCY)1l zO=nIHV#Xv_LqbiN#^6!HK#LpGqtL~e0KhIG^Pm2>Tf=| z(3x%M0EOb9!9C;B4~3BCZ6*^PYXQMb>6UhrQmX<$^a(&@C_bWG@|B+l5|hnngYuGJ zGKKoWE;crD()jx^C`){~nTnshfId@*i<6{I02|JX+>qfEceC_BesC*1vTxBp-YD>K z*J!=hcA*(;}{loZ-qD-A{wKFB&fjt22PX ztOp^@un1UI$L?Z&Lu0lySu7E7m1Q(EMVZ);+W+hl(KP_lTsu*6x&kSHDLWa7a_ z89LJRaG#DZ-kbkL^L`4rmeA5+U}vG0$-D$gWe+WVRzxbRNWCyq00^UDFp8GhXbh@t zJX%<)VK-3>J#2x|elM$MA)C_8?`Mr!>mk5`&;~#E<2#Ro4-V#G?ghaMezuCkLAg&~ zDApkeQ_E%3bLgSQ=AMX0k`><#3KsYKD?h4K9)XsN_0)^;O$Nn^#gp@dgtark_lBbV zGqPNvM&(&5Ox>Xw&hORSxncFT_)s_<@?!RqOPv=wb@G!pJm<>va5^up^R4(sJQ zu9HqB9qxn|W#XaAZqs|C9P+ctNdpk>nb!LVTm??`#^SM4{${oLRzNBu^8m<##l=FY z3FEd&nDR&KUq7F;jbnM3|B*HOU`XWJ+$KtboEoQET=?fDP2405Qgc{{%$pWjGV`k$ z+l`$3rsWWmejI%)qby%CZ7#FiVO?Ic-s&xrR#P~iJ|hxfP`f;;BoC)KIHAn)lPz7B z15MhWyeqs~^6gH&+YSnrzgfegzUMW`a&@v|Trz`ym^uG!BX!W(%ISx>T0G9~q0)_k 
zm-TraH7#j^jnAgCz7JF)ijw{B*{~rGc`xOK-S@OsG#997GC(2S1*g(t18jMT4X1n=WqgYZ=UX=~tYQU5s$Q9Wp+&{9y{cAi6OvMAl`MzVDQSuj2D zyuuVz><#Y_v>Uw|z2QRlj4&1vxcH-zD$`5MtYwjQ265QhAwXw`e7VNN_q~YVHtAPO zTNeyy_U`^=m}iIzKhf$ve)9m7W&ymwhzSMJQIPqd(gSooJxfgaUqld~tEA^~QG5`OqkwY&i#21@};HC#hl2yuh7CjU4*Vj46ErQuaz56f@39 zE|tM{At(+G8YDbno=}uSJuHXynnOomw>F30vd082*QrG`Wpc1g4W^m!@St+--17QZuRpG9-Hh_n%bk zS(&^!9q-|vg8s6!6CuWrWiBl1E}p>zTVPRIGt8lDDl#f#Vyo3F^H%KzR9N60DI}+3;bHl z<+N8#8O@<`0U;}R)zJ(twpCr%V|;M3S5dHshC?Lh_>fKuJxa!T?`Wlw_% z;GR~aO>(l+#T)~U&55YyE$Y-3r1YdYHrn*Zj6=qlgKtIZIc+%A_x6WJmt+y9+jZF0 zqoxt0T86{Km_x-FzE%p)Mco_<$sB-mf<-2rTJ_`E0=4+9W83$ka((dhSES zgSFPwLha)}^c9zB=>fiB)Cke6DnLg#`_=L?-^VSfPp*<|Y&Wi*&32C}vgKDrIh(R6 zFNb25PQdP|YgNT)gjN2H!$qu=Y0%8h@8N>?zKIjtq0XDOd=V(NRCp{~ncTGL>Xga{ z_}0t9g2<|x&78%SUru!D88TU1OIwSf?E{_u-lq6iNkjRx0Soi z9rqWLq6{kZigE7M;T8zaoaVN+d3a^IUxr=Bl)MqDzNXZ&@)3K`%JqU&6C*`4UZaeK z3E`X>T%>8vi(sWd@_@Qb2a@^f;Lq`)8P$gNcI|`7Uga0K6ZC2xF{)lJfo)c>YZ%NN zrhZtjjF!eAAr+d4`I_F1^Sk>Gf0ioIo6N#z9+psH4-`PfHBF?NU|B5K4$Ew!5vb*K z@4&Ld0oRs?1~8q;^QlKMw-V+vm(%O(!pcB(LHE_WMxn!Hfm^nGsN^3&Ts-@wT^p?e zqVuv)9aX>eWAEOS{5!Q~(h?I+6eTNh^o+j~3Nu_MCwz#)El zfxR;0Xs$VseLcFh7fvOI^gIf9i(?0l>hu4Zv#c7>A55naveiCA2zsUnkt=V zL>M^6MH1cZnC5vf(cR(*NxYZN!e}S}psq)lHrH!gn}z%719e_Ic|8g{zasO~9cNY; zxD&z`4}Z71J8G--P`hugRFqNfE&XKRM#H)8(K($^R@DOKnUV&WBvjVUeVf_ZYH=3D zwMv8yosUJIFiLYyHL&Ruf5|?}r`&d!w|S`=#eDA7#oBNP;Qm3s0Pl%;LM^$yMLY+8 z68ogYT`$}!-Pl}Md1h)+=CXmA9Wq4rrBJ8IJr^0{aV^Oaza4Fta_d=&a4 zIgg8wnJ%ikz~2WQMAsPP%i;YlY*r{Zm(#)it7bqFMRO$Pz!FED4rFnaIeB7II9t9~ zEiCW6Ds_U&HaMLrJS{OxV{kia^g9tHUlmx6Ixc$=|5J%R&8eT!1`RFZUA7HBK!_=*Bjog$v8dyb@^7hz{RmBBlF=jdp`??$7hNvqK;q&N73PVYBV8WLB5!JC<&beuWY zBaoRbA)gvAq#`;OchSYdsB`OZg0u}01pSdMTK_=dl2TZ66Enq>!H3NQxdRbo=z3|> zD*#oCl6NFzY?fJy6j>HuGhHv9EF0A{bD=y8_P)NLE?s!_k!{IcIHUzh5nKDLDsp&p zv&)$Aif-S{p-{Kss}H2Vv0zXAk^W;n;CHjXY=ImI=lVpmL4Olxb8;B-L80W+jDe=C zY9MoBRad<~r#5*my^R_7BGgj^NVy($%j-*Fah-gOdmaoFx5Y=g__SEIV=d1nt>U|3 zUHHV&c`sZ%Sv+@}U#& z+L%|SiDCNhH+tKI%Jka!zKPfHjyXfKXQ!)wVI}I+N5t7jcz2T;^boB zXtdwau-oOdAC2&&NPT7b7atgWfDJ{9vK+0v!Gh0X;ZSc*L=b-b77Jn+QRh%&Tn+aP zak;X=WAUE(KJ)rMz`D?k{A^yLB5=BaYfVcgdvSm0Ko)*A+MID_t+ia1f#pJH?L4b^ zmI?uy$5UX#~y%!&P z)@4;I$-e{WJ!<_yH;Go!{MH|pOSQ|FYX;P-@)E`9-r<@sF$g?r1=gyycEBJ|y__n>P%8H-P zaGU`vJw!=OJ&6>?0*$OZ$B?qW_jvluLNQymq3^}^b~mm59Ar+j&JN^Mje^4~!!7K2 zz--_v_Nc)9EhXb1WnxM!SvYV{9S zT8s{mO^93EfR-Z;CC6~fFlFBnGjLF;aY;g_vM2v~u)r<8zg*2;+5Y<^dpsUd#=U7c z2}8U&*>h&_QktYV*jC9_qynD=V$=68ZkQAzA^^3uE9Z_tWkh2+ymItopy~^|8Ly*Y zWktxBw1-&*it>91_86Q(wBVQS)Vc<*DFlQ>Exw0igoRHdsBtg$HVSWkID;)HPvNgU zC5bU#y@2KM^&yI0g&-HAYUw0ZPn1thvw;1gb)zm=$l%=flU{q?WDvrBlx55hKVyMc z@3`c!B>+l_gNka7aoXir*vNU+J5J9k(s z1D*P*jbJqWEoL-Fs!4y^t(;$uOF|8k>v<*kLv|i0 zI__h2 zC_0z7Ami|=E!blxrptC1UFk_(*^Va%>joI~q~-D6h$n;ZP&1mc-?JFKqz^jcy3$44p=$&y^NFSS9idVHM4_(OXcef&jaAxUAF zZ1ctu`1URBu#>kqy8NOtbCIbR)UOY!ZLS*^f0{|&9R5k!=yQ4D(H>6|3Wr2*Z-4oW-PV5BNrdM1*D951CNt z8dJfi$}*=vh#9&q8koL3Jn5M`FA)-eZN&{_D1y_9MwL(cjdF8y?@nW$l_m9wM%Pwt zqZFamyGQtP!MHS+`6%|BZ;J2Ii0S2QP#rdY2v=4N&6ruu?dxFWm*=O9+t=y9mG#Sxrcv={Kx%=Mr?7 zJq8)HRBu?~=IOV%e#Bu@HK&j^u@i9chdl3C^JomUS93iCxH=aor1M8t&Y5fM{Sd7-jq2KOEwT2-Eqkl+Ke3#fIc zgtC1m*yZE;+HD`&v)ecH7xtB8hJr>c@60t@-u6o&JWfJTU%0WqYrus|8dg{(U98Ua z4!D|#BE3Y)7a3)x7dK7=-Xr3hzqa3_^#SSEo)AU^x5X1^$BuUO*OZ<*T$Ed#e=Oxt;N~X zBWnn{reBNx!=QoL&kS?Rd_?XCCES(=U-Y^i5(B@w*+6J4_uuJ_d5~@0a&jWEYKUGhw5P_H^uZUEsN# zL)W>H5S*LC!}HR{hQ#XFW&v?alFeVy==R|@Xm~^6ZRo*5T07yDf_sqzz%QU)79wTt zUDxYqQR|oFiK)%QQG@Zrb6V*^Sr5D#W8*yVMIpLyi#j3oqve?}A3)=%1T5 z^-qxnt%PP&lrSs>DLx;{KU{VfI<9fMf1@BXLsy!n)CqaHJ&+ipehP|y!~vbaz93A~ 
z;=vB`HQN#)7hf9V6H`_}27bqXh-Nc3hwvk$So`Z=N(a6zjr0zR(C_<~?;I_8h4>Yg zAS^Ix?FjU=Qh?K<$d>3mkOISoP469aT6F<s5Pa#h%0ui?mF5 zE{puZZS*nNV&2?ds6EgnZ@pNm`PAbtm(c^DSN^QzUIISsh>BylGhc#xZ(^&d>add2 zE48%EQuUpMijf~%o0CblUOxG?69@?&C75F9kB(BSLtTGqCO5e^i|0PaP-7aW6G_+W zn!IkR2}p$QHp&`1VG6Ac>SJh%-7ih!?Uvc~z$ad7EfWB>Bv_Omd$XY2N=lql8c{H^ zDVwQ{B<4CeV5ts{W7MQu`1%phC_N-|C@60bpyzvqPLB^)PNY$ZQJ>Ssc#f7&`rXPJ zf?XN?-ulb&A60~6M*jeeh{C!Afia3)lGNoyG=HxedV7AjZKiermpQ_MT(NiJ;A=-u zV|Zo=+|zhv&QWe#j)_!Z#0?W~OXeJY9qdj{ zYW35Q2TN@L>k1EoU_+7HvuN~df8_3?O8@mfOcyaYdUa5q-*ouCRzAHls6OXgR=IvU zDk6J?4Pn#<{=LJCzXo{Crai&AZjzs!TTkX(;^)_-2*oyR9d41>mi%!X&s@SR8c*52 z<$2Otj}xCc@>2v~K@ciYb5CvCx9`abF*TNEywu8FxYmX3rVDVOirnE08CQ%hy1Wq|Y@}i-w!{koj6kD8?J9Y#wqj0|u>CgRAX>A&6bFPXy+{sP z(nK~3As|a}^%i|+Y}5-H=@zB$&-cZ(v<&omMeFdgrkDTv z$VHRhy(+%YF!~Vk**&y`sMh!g)+LZgxqvmEg_I%vf6ypLm1H`_SKj|jRL_Rp4?DW~ zYKHTpd1mR=8LguZkuj$zx0a6i_E85Bky5uyxA1S=Bm{F+bxO|)hnw3L1NeVEq|38& zDSFYwm__LP_#eF@`@JA2n`d%!)$}M9OIDU>>^&{r8mVcgP+s`X0x@Uz1EqsWi&j=OJ zXWR*{cEr7I>0^c8C;He^;-bN&`7}2U`%RBm*WTa%IZI)SRcmnV*)qq4x4Ghfq<#i@ zsSz`k@v0?`HYIV-PUd)_)1I};&2o*I0z+}OlH1fuV3(*8m@#F2v@+`Jp7QSuU9 zmtTtDq8FZB@-QF8DW}OBM*3nmrQi2Sl-`)H)@$>XWPC+PA!NO7&1@G<_~zn;K$lD` zrpdR9bFEwATL#n6kYUMu3&$ob{n6L69&Bp?^qHl@$$4(h*{#?VVx|>}a{zY)_IBBq zHR3#j78{&ic5P!^Ag#j+5}@o+HE}uYa4dKQS3R>1*T&BfHJQJnyLd7W4dmipX2aC@ zQ6Vkh>{gy&Kpf*MuVU;Su)!1l0BYiZI0B**sRsk_YHH&ybu4 zo68(wL#`yTZM8F=vEQ5kZTFUoFqS?l9mWRNmXBMS75>euG~fOykJ>`(_J+dU&gsiG z&bP%w+c!jRG|>CyokDUTA525;BG z29`R)A&L5r9cb#Q$6p-Dk4ik9Lm(aR)`+g1cUSxQ} zlhlg%3*->_rO9FPOFZC!&qOuFP$TqkHYvsNK%RF8cUH9h5j|QqhFSC~ak>szkmCeK z8^td(=BKRh5}Nf&gO;9xtP)yW&J!o1$k|&0UxPf_&pV0XbFG-$V-eY(3w&`~PoBT` zaV{vltLS;?fAYgL54J`p;eHK^CaOow=Kb2d&}Y{Pt6x-G)6u5nB?1QbU(U=xe8yTr zm$KiEgi~!aD0hpGRR*TAeVPsm)-PvQp0X;LQShzuMyc{GfQcJ*GG%k)#TOOp&&U!+ z$YL+<%HIOp;UG2+(inf~l=kw&r;PO*qR_JmhP*xsK|%_f?#zih2zk;wnDwdi`_IXj z?-^RzKPjR>Ls{GSDqM3f;`(B|C8?`jnNXX{*6!WCw$eY!f<1eu)AY06{T_K7WxyT$ zG@t;d-A?T-F?*H+V{S9@h0r}ji#%JGJ^h6-4w_wv7Z@ggHsU+9Nd2O1yQQB!oBYp2EA}k*qX-kKTkYajT6c< z+Y676_3kMj{PWGA|56J4pL6|R-;9^xc*G)qX39OkVwR$0sd}0HKRX-BRd}hh79VFR zz5w1IF0U$OQwP1S&7a7Qgw?olZthoL>R2+o|=V$D7-z{Ay@xBfU~C zN7+f|e4D7A?0qgyWx$bK*H3{it9|V^qCTo@o$L<|ipBcZkR46)e!T@Po}-mM@mR=-LkqXC7q{4K z&+oK*G(H21ZHCZ=5;wCRVjwfvrz`A)(@ve{J zx=(-FzTAv&NbiP!ZS4_Gzsvl_1+46@lH0`H#^3J*Za6ej>UjwjCt^n|nSvU+urR(Y z%(hB}AuOuq0LTWM1YwTq6Xoe21fm)<51H9surFAKW=+tng4^qB=<(kK0~8^L&?II=_1_~9pFdB zu5k$P|Nh+wMi%tL^ot+)SW}C+-Pts_*xAC_U}}^3W7`i?sk622r+fQrFg+~2|81>6`!%rFx6XXFzCw>M7hTQ!e_6Gh_X zxs3~r3*ZT&ghzW#@1?+3puHyX%T9VygVp(B4T}es?iwTZZ%&R@8nCc(2a!-b9GT`! 
zOb`kdWCXh}tlG#;Dp3s|uC^jl_Q9TAUMIz}0SBri&2Eyr#0=*U#n`E4XO6N|hhk|) ziNUA?U0&laSFjz#!Gm8X6guQcY%ABa6QEu_P6M<&bAG@TO-+>ZS1rcdQLRyfJ9zmo*LAq2@kY?x>MY=mxLSpDn zk&*@}>4t$3hOQyLJ?D9z?>p~xJ@0qqpL5Lt*>mr8ulTLC*ZS?Lr=JT88;)}kURuj% z9#gQhp0cSI?8RdNrO=_tEhlR27pw#wL^( zlLt?e%{}kV7PlrPxuX#Ad)8?I{s+7V3wJ+zk-xF%I0zZUI{)VpGLD;fAT(F|PsD%X zT;-P?4!TN3f>#`Ch?&~z4~Ax4N>8G&(l)iY?kX)65*^=foQbb^jg#dqNtP1wvWL)w z<@ZjqW#+XV1!rk%#5=hfetgfGJgN6zyIZJ~DOe~+b}cB)jt4B{jr9zN+d-XOpErMX zl#)6(L^72WZ{%fl!|14jIc4{xjv8CNBH}O|KN9xP$BO8OMdtK~6<<^MTTZwGcHiS2 zNaW?T`X=-Utt8WaBWOCkxwt43f`4r8tc7DAQ08h_)lMy6zY*B&k~RDdR6u{c(o4D) zosHB)*uK5(RWX#*m}l4X4*N8)=8#rns!>!8YvuYoDXIJIYR%h1JgoQR1jW7RG{`I$S^@f%LcP;wlQ~i?4-U&9o)f6hBSsL@TeBf(ys%ZktbGw!6 zy;iHMRmcfsLuL79Ua>;yMho4dKAB}A&?^N|)f3kq?YKI0MEP?j+y~<$im}Gn7kUC$ zkPc3yekyu#W*RyK|7E4z^XuReGYyND*znV4A6I<#$@_D;(6&&$E3s2~_m>YP&}@di zhL^F|6+!U|E%;h{_^2R(&A7KBjMjHmEuM>+5W&)VRBiVSH!W_RB~CAf@44_Es6YsI zJyBf5xzMBKfpHo~Eycs?<8osa^ckg-oJr%dkvvM=<9W$7wtbqFGn6Rn9#h6>A{_CM zS$3npnc?m4I*uZJ(b1a>%cTMZ zNX-hDI9s)(E85EJd@rm+PYQ4b{EG`f{f=E6ZJH^t9MODt8YYC(0-mO^+$0i!o^5 zaE*33Tm&urnT5(^XCsu?PU$Z)vB?YM%ufuMFkJ%Sfm z3K-}yu8;?$#uJT|lgMMRb`YDbT+ZQ251G2v`4-2g3^O5h^Pu=|2WrBcils))~e-TxMV247p385B;)yqIth28YfqC`A*A*WNya!on++==p1 zQ`ln86BdN~QhuS&@)&BJ1P%;`kY`A`I4~RgeDKg4>P7~px%I=S7oPrOi^AWw^5cn* z5`}j`N~wNsHdk-ULd0P_F$^*uT(()az<~PNK(D2x*rq3J0Uzp}>Nlzp1!HCKCUht} z=yAA5r?-EhDf)EyB_TaXjn$E3j;Bk2UuW*bxKRB>7nr^-l^j!S?(5xsRyB6Y2$^EP zan<2Eun3w(XbIInLe{^#X^OkyDv+9)V5*d+(O&(y8^RHYJHAN=vKYM_*N8x{Ce-R? z0ZX{^1oUV)=Z_a)W}fq!TVGyi_!~@+Wbw`8QD9f@|^=U#Zg!<(yH{74hpf>9f zyNfB>|G83QCZr?jQn}uzQAdUASMlHfgcH;2HP@+)A2a%>8j;V*1htf?vmD@!*W)v8 z7=lku-%7GbvIGO1RHhZaP)f|C7FGo3-xT5hUqSxsw`MoZ_8<=It2TP|L)KGVdv2W! zx+W3>S+5;p)q|Dn?`t>r@maISjcXxa6J>fRp%k#p;IyexjDn&;47Jt)i}B|r1Urg+ zD4vLy&(yNUG}0fHR~m2LYPEf#3f$)yjP%sh&ZW|52o~^emNyZrPAEA zS;SQ7^|{$sp_NVANP(ZYfnf-aF{w+D!iz;RctR2Sym&O4RkFDTnh4x*QmuBqtkL?k zI2+3p^wt**(z5~=)0h{HbuIXw7PD2GC$^EoU*i@D*Nz74#8AG-0~ugOD#A|nAFESI ztW1(9k4G$*LMZOLFrB0fGxiCpn@5{nwHjyM)7h6D-fjIn>S$^O=mmBLOlK}+UOzd) zpJSQ1t(4_j+nicu=US^jPL1F0m#wUc5zaWyl*-XqQgjWntddB??jM_~mFue&;}~Pw zTpQ)>*V&(-_)J#&C2`ENqkKiCsY6;y(NX-xscGm7G2ziu1ey{IP^i`Jv=fI4a?TL< zJb`=bntrb!J5OfW6DDrcW;b^o?I6wQqG*@t{dVXx|21aby`S3*QEA>;_efrNm?;tdS2pXM_*Rn^1ZN)@Ycu*& z$#y)RvGpk>Z~-5{9rtGR;m_;dBe@N&GlDtSSi*SY{e`fM@hM zqN2)^SgDChs{6oZo;h!jg}s@&&9o!b?Z=CozQar$vmv1OGS>nfH(HQb17$rw^%2+l za3vkq-^5qD_-$P^5iT@IiDfp`pzbT?ys|?wS*9%BX1RskZ>yp3+GN;kIWc##=SoSf z`|GlsUdVg~S0$T)<8acig(;PHo5+NEUjs$$EjCiVkVho0ht?86Cv?k8M-$e1)gpm$ zYj^yzoXhooAu4XLrY>M`G^&@a*HFO=Nl9T8-Kf&te!71!b&5HLb3Ne2k5NPS#yNtw z<)hJ9CBsVV*5^e>nSCqyk%F8R@8eB%GoNZ?=I%5_&;H~sEW~V9y^8DTRT9<^TLmg< zEr9w|womPoXzW?94slG;U-8$Q&`Q8AR|>6XaX z$UM3oxi%UnoBr@cyyYuM4F({ODyJcMo;I3iPNNOt#+@{zl`F-|VGtoru03HnMru5v zFF)22xdiEp-qC>ScW$y_!s3>4{QmcRQ2h-AEA<6jpKO}z zI>*0?*}0O*p5Zq$)Jl)WtCf|TADyAZH~jX#sPDVWutF?9s8*W$A2B16cPO9sb#_J6 zJ;xEL=ReilDs;+pJ)X~cn9rhpqxO)>G;p6P_p9x`f#`Qv!V0>#n|SXTjA;3bl3W*Q z1T^iAVU^6%_182;WmiQ9GpsTxyqL|wYaeJb(YTPXwDr=buB7XI8b277S@%^`IsHlR zq|)wb`4}Q(`cipyYfzrbw^mH99F-qgg4W+jFjO>Ohm_&p8|~HNYAUZ~sM_;3Ge+5{ zSu*@c7S5E&yoDewHqLASY&7$YnLHxpO!gIgdtV-kw zLZl;ZCOmUH3y^;5WaUB7J@t}RMY%GI*CQz=>0YGe?X{41+6d3;LMCduLSu4rxw+Pb zOynXyH#r;n^gP3HWITRHc%BfTv8}Hq$`^82{&MNfxaoY<+Nawp%z0YEUSD!#@)7T}OVnoLlWyzafdk@}5jiSEE5%sJ80VCjO@eSJtx8}3iGv}DFl zr)0M1{)92npp3xO6Bp@Nf6q-v_h-L zjXgJU`t_8=wF4W`9q|i!&PG}Ikz4_S;yho-aQf$<&xj`vqd@^7~1yGGtJM{z7|fEfz;5@iF^pX2>J}S#q3h= z=$9KwZ>PC0Rl(PYZ=ZXB%dPCFa}>Y(9#W>hyuK9`#CY)4v!Rwwv!wNfm#6IaENuHy zcsWkLKmug9w*_<^jC^KOjxC5&*raoOz>sOuSVrPtQO=omo1=9`sH@TqKCA0FW6`iP 
z8L&zS1t%q(Q`U7*yUPc5wdaUo-1M&V?ZlzF+T$;8;F|@T-)(=cchMcJSA*^LT<@#a&!4)U8G|%P&+C5jU}KLF zPJdfA!agbkX?&&@D498|BqJ~vZu0>!j_pn$!wiB~bs zp$j7Otiw7jIhNY(?v#wRG7fH z{O;6%YEiffC0~>cOVeYmZblyyS2`}6v!J{38g?QRzCO0F)N-lrtUgmp@4$>n;zX(I<~lK_5Z ze7_R9w_dQXXU~2%xotQ%;qpOgD7RO8O3+Au%DnG|cW?i@)}jkR4<7w=z02wElJ@0# z^17Z-Hh=B7?{w61KS!rd3cIU9ZYprSH+OI18q=VZ3HyWsM7w!vOteN$sc|DRCWL7-^Z@8(7KR|x55&DMVFfY%TYUa`UGu-r`0uikmeV!(#7af;@4Wt;m^Fh-F zvs;{R3f(e~6yCP!81cW(i)V@5pv1S&>wHLDN2AODnF_r|D|w;f%a2Dsj^y6N8kaJ* z1Fk(Gpkm!(VoMw>%y>l>HJtC_kuZefhzq^#tu(~=z_ecVRB&MFUYD^ZM7v#W*NZNy zAWkuuaKJN41|n2z8sQ+gaJz%f$!{@)^5yqB$7?x_eN~X%ovRt!vi3xLgS^n0rG?t_=ax-IWQ`o@X#;i#Q)BN!rcy(18D5AQ@`nTF*$bTKJyhRREai{2>Lmx3 z`xsAT>`x7Ipo-vbW?9${pPO0g-4bzcSt~hgg~eK!Vj;%0EHZx|oTJT7Qk^bkhem(y zVv^~dolX_xIM?JG#tl8E5x1FK*zcm07!^y(b8X5m{B+KsT}RMA%*7QEC~eZV!NDd^ zpisV2ke0+yEuAs*4&orQ_I-~8?^4RSd5^_|p$8mBxn{NJgE+j$2}!T%n3w;&6oH8x z{j!(F)gU#UFA9j%hBB6(880o#Y|QG@GyTxPRKpYHeyrzavOrSoZkN|zN;fTxDw(PO z?e?)&pI4uNi<)b!KG1!o(&2MzWsjO}ZwAC+?x`?T*7)>{+- z&Fn+6bm4E0jmR%%EKk#?aMLj@xlM|0Qg-&*p{BX{%OIkRF}{yz;Kj?IFcaWV1o_UL z`^E>oG7SU{!dIcAb)|BhC^vY=To0X>-%T&{+m4Na@(z(1{H)`u#Ni}+?ic#^=4#UY z_SI$7JqsL;hKd{V%CaF2XTKh+*bQEaR106Ji zZNDr-KGhTKel5N2j9Ps3GL7&LCz_PolSR@1OM-gyI}+W-c0aegdn<}xyX&mq>WOg! z00WD~)R;j#&hWi+zgN!}D#iVXUj*ti0~eceg@pQ=DHiD#oKWVGCd$22AZyAx>1`pU zi@{OL6$wbq{|jsqD&+igw)V<)=h-j6q*b%bw}H9rzisv(A{z*UX4iilLL6+mJadlH z@EM&tl)JdQI9bD!eMA*L4mZ|*t0Pj`o2NfdNZ{pDf(X53C5<~hQj1`RtLs|(=9K^3 z8)TMil9g_oe7D`$$O39uVSz9eEmtx;W0PxTMYE#9n1m%X^Kfn%XboOUS zFBT&?Zu~$%93EYd?#wn#@HTgp-{&r|avXK#x&;8x z)3uMg-aLOi{8!xXA(vCPC7+?ex0+@)_Bu=(vKF2dZtQ3wB!nKEv}QA!76~?p^%N4= z&oQ^lR9YvmmX--hnXhH+7LWtBL#3Ic=>U*+Q}SCClsGei+`XOV?v$5ZvC}Ah+O^5U z3QUsD>j9ptT zy+?-MdjIsd;O%nz5&6B{*3=w|UG5G@S!ms1qOwd}>C@K9fL!uhiE_=T_GCJ~d!VP` zy~|H@p+Q8x#JSvDOj!MBMv2qFiek*7{AnJ;6@m3A9075%@~$nh79!~RAa1%Tdq8E4 z5zybwy5gVfdD-aDQma-+<~wvknoGhn!rTA{oivPQET^bVs|oFz0tkA%Sz>=72x`fW zp~HcHqZWGkJ$6isgBjnS(aDUiqq9#ow-BV=@_QCtk=ofkx0LQeQU*#bHolX{=-;ilJmv^po#H5-B?3xuKC+Mb3su=7^1a}sZ2Ki|*i!-?m< zpTdXUBYSozC}XS+)95jP)C8{e&vyEEqxR`#3Xi}-Bvxw3*-TkQoJ_{jT_e3eG;8hH z_c{7F*KJ775# z9A~{_nDaYS{5#~qqZoKA?z46E-D=sNd)uVHWg`x>FS(t=!m=~59WZ4*fZymn(DkeT zs!V#%u;;kG@_CW?%oTW>&M3ur7he?Xzi|M6f`;>k=Bne~s$v$>Z#FSS<#-}aQEgui zW3*t`Qg~2lw@v(u?m$tDoz+gA5ZdjzWjQ zB$zU5TEdi(<7iM=+-I)TpGGxGc%zpye*^elOlvT6Rf1Wb` zdvXlt2p7ac9EZOUF_y?pWv$P_Hp^U?ob!-UXk0b>bQ+^_7FNlqtCkeHp*<~q6}w-X ztQO;X&RRa5Ej%z;^74>0H)+FPuP}ksWGU@?p`LfFz0UPcT7Sa#u)@aUS5EBYQdvZ# zz~@FI7~4nfmb2?Znv4LHgbS%Q{h9#f!#G+e{katCBWm4JdDc{CP-RSl!FOc(I4l8* z0`-kM9l|AH#nj90q4{s?b6h;Cqs2F~_%c@~6!4%cpjaDeZ5Kii9bLi`|5oWWhT3jaByB+zu6wF7cN z`9gzpgUPi4ZG52z*6#oi;eWx@*A?@Yv}&ZtM!g~DMZv;W-&JY;+XV&o*!@25lNfYq zJ}Oks0B0_g*YA9iMG*$h&)CQ4DcPNT`<6A0$aC87WY5yK zmr$=J9Q+PI>G1n=Bf1MA6&|TO_pkvh=ZthE{i+~;^*w{>GcM-ngxE8Yl%cwE74vEB zxdc}{v5E%}4lONAe36~2HZ&w|SR1RxOm3X*OwdEt8<$$L6Cry|6K58=;;6uCrOG_z zFpKNj!B*X?w>qjAL5O`hW`LwT z_kY)9R0Rc`F1nsaOC&rqD^&b%!^!oXA3qJHPMG^*;{4}vt3MXoUj(A}&^`uNU|oJf z%=F+ltDrwwHe9?hl$FlQU~Qz0c=#9|wf}~L2%3boM2NTxRFQqGzV?WitE^B$&g-F` zFV#M8$_z1fKgL4A^jwha03VIl!c9?)n)6*1j4QQIaOk~{UMI{+DQ%FAOCa&?eHb$s zREU0&S@!i$IrK*L{F27IQm;W5M{I2^QX_qwhpUJVDBatKpdDgN596AC-z->b3E-hqCsL_Q9#wH4=%K*ca zeT2ZjVQK@4sS|2_J#lLSaY4@Q>!sOxy_Aen8p-#^JhTNK6(;rW0t8e%YuKE*cx*3K zLVDG3O(>`0RQ#lJ$XYafZEfA8UV#ug<1nbWy%?qZl(4Yz%yr;RM_IYWXlH_H>S{g@ z=u~xCPB3L)W@b*{9N#O^{&X0~RuEoozrti`GhtcTaW71SWXub0C-UmM-;WhdYu~1H zQeyq$&vS8jy`l%YE}KnP7=?v}M_AXbYA&a9)gb6?4JW}rf?Me2R;V~Na^C0sc*)pS^177cUG}?o@yAF_ zx=_hUlN~7?3OuqOj7+v`W~6xzQr&v1SH$|@9R)7#QqD_uhcdP<`OCK&ew 
z+GQr6ozos`EscJK>~doFE0JZucx#5v-B-Ul44%J9<1Tby56^GMbvwLlvsV>*S7FJVvBt_$4mYNzwk1wSVKVxgoI%C29hSyECmPOYt>VT$YCL(pEc zlPqYzxRzIFWuM$oZP-qn@W}Ra_Kf5;Q6s-1QKBoe8C6xkzCMD_6e`o5pTCCQr;~>W zMJIOuBd_;=ToTlQU{@1Tv)@Yg6G5P;ITrhwRs34Vt>;+3Hbl$~j&-S#PEITL>QiHQ z)@=fwqc+^CI?Xd*Z5@2Ji+)Ul?|&^~QT3YIaHNh#*~oBp%KIgioxVQpo=nM-3hHyU zC>mtYIMH2Xe<5*xdrp0jtm4gqOa6vJ>absW&zTE86WE?rjpB(HwB5aAIU2Q$-QZLj6U9&Rg{w>=$xeV7CTyuTEZAQ3=08zbLD!)&~L*u z9>gSLnweFzmG*)Mn48AE4(I&sIvC@e+c@=VUH}&=GWtoZSiQPoy<%c7(=fuB(Pt$bg9haTAwbjP8|q7|imEgo}GV0g=E*olEh0wFd$AZVB2SCp0C~+glmh zsF0y_eBba?+Zo#B6(s708jf9(7O;Cp4cE;LUX#o-%oW`wNy_`av8HQ)d{B9I{;Hm? z=D7T<>q>~D={Msw?qU}T5&;p7b&Hagul2)d{mPf+RR_bDsmVD6=oumK`jy@kwm1V% zmuI`U=~Aq0Y)gj=VR1iysx<$O)v=1`;;hYAZ1g(YNwaGv5&+y)dD3CjtBtu0V@JKh zsOWZDx9%!JeqPJa^={MW&m)5A#HVk&qSq_Cq{#J#65a^$V)E!JMDRZg{a+n((88OD zeTc*96^oF-dJ=NqZzY3piQG4@yEX<5lSVhkp1wi6giX`xuwMfdt+Y2j`a#Nwr>!Qd z1~!j?6ENCE46JM4@K*OampbhUW-F+Yn07;nHp^>#_gu@#X5i%PElex;>b_CE(h11D zMGh3v``B#_O|xu;bt+bMTTlB$uFNl<9?ZnabJY*U*VOO5Ups0*`ug85t=Za#Z@ zZ|F@tKR1Z>l{a)=C$?b@k01LW%v*dU>P*@TaMNjs{hx~p^7E@FD`hhk~ugBfi1h{41=?%pTOdv92$~$H!kMdiZ^6RNoC=x82G1bOyaqtK2Joe&X1E zgnVXTz{SVM*H&uyBF5|VBs%)0mtJN)xER7?7q_FM1FPpm%=_t|1C5W)K2}$u!u&U7 zo0-LB6w)%n(_4XlPp0RGcMlKCQX?JZ*V)pXV3ck5yrIzqe@Vs>HhTo9?>B!#9}Ig6 zlX`Qqu|=((YYPGrsQVg81bBklX#Gw z=4tMiSDu36wjAgEQnE83O}FkQ@9^!gv`;K*!m74Mm2C2N22Q`wkGHhrYpHG<5ko^S zN;{DD!?|kPEu_lMtVTN4Yfgu#^D}F~h2V#EtRm>k>Eb7wl3-pvm|dr$8}DP|RvXpG zo8X;7S>Ww-`s%L$# zV)vR(_s{9lgjcBJ>6e;>+=%8wv+GtpYYU+=)pzx%nX(dGNw#mHJh3cvE%r>M=p?Sd zt~lzisSz5m82M!Aq3deLni$m#d1Tpx)=(O1RXL|^D3 zW*a=2^^6m7{uB>l;eOUg*TL>D=knqVA=WUvsg}j!IRtK-7Utb~Z+2^CUnR$7fAD%w z8!wn&QpBF4_2$n5yspC$V?-8|#^G$zoP5&;$z?V#~ z^RuTD%xUiPFl7xOFU`^t8yKwo#!r69Hp@$iPQQQMC1qCHO+*2}91E0ioV28T+CeC# zZBobx9tOoSLEc7I6Z2y*^;2VuDcT?Kl9Hcz!sNGZQ8vShoWQ*ucIT%DU4V>T=ikGv?`vMk20_@|FnBMn(P{Jm%y~M0DJd(+($&^Br=f3%JwB?*wSVA0 z%`BQ-PFYF(3WoPL;fA&HZkq#3RnLeNSxw1uJ7-=yJsp9;E@d|tDXt)-NQ1{?254KxEc zy?h753JM;#%3t@2eYSGiMs-ChE|vG$MlEV_NL6`{a9HhKn(G?Nm%!q&uac!TuQzq9 z>y7y-h9i2y3$1?~c0cU<%a2&6AN2^#M9-X`9i+PK1`s1S0lM7Y*{L7jdv9%axL!E} zZnzLBQERNAoCX&vU`FVIIgd{roP`AhMlC+_y(~bxpKP^9rlfGNX%y_<)|r66gOAkc zUFn4Nk010aJv8mTNppYn@dl}~$n(pDw`a*-k6P3o&IgG~!aizE0I%;dz8g?@grUce z({oJFpqsg>=YadscHE67t?VaNPpVwy*VU%x)c)jvnioX3J>h|+@h3N8UO z&E`A))AuW%B>fZhG8-DE@lui38(kdAd@BV*l`r7AseN!jig>;$Y)hv0vBg!RH?4{%8Y!mz&{FAPp z?EwIHZ`7(a{mozx4(ku5AGRUA3kz;f78*3F@2oa>fob;;o5F@wKo?0w!KGF5golTx zZh637*L5{>yXgv@b2%~y*n*0Z`01|tx~piMu3f-DLE5VIwE8=D%KTf$u?5MZAlen` z?=1I3U7>aUW;TemAx-Bow7JdawVD~X_{%VCkFxe<6Ltc(;fH}`Q)25JEjC>@#vLSOu)gM7hO6uWyxnY;w zyJ0!eA{h0-+W4*gEt;YcEZ3cWHzAYm+dGjLkuZ-B&MoihM!TR#{I>$+oM0*4t0X#4 z16y(-4pnsJTm?7VFYirbun52y442aPB4M}pPj>naey0%&09g#eb2)S*8-1|*llU!3 z^r}ZiQa$#y20UN!XK*SvlCNbL zT%kTP@@d_ouwGr?Wr=JdwV>A>q~H`Q$8il_gNgzaF`;{~^wa(WU(wvWJiuU}6!HDL zgyBcxuXL`6Pw)>;3=sI}DwQKjA%1dHMI(HK3=X?3-v3-*-|9szCkKb+jt6!~_605_ zq?e!d2EZvP$oRZO@ovsZzM$d!YsQWA!A<9_#jB)7TYz(GdS|2^El zykyJTbB8KH&^{|Wv81uHqv6$rKdq}wuiy$bGWv1a42VYCK+z~p0eaxV3>iKT&=Syu z@0MG)E*;Ex&3x5`quwyv#Us5pDBn_ETK8{5KDk8C0f9?-u&u3aZUQ@EsIIQ2wXyg* zYH-PH=FXlUKI!68oWXwZffET86;h4p2>4Fk@1qsVUW&88UT>a$xi%yBUrT z$%*@dwuaMRmS!*7*g#HBzT)x(q#lU=G8avpx(2mk*dy2Ocy5!?ba6e-xGmGtiS=7F zxg%sbJBR#$Jokh?H z5vMNqWsrsvD^7>x>54zQ0!^Y+xpFbPfy7OcA z^stWS{ON8z41P;=4Sf}g^cjJ`O+OPSANJPTEy*Q_dDL*fvzA%`^+Lhbj3^PZk3A1c z`FP=MPm!Gi%fE&6|h*;BMP7S59^@Se5)r(ez=t*gL^f=nF{J~ z?WJrDdDZl9MX>Yy;VPY{70H$p74RPO2@cT-F{DC#^+UsP{Yy@N-m7{i`H}<(Aa17R zRcuI!?S`6d`?u$W0kVpO9qv;?0lDgWE~wg{zh)65?b{DMurWFiFdE z7p=5Pa_^B((;zeLCOsK86mmUOciATrY}^~su^W0WO)@I$i5jNNwA4Bh}No@w9^4 zWicYvV&}<~tn-rMPC+49up3zlAk?ofXjJ1&UVVKKFjbG{ 
zRry%lb41=@n=#}1h)a!z^Vq~W*Rs#dN5>NhRRFp)=O ziI*HaVvpa?be{E9I(X2V07{aN7;QAAXMn={K(?hup z+irh)`EV1dqv*0RS#5W;6i0Qg@(nhs$IR7m_?a68AjNCH!oBweAKW`xvSnVq`efTj zcxQBYm;yOgW&35oO8qTAr_d1+Xjn|`Morgk%MbA&&ZuH;#0uf3`KDiH7BTa3GfOdP z(T`MVQ$$OnXk(_a5tBZZI*K+Hw5+3kf!jnBk{=ZhbLeqsUn$*lB_SmZp{t$Z^g13) z6Y&7OYo6c|uq9BweXu+K<^~u(hEZ1H011>cOIH_Z-ytTeXO*e;RMx;{DA^C zKJlIIhp$V=L{4`*{)+hwBP$qcCtZgDspB$v|8kY>fBQOq2{6j&RTmu_OKFcZ@4qJ< z%G>L~CYltJAYeVsu9~I5eT?{qfgBiD*# zF7V(CpwSKh@hL-7SV$rmklaK0d(Gk zpbLbQ@m&Y~pru5~f39HPu&yKQAmi2W7i(&JEAh|#($tnrMU^KXvwCOs-aGy3>0VqF z$ae3PPg5Kl&*Ax=Yy&> z-a>huyBP-SHxg6pwBHsGI?Su9(^V4LdZK(?$G`M^E6nTTo9wht@7jGruh6+a+YJ*t zJ8T#4eC1EWB=jTyTD=v`ZzVZ79?-}7U8;Bdf$Mbg_O@d(}D}P?lxvqt+M*tNV*@zn-Z6qmf^;?98Z0-~#f5j8G2>)=N zuOY5D%qNN6!T{2`2W}x8E#JK@kP#bKE zbD8h~E4`_ak@NCuw=Bryl3^oSPZ3&SqmP%Dy5hP(>hem#>2zi4O+ojFTOH z%BH?7A%=<{OxoWqRD$Oz8UQ?;`fUf`4AXpgQPH#G-Bm-}_#Kd%jF{9HsfVYExRl)3 z^jruL2Vx*E_Ffe;4}JejW-jyi*j-~tMqrv|<}e6`#gyo#tn zg#BsR4uU;*uOEybu3H$$7`}VRVEyIZP%-NXH7Olc8a~UwSxs|X2d75$ z@3`uBBfC>YCr~Dr3800#)hbyaHKQ0DdzNpC4f`zFCNSj?l?@V1#}lAgR59tW#`jmr zJF9FL@+E`0h4SI=8jkz#t^)EERnNmut6Vt_723t4l8IdU&U2RrEVf0N6yopH&^zd; zWx=Y@0Bzo&iQgC9tpg^xWyxOv!_#d{6LaX-)d*CQmw#Ardb+J=_y9~#@Am?T2QHkl z7`YwB3oepsZ%6A$e5>g7OAvm@G>ctzfmHPsSN|%c>{rS>Yavn3)eR@P-`^C9_Y{eG z++Xp^?|lhsDX$a?R&PtcDjjtT1yWBy*I}IR>lGbUf1QK)VC8AilL(Wnif$v z2*r2%CF{p`-xoYO1*rwqI=O*+@uVvf4*d3^fyd?)ISuaUaMc9m0=oY%;j|r8>;5yL z?tjb;x(Kewg`{w=5Zc~~4f=)cpYF?KQ{5UH!Cs8R^NyB8mIf)V%Z84#4$WXJRRqIS_)I4RR6SaeC#>~f(#A<8s}JyC}= zV1CE^UHMw2i{loBmcs>9fq{WWCx=_`jaAsB8>*g$V6`H#<>yQIyol>Af>X|4|&q@FRSYuP?O7>o=k%{ZMpnuI^It+K=?kcJ zHo|+N3egV2M6;*fW4SZx5bXmua9*+mDvrg(+`WCmD3O7?GvH`>;O?|qp1R91VxLhy zR_5u`57|8;12LJHVxJbM;*A-(SE%WMI%-$-N~(pQ75rVQ`?jngCe>8b6B<=ZhRq(t zkcr?~|Az}eLw~D#P~F&EIeR%OGLl4a`^UZdM9cy1m&g*0FH7GIGA%vamb()o6BAhj z0s=5K&)cueyB_qtNr#l}i-M9Th`j;Trp3j@ZNh5@B8c>NA|C36`j&p_qC`E+%T+1D zPMsLu&gyj?irT;2)fT%q=>;;MM?R@U(DpVN_s5MNC6WEUYCme{_ba7dY=&acdN%aY z<#cOs9^e8_)I*52h|()6<%QlJgKW2x%{?EdUs?RL4Dsx3`oh1@3V8e1)z>Wn1KbJ- zPyL77SJuLd3sQd*qiAn|)8_RueYx8WlPNnMz@CoqdNsMD$seK(<>3K|* z*Zf;(Ge1wG(g|Tcizy0aC!KkwfJ$j9>(8_00|zUyEw~6x_w5V|~%=1Gu9-O^b zrS0H>SEInLG~V(jW%ta;f{1k^Yq;;417daudim=Wv&% z&;w)eU4~(z%aw)1>Bt9;`ueHv08QF~6CcV`hdY>L(6b$JeHRobn8rtEKH)5Wkq)zh z^P&5tvhI0;NGCc$yNs2FM$#FAUsuUFqiq-3 zj*xOxf8G)L#0T0320nxttejI@7}}+=UZD&7H`C}c=ERW`D4A=31Xa$*2DGs9mQ7j3 zfh0~wO)UnT;mCvL7X~IKITMqtjLgiv-EI1_BaoP>sHo5j)#Zl)PVUvw!ZZd328ZJ* zYGVJJw{GQn^?&2w=H`C7_m-yDic6%Ros5jkWxZTq*~+S*cqK*uI?kVGXz8N_4sMW| z;wDbpBdyn6O6z=x-kw&Ko$uTFI}LUBvGJU*0v4P7*$dA46yP~nL$uXC2g}NaS3TC! 
z)3R>~v7AIjch*h^RKZGL0yiH5SStD-@m&3L5o18FcDg+lA}u7m>Ybf0xkC`|$U3=> zc1a7(B-f$s^P$qUxv&;Jby6womOM~0D*~J&?_xgFUF<8?uY!1GiHu_4ej~fyXSM$< zCarb#md2a%vG2@arG5**U2u>HUzYlFDT9`^n#bv?SO_ak@-R#f>gT6`duHe@Bi982 z0sxzr!G@Y&l4=5ioF|n)eu!y#;2;K%**!`YbX8(M_{ofPBbXBl| zaBJHg5CiTwgB6-|21x(;i~rU)=(7(Vypp-a1FVo91XioX`(g9v5e2hhcJNmmx05k6 zHCH6OA-g`$jFm33g3mN75P#){2r2}EJ;ek*PkeQ`ei2QDa$N_6D_22J?GuPkDu#Te z^i1ReRZ}25%!SF)qn_LRK({`C4K>@SYGPQK|LZCDe}gX27QkN<`U`G22UcJKE8H{A z*2GxJpRd3G2z5gJ^j?R66$UUXxS};hF0?{C{&jESQleLaU^-L54K4h74njzUInaOIO(WQ`G+R) zw)dlb+x9ZjMtI z(`ty%0}M;vhotY3K*c*TDn9mMTj&Bh3B<($#KpYl!jFx&ONhxUUN z-erBhcM+`yj32={`Zjdx7hOE?SWnaJh`9c?Jg;Z4<1H6eUjQD|J0EG`ljz{V1=p;LZk%52LY6e zM%1k%>CgQv@ZP&sK>oBd*tdXleh&PB&{}0Y`GrC+LJZi#lxa^C2%D7{<}$c&pWC08 z0>Cp+L%<4GATcbMy>o%j`qa7?+QglHGVoV=1BRH}DXs^D9leZ3y!&sl>3{th^clt$ zUVp}nwe?@ZtoF?iAl-{R#xz)=UbG6s%^-o1v}=EiQd#h0vG=-AEem zjE}!e%}Xd?ZGeH*guY>n6J6cfqjF11)&|%YMDb@U_Z-e#7+4TLRuY@#& zR31Qe@__VeS${ZR;7fp@MX+y>dy82?Ymy&|=!AB!-)m}36-BKH4(z3AtivNlE2BHI zrPNX#F^360IN%bzWA7p!mW#YsDC*|Nc!+~azM@I!I+rP7S0t_)me5UqJvDDnC!K1M z{VCcvHr0~g$`p*Qz=1!k<`j(AnVDwh|59aS3(9N#B+;)puo7EPqwY{g zAWjz*mxVpF$^(1$0b%~Lp^Bd{Ev-zSHh==7;0dz*>F;FXTn0q;qWMp zN^Y@wk8xh*)AwHLVs36wDLY+s`f^-O@c4XYgvZ6EXQNkP;+>(2(MXG@N;-kggbw^W z6F#@qn1*#65hEFom$XjPvV+U)%8|)jA#+G?UX5e)ZgEV^M0R_$lR0W6% zkKuX+7GmLYM%Ve7!$$_qPG02Zj^PYh2b+#uX4IHzn522KVir+XWP;%p=lALBYj@Al zQlH$lUo5RsoiVR&Z?YWm$XrUF5dQ60%RW`7LpwFyUt{4~AR^47!?Qe`%_i^>)-i!W51&C4%PUyg7)NB2<=>7(5lipI-)}c3DbBg1%<_k zt*s2*P2k~qsAY9-?w>3`U#Z53C_EVXkrr!B>hEdxggV!*lmLn(=szy1O&nc!3~Dxs z8u}HVMHM^KYK%5&BX5np<&l`%*{Lkq?0o^Z=R9NY zE8vO%M(Of0(&?9WyFcp0HM`EBst-YR*2s=!R!P9=HA|(d6uD|>zZmS}!7!-8So66| zf%D9+@Bo>a-L*TZIacJ9mLHNq|7+U1i41!Vb01FK{=4Ce#2maQWzb)s?0DxW6kpQl zr?|Y$Z#N>8UcYl)X3n=DK;AfXti%^P@EKQQ=8!0~DYr4#mTS}!UohvtZ zpg_$kYaZ;v&XyZVCC>HRW_FxTu`7yDKe|j?U`X2_;wIIRA(DtJRj}CO|;`lh;lX z@(5x6JhEG8z@Yi&J&ZoGMzy@0ZbVg0!~dbJl8y{IICSfByfnAU=D;S?D$3(5Aigf^ zi%wBC@lFTRiE>4OL%`BYJbw$s|GN@^RK3C z1^Fvu$RVdb*XHs(euiKP*SBX!(v?<&s#|?nD+9lR%pc?PZaPvves%X4;*x(?q&gZI z8}Nu0Oz;hISKG0`!kKRLzu6V$51ZN2QL1R$TY$@>Qo5u9elrK9s~sL>+mfEJjOWx^RBnqk>zh02Uh`-j`t4fj;T}JH z;=)1PCQM{w5|bmvJfW(;zhmj$cd6bnzpkd%#KgGF*u<`jK7vtL*^$?p++N8)?08cK zua;xQd$TdK_Qj7G<{d%LUJw28>bD=lxj*(Fu51){&w8}p-rt(1*vAyxU0#znNc`jFjG&4TT?xvQ9<-q zzi!4VZD;3o`q#y7o-+uP73-&)Ik_`iPw7%97ri|E-yP492yKtwd*K>@nmFudH)%k7 z+j+Dz8rBfhG;~a#_(QJ*#m>slpFI*W%wuy>dN$^p606wv#I^s^b60KGaVK3v4Zb>K zjo&Nxj9-7?SD*b|AEI=HZ~TUuh!bX;L+7lz{16>)RJ>iKBIpz%jm^g&_f~;B4A!da zl$5{8bEooGmU8`W6f=BzqMW$aidjPOR?dpz5Ilmy@WMhjqk-H;bTb07J(DdwW*inp zjTSN?N?QW+7(=}6dbK�bJW-47u?0ANg~XDO&-Mol=AzZNN7#&{>N__~aO7*Hoyw zjSNgjMpE2?dPKvV;*OG#epm4wIRaX&gZ={ zCXriVA@BWoyg;$_!WHkXjyx)s+x!tKTiUwi!-K8khDsI>vs!Ks2EM41?&~(@5z<|= zG00Rq8$;N7JIcr3;dc1wBEL1iN#k$vQQvP2iO>q{(M~@6@{VJ^pd*{SD$w4S~>8&Vs{p!7K=lQ7(HXW1asTos9)#;G8T7ciF zhLGKATud2)xWS?abO&1!U+ZW-SNc@_{Y21o)R}DtILxsR!598Xss+l$>(sh<%VLJ1 zlCZ`G&5tSb-Z@T+b_-OgjucnrSW+TybSH~7_SY3OaOrsDxFn}gbB;c7N%1|LA1^kj ze7@k3S6Wp)=DvUUXD*~&?T$`ng|2YR*^zmP*yJn=oRZpT*GC@xdi89}kL#{$Q&_Rv z4spC*CYI!L^QEAv$_(|w)55Il#qPT3gMwBB%GFlO-P~@_!j2VPSr+(EKi=s0rwQ7@ zz3^oRXC502X6e0DDCN3SL3e#=ireHJ_xTM+KWzIwHMDcuDZ-#3%0Nb z@(n!h2Yz0#wh(-TaCi6sk8k~ha1I|ZlJreD&? 
zFTjJN!z6acxWzqJ9NklHRH!oFnhPxoB^Syk@7~gWZMF4%CWph^#{>3y`MM6fPuZd4 z)2U{YqQ!%Vl;$P%?!&r8D}90q6TIaU_s#a7ucZkpAJj)c@T_HS-(&wL86;beIQ%QQ zEa$JW`AMNiA5QN@1UlZ1%o#eMy0fPe8Z8mQz-^Y8FLIX4EpoO#r?RL%x0T8n_A&Sl zT~@buU=5A^?&^S_%jjpxgUQ^06vHE9NAbBy)(3%a_Gs9?GdcHiA@H|9&hW_Qzi;(a z>x|QQ)bia%v#N=mQ4z5)=%l3m&?2=-Q((*_2mhDSF<~fDzTiZv!P1UoOVTh!OjvKz z&2hXp4I%C%lp?jWfnRAWt7Wrnpd)oyk+Rx5^vqqU$9Hq&xuBghEXIXyc}hQjw_<^Z z^Gk&Uy9x145uG4MGQ$r~33?bb4?Cr1>mQ2-V27E5}q>)xjT zLGDVf%WF2VkGKj8B8OwKQ4cN6df-F&cHscP+qM%<8-=U@ijchq_z zy7-39o-R5-8#(^8eS>{7kV7j}yx_jQ-P_S6ya0XzlC-^Ch}-LFfqU2g-*^^6!_ex# zj!I?Lz{YWy@4`Fm-oAe`gUkxJdZuG%g2&Fe?Oz9FkHH~_3d0i@vBD2r%pfZMTF z!*O1RM=ASVdYageTQU=D9-bLa!*{F734;xZ;!f_k%hp!=d7-WfN7o!y$aH^C_jYaQ zxhH;GxX`9r+&@F>NF6`MLSU{YgocE1Sp;4z54s0|F081N?b6No(p<3yqC#8}j1W*)3QE zmRxl(IYFZTFh}yLB_;59`>cff#8+8Y6*#PG-hJ^_(%BhSwzNtvzE^UBa5B*{-Qi0? z^dRblA;w0f3YG0 ze;5Xk6!+UyKndE=}`ko9A^Z>t&%t8gdKGz6()|;j)h|T z-W@biQ)(&u`ns^5r#cp=vU(O>pF-4D_=7Jh0`Kh{ExE)ngZ+_nCOBA!aIbr*(KHW# znO#;92IgTgt8ZN%K#$$e| zs%YD~_i(%Ys}~{VSJ|h4HBohnU^BF2j>URA#F0~WoqSP`UEfvxVIp2$cWn8fFvrx8 z6TLgw+K4%tSD~|}bW`q9i!%&_K6qi~cCl*ZbA@F^^|!K)>Wgz~cFy`lK02h8I%%R( zT8p9WF=l;cQrKP;v;7)kr~nwXho z)uECa*&|4mMr`2}+t&L6s?t|kIxv@3llVXG&PU3SvDFsuf7IV6txmq~#2n5aZN|+PLCL8l`)gSkO+VE?#N6`M4vFsHnt%Po|L25; zuW@oU>YKI+k8AtDtiGVX7ivDf=oy)TFYmO$=xFtG2UkyM+6H+&5zP5 zl>Rn0)a9Ix9c)vO;Lh}`)7s5TeHP9EJ8F@l4N=rA&=4HCT3nE;=y+VM?nVtgj&Sqd z|K!IoP<$*BXJn+yz8CPVS?!@!yCjIAx#XvOeL@Ept{izc{wr?825OoeMiRN+Pho)X z3dvUI3#qYW$hFEF%C(+8dYqwj1!+U6QnC8+Mn#I4@~;)!fzIYGW8e8zC!P+aqV9H1 zmOD8XA|Vwu>t3b>Ml~kuHic|Jdv*LxJ{#zul8`+VJy&$GR!i$z|ES{2Gmidsa`)2J z$V7H7vOf-mEeozslDV(7!h^UzMnzr!_RVroy(9*3ujbBDt zhsR&O8x(4>TQfgS0cSr+=KApWe1dM&*j5Plw$aecr}u2~-HQWI=ac%(=dPc~?S!V_A z1ML$J$1RX9B}J0XFJyEmMggz7%2?NNoMWrJ^pwDB;;mohDx1>IVrIJa)Id|%~)iYh;mOSysJ{KF(}hn zBV_gl)?Z;^dHBq=zD;e7@@#hqdj6DOt;AYQrJGkM-aLcJzpE~;)OM8i`D&lSYn1AQ`d5rZp^!V7c38pQ)f@d;oRprxi7NBuE^$K%+sRN0~+d?p(Ap*_njAyjoMCY;j zM)_x*g=`4AH_e**uOH5Qg^S1L59I)K7?ROuWe4BJ#-?Tp0wKgFd*uKz;NMSZ+FXbu z8a1&K<+(3YQTxD%TYDs)LpLjxADU^J-`4F@cs6cui>h#RGTw#(m&Nac_C03$H2~e7 zBa>PF3Va%6^y{Ax{GR9EC}=83@q1`0Ebe?~Bx;00l4CA&!osiKkU?WlDz3Vb9_B~c z#@FR(>tc{A)WeaL{_~BIm0x6de`bY=$71theQQU#7AU$XJD*sAZ%=0&g-=B(ApuIxSlVAphD-&+pY~uGM%3@T3xBTFt-SYmo zt$tnrQ{kgONHXS(M?af?wFr3IxK0GQ#S7k*k9y&NTs%}H%EKC$DZO7^L$3GYNZ%T;Y6ItmbRbz|aBUM8nDU(EW{W{R+FNyj+|3%|uKJ>YM)Uz#C^Yz=M zlasfxjy!QoBA*_62bW|tt>Bmdgt%LQ#5jGb+9q! 
zj!?YimWhfK6JfmNlqtLv)-K$HaOg9DYi|$Sk9YM8Kr!{psJt+H`9ZEqo74r&BK3=_ z6t7?ks6!Btzo_U;&cN_Ul0K81r*+(8h{KXBs?r99cle`MbZCRtSD0rGX75l(;#t#b zlcZ9uB;LsojJELdi3;shjg`Q&JDDYGJnlRVcS;Cm2`sbb93W%j#eL%r2pfQ zE8H4q`SB9U5UAiDGDlAZg39p>S_&zVeL#|i{E8xWD?nUy&`K5Pq)Y#6o?K{*=o5;x z%D)20FFi=GNmVh4A?Mjr>Q3;(957EZ@V>P>+Xyn*y=}^kW9KVx6M(WSM~MiS7cGDv z@jajq8a|JD_P<#V{SsqoIy0HyDMc%0`6g2OH0Ho2{iK_5iW}em$ z{-K1*a0UT*w%dVmtW&w0GB{kmJ9?-!?S3#0Zw+`|C`l(@+J9kKYV?88I8FIa0gN0< z0sMMTZ0quQT%QkIR*f)?#9}wHG}G%{38AzOlblhQqmA?TOhf}8C`W2!>$Zxn7UEfl zG_g+yp4Ai>SLv2JjNbx8DzDZbKOVGpcgJaDJZfbRVY<%1z>pkxaQkW=K@XMWMv(nf zwZ5yX>->cM@Fy99x>S^6t4lAx z3wviXoZT?{2@EDNyJ#eHBQM0EVtfIoUGVV36{516{n;wCC;f(0U|=EYlwv>e9jKu> zom|?OdX78#Yvr$qfae(iQ|53K@51Zz?8!10e4Si2=f;oL262Ww!dnkj@56cpLr#%Faajdbe@*X9{;c?g*e+mM=;Y;AG6*d;Xxt+(f z8kN!{@OwlILqpex^?tud#4n2BKCVUHo9A($lWaZ4c*_Fb`j_%w_8i{Uj!6YugdG^) z>3$qp`ik(K-g8^S&;&KLy!VgCjrq9p9qtBnyxZ#2yul%Ue>(PrGVYJjRth$U(u%5G zQLF!F?r9hT>K?Y{iISW=yzx^hP&l!2k~>(O;p&UAL~pitEZ>e zP9ZwMMj6uNg@$!Y>8Xm%zEZt#nU1b?tjx}$J(gEfODh8rTy>L+>j|aw{xk925wn{d z92tWX(8tw4d?w>epqYt7TwJ`^d&?5XEm#{XQ@VET+Cae1mbWwL|gkYykH`Bl5Wb*c3~^3bpL`u5Gkf7)*m zaDQ%!idN_Z;`bmQTg#?3gXP}azw#9VF#J9sUTD<)tY$mcD&GXFY;8xhyon;_cs3E~ zxt7z|eA2HKS`9OtLuk9NuqNg!QLTuJn%zyTz)DGPp9lBW8tkg)-c35x%b(?@6d{t>(amQ*}vCrGv zTT@+~O(appt;Im^<&Z)WqShY>Y$*Eppj?oFD_;R^6Rp@i#33ZqPpX7I0={FfBmL-j zmMZXLZ-#+)(=qpQe1qLfzp%) znL)_6&N-Y!N585>XZ1ZnsY7o@QOA1--DyD7`*oZ`NwWNaWDsN&^>rYKp7E99B1Sr` z`q`fc5U5*H$9dGGWu2}p7Xm$uv;u;TA$gLp4+z^Vk3g*~3$#?=QpTah`|!q9#rC5+ zvn!#W2pOW{Z)q&S)APHug*w7-A~59~C?pt~*_*bfR!VuTkMyYrR(^Y?KG7gV!2CK1f=v zaahpKs`je$2!b2Fx%jhIN_nPNeX&13X1Wlq+ z3nu!`0fP#Ba=ie5vL8qqqM#i23J6+GxrGv?)At8-qF8}=ni+ytRqVc7na$P)g{&#y znF~H|B-|=r>Nl`}a&jsZ(8UcS4LNc5$A{J)PMMx=(!blu<6e)?J$Mip*8%zNm>}g_ z&g8ezP&+yQn%?9i&}rzfmH{Nbd7J=97ny9umoIp5791{Y&afUI0I*ka@$lrz*30u8 zbhkKJPoSM4v7@FI8NhwdUU#~zHpkH7`tBe$4uDMxI`1c2<&pOXC-Nv8f}03I-0VX0 zxQHYQsCgR)uYK((!m{=i75!)ynJiMjzlK@Rc{D9>;_h4EVv@}*ht-Zr_otEyP&SGE zfl%{bmPTRW(mqfKhEZ}TeYU!~nqYGbS}h55H0TsPl??HgLFYZg!0p8gt5IC_#i#o^+~LKCcNz^^0*yIbe_S5P*g*_L zFLo-%b)pc7u$8QBn5peLeZEmX_sB!`QeuB|!Zf7#MgA&gBTk!^YbB zBThnL60!~CM&=h2opgPNFI!gd-BO=$*!5~%QnDJ;+I5YZd4 z@$F0&v!Dcb6NK%|fj-`&t*_6ULfI-P&qxd++>ha6oDhFD?RX&rTpiY~y2%0>Dz%^t zX!3M=3aAo)U&GPd>Y>H6=Fxbh_;OoGCHAt?+o#h-wqM&LEL37X@03xhuzpJB%j2G3 zEAlB_H)E*<_b6u)niVj(4$-CX=ww;>FN^mN`ui_`+`9hnDAj)kIf& z5Yv}>evul#Hpna}zwetdj!bz5@Vuw47*aK3_%t*Ls|E=f8|r%@dAG_I5I`w zr z-lmY=yOL}O_mSP;%I4T>d({7r;px<_f2IEP?aEI+vHSODa^dr8 zelIfE9mMRB^P>Zx^+ra{4l-bK{-kaEP!o zvIwlz>m%C%gKOK!uVRhh0&PIqZv)Z=W?ssl@80LD-TzHFj#Jz$_WJX}EJ@|{2!GX@ zAcLr5l7`M$lKpU@u`5nVE6#x^%^w>-w_}x7|ErxP+Y zX~b+`y0_iik7s>yiXK8n*PURTh+k7uO#|hbsK`i~dtsD#i8`5P1GOOeQ~;^ZknJ&? zQr2+T87R%R%rpd_$@uQEaO$`(`JO3F^Y?%&VktTYiR~ZA5*9U3XWafh`QugVa_h2? 
zkhCQeX9&AObf|#1vg@VjyEpy_%n3v<&t?4Sy^19k&W-e9zjT@C=N0Qpd3n+86V1s% z|Dmx#ow9rsq6%I3(J+v-}pJ)ZeNkLgTw7vFO<_=1%;A!pMndAUyLnB&ep?xLtItPwo7H%pt`S@ zRx&QcSp71upbU(YmUJE&-D@;80o_1%ck%RvU%wuOm~YOvqJRMMr<4?FwlBN89!rPi zY6YjkgCZ|}ylp_y?cHZv0c*fH870V0NWB4;#=4tV zT{J1^KePbI2+ng`C7TA(o9hsVe69%A_@Z01Q0cxtmgu(KZ|sW&DI4}VWW$bm(!aIs zQv|YaU)C;kwC#l-#=iAczJ0CmceS&#hzZ0;J0SelWlGP+fpx|o7ewjzwyAa<1a!Vs zpb!s8#xdBIi90#?&c;axl}D7nYU%NNJXmlNPXSwr)335WrKSQIsJQ%sf;$6OYUcKi zl0Uk+5iLJu{|!|3B^s7zq`t^^t+TmqSI!9URut9$0V;~y$yh(mffGB^qXbn6Olglx zbu!QoQZB0Ad=Enb8F4Zg{pHI))=a?o)VJ$R7G`MJ>&4WwkJrd#sV?29c%lf3#wV2p z%78>Ic@qHHwAve3i2`|)&yqSePB8n74h(8wtT$#~7#=}|HJ1g`2NBbWxI{aCek z3Csa-^l^I2IZ?$FPwIBZ?9zi=HWqu?ml{ZA$9+M+?Bo#0^Iq}`wZCb7)Er(@S*Z^) zRrr+OmdeSpf{g3Sj`9qoA8;svhRn>&UG5?ien8f$y{o3AM2nY~cdQbY1Dgg@n-id& zI?xiykz5P30EtiK>Txt-sWPUE#CU{Jn)Sz23P&<82PU%|w`5{b$H}Yf8+q-=E&-Tt z2OL{6r35EyXLzC8>!Pu@ZjwZw$BZFxGy1EmkZsvVV?(`pZNcv8g;^Y%_sZG$-I(l} zrw1{wvK-z*fku~!sLcQ=U3`z5bIIs?2jLxC0r=|EL<*HuNz_-p7O3vD_C;9|X8WfH zKx%}HW%t02pvQC4Jx;&q-a|dpaQ9gt6IcrFjTrq3D}Zb}R7_skN;)#}NY*Pf$^-gr zd6}6HfY^w03hSB~LAyw34d|de(<(BRF!6jl>M>FmSV#N5+zf5^p|-s4&$fHfoVWtf z@?7uSm^nz&lh=4UjsbM{2f+DA|FNqlJ(pQt!kOjxYA0v>%r~utQSH{SicyG*idG_} zaM|wa3xb}TO|Tf2xkFvr&*kik?*(4B3DU)nLyvFCGFM%KndrXBjtO-`P>qL-*vWXj zM*2#9&v`D_>&z*Zd}Z#STq!5d+IV=if9f#XtK#Z_N0D}rhZ+J1IH*YmmI@K z__?`VMm-hm8Z`yck03efPA!^PV(eZdwcuNPEnxo z=;$n@M#?GwL6J$#*umU7>jbN??I)l_5Ad>K~;6szbp- zZWyp2C(u0I9rC$*?L6_~($i=_0!w$t;PU|{3+VABFtI}ES z?fg6tUre~?9)JAmp0aQ!a`Ehj#Ku`TW_`~F6Cv2$-Ss%Gs2>~J)RkV_nG@bssD_Rd zz@mSGi|Nt=7|o0FTo8gl{}+*$_XM;r+B!QibS#u6W`PbtC?rw^G^V@G(B1&`ar*{l zUG(x1=$EOIf})V~Uc~YYY+LN*R(XT%Q8REo)fm)!~0#dJ^-N@@2=lHd_)>VfB8EtC>E&uHb@NAN;ag3HvC;gD@= zu#9H^y^R08jQ>AcqclSL%dYAf+jPvhe**p}!%BjN!N3wnLSiC-E(}_+s%FQ^8Wn67`bl?d~=~WPdQJzePu*zICE^Ve2UZhPl2q@gmALO>bUj_w)`T`Dj@rr#dHe z;}`4RF558{Sncdw>;jW7a?q4fZFp$ac8<0JsUH-dx0cdDO8abOBB?5X8^2#6Lfb^w zGIekF=)~5>X60W`!q2a^va<4@PXbhCnz|$SQ__z+oYfEBAq-Ajx8NC{HP;GF@9+-| zTZV=C7w5R|6he^`-z&PMlWf_7>ddUbnTfyVx7co8^wwc>#qjD97K;Xt z4=PG{2530=511RB4<_$_L3>an_gm+%CudiRI-EvapPQdg3?WcjUXCa!*~%?Uhq;oO zo15Pj6&0<#_0yhu|1u~ps?^D3tL}~fntTH-@P}Tp6X&}QP!3=FC8dU!N(8h1Te^=M82i4DjxXdD{w0=H+O@rH7W=9)6E#Htai4&S7 zv8x8~wNN*T?yiB{YWoEX3zp%glYlxX6lxw=g+hg+O^ntY;H9PavT}0T-=t#rfr`{c zK>sgoe+MbO5Nu3HeN_?{Jgi!i|1L=W-{PpI3HS^&2#uJ=XY_c4#A?72X7pV-|2EYA zPoDz+1d=|C50R!QBspXBW?y!KNZg~BYq6odlR079k*KmIb!6?qY2;gYH(#sJmhC(1 zIS8B`{2QtHc}Z#zj{m;!{r8Wsd4s4_H6+rD-&2wkyOQmqzsaS-tg>^s_{(6}{sq=V z!ZS)*R8qdS&;uC&D;f%A%nw<-!aeQgR47q^o`zmMk}E0?=jqRYUHD}2Yst>0r*C+7 zC$vAIWYQL7)zwwLd4Phd#ID z>>3&!L^ffPX{?H*^PaXZ#Xc;3$a=#7VoTruQO zIDmD%WwTH!nG;tuO89n|nBWHuz0Y3S55lzWHDJcgrgshY_;2f`GkeE<^Zq8oh*gb@ zyon?B-+1~U`+TS9-|lCI4*u$bY(!gY&k4PLsa^67ua+Pt9gD-#%)ntv0|^gh0SJ^zNPD?n)a*L z$2*G#o$LqwrP60MUoz#CA=o{3mPctn+zWgVYu_=pgw@qbo*Zvf!#uT^B?6v7 z)rNIL)$Y+gn??5?EH=a=3(#E1E{d(#=*Yn&=F zv=h~}9Xxf;xhd3)?sI7pX6G5&0-j}tW)d~~Y#+ahUKE147Ez~?Z!(r%mVMMHzF;6C ztu7$3`6yB$MyG1Kv+Z#+<6@j$>t|VN$uFLIAwvPBdifr|$ov)D7TIp%()amdA)9~T zj396|0Qf0w7uF!?DX~g$-kMKwhJ7FttsA0s^ytLwZmB$miavS_RSo*?w|WC9DcI%7OcdOb>04i4C|BQVMTb=N80)pt;xZ7~%$(8&)u#hZYtV4s~Lg$SQs?)Xk$z z^~63(^+4QqP6<%nX5Wjq1ZgKsmI*7GBEqLW&?SVgH@UpWr4_xet;YV*cBZ}oyBTNu zg*Ywi#iVbmbqw^t((`tul`Bz}kM;e?P0Vx85SD=FRV-9p5~@LBOxm%#0|P6bsRK3+ zDv6eHmu;0OB7WQ5@_3#ibJ@Q}?4nxwHLoR*DpvRhlNwqF}I zb-DI>*VG@9w7H3qdrQj-AF^{BRU33VM^{*_j{gw!d`pSSiTQ9VPf|9sg}cAdyOl6Q zG~wCyX^Zav{`8NhL7$WlpZe_ zBV5B>OMf?VB_z{{m95M%qhP*{6{0$guxqqeb+_sl% zn|jMT8uHCC5ia! 
z5St(PAt?+FbNpvv4K*v#JM*PROSzS&ZwmuUpk8kPG!(3+H|_ajIWB>?dJp6*d~FXX zWtCo|eE&REd0av($UeK1t?3dr>Q3b4tD{zKJV*GJzANdAMKsJlA_ZwUxPvsPsCY!R zW=|%93LxQuBZ*LUGA zgu>1*ehOY{{}V3bd`2q);hl6|cQe+xnrm+4zwyy=N+4ruy8nXsMFrg$GFq zzFQ<&SlsTbUs;Ics?y`87U#W2{RJ>vc&nfg&aq2_w>-kfHLVDNE|}s${?Ufu(!bJn z!CnA(7Bh+Hx<*4vePrm~sWfQd{pWIwMEtTNa@;7d%A@L*z2_02S@m-xasjaV0#Oo6zCuZsm1FD|_Sx&HD%!+O7egn^XA-;YqGo`-7)UHApon%768E2)pzgMH*m-#vBFN+Oz zVL^>$h21N}XUZ?q8~?XQhBg7y;^7gZ*8O*cS&>@%j`w@YIGA@hSzKi})?6&a*8|&+ zOl0h+C1Wx*xkL^{oiT_g3Om8o!6g%~O9KIy=+{&8`+)e3=ZC|d8A{lRLFL@~W&fM_t=Tq(h)PN(rxo#?COqqv*ezxEB^tQ<==L9lY(ck; zm-S8$1}%P)$u^VP@+S#DY`?XuZRcUUG$FjcKSz>=6|0lmf)UIW5hkiFu+5xtDbp(yyHCs7G_7{cUv<7FBNdSs91irCaJc*>N!InjUrube6%ulhx?ZtN0qf=a!2e^>C~ zWQs1AjNXUr&uDT2I%Gx>EPO6>I`47Kh<0ZAoSROjiD0*O3m>M^6r=}d=m4E1RP&%v zXqO0bl=ZWoImB?YtkiZV+(*AxIV<1Kys%9nUS>2v>i9(r3$~I-hV&Kijv=>4&eP8Q zA2sHT7dRxWMQJ2OreZlp?Y~0gYu33tYu-o{Do1qje#Btx9DDl8j*|HPihBXa+~cY_ zyH|JvE#eR_MR~7g9MOkrKHvD>J?%_n&85b}pOU$mvgVc=_fRh=TquAssCJw%jT78o ziWO*>7q&ECx9#$m&dItzTz%#1*AqvmXF}{{%Wdy>Qk9N#3aIv1s8Q;-I7UQ610H@b z`&UowRKs_Qy?=#kw(R=-KXYc~{pZkbo1p;lZ|Z4C1M_Q&>w&)rBOL-pb;WTxSNPtm zmzq+gX^#&U{r;6Zm-WM1lekdNjlzq4xTO?~92=8-DMeRnEB0)(evrZLI)>_|XqqF~ z_bAndh3_qY3uZt)H%K8fIx0I1L!ye(mxzge9`;sW(wTo`I*eQ$<3M?PK4QydmP!?y zt4LpEA*(uN2LQ z%EF$wYn8AB`bUr%l(18S2_%&Kl{{g%DQF=4VRR+Ic{BJvI0JLz&R|ydH^+YCWzZ5q z&)M0~wUgmk+1sAE4fe|Aozubt5jz80mJvKXuL_hGtTMUAU!{-xV|@Iq?v^nH<}S>U z3p;sb7pug1n1*nB4((7XHGi+J{$ZqNUWD#@o;qF0r~MrI=!~}ap6`pu#re!U6jJGR zwws5sQq|C!lQ@n1Z2UVM_C-VYz00kSvUedWvb^vsystyrjuIL_bPv#~24RI~V?*&c z{v)9LZ=#RvdvY&n$mGDRo}E=3O06E>-OU|+D9oDQe_V`%@ipH8{rS%;->KnV_=E$$ zTYJ_Y;!WxC7m}OFv6Vxn?d<6d{yrfrA_L#yf4yN$WoM~9W%~M2JC5PZR015US@xV9 z;cqf(^$(o#RrEw&rhiRO;3N2cMc<6AaXrxg^9EGk0@9=rbnK8x63QHE*=I9g<47DI zsu2?3mX&n~Qg_>W1;OTX#=`Hj-DSh89mW&r{re7@*euQk0$u4I{~2d6rY7c-h4{?q zZa;Vy1Qvmq^r5I4=|-*33x=zN_ym_`e3_|f|Gs$zr{%*n8OOdpH}EishgPA~#6p?D zwDd00$0-%1>=j7?-c<}o+iO;Ceogv=mM!fHz+CW?gKu)txk`)wjcDJ*tf~i~{{J_7 z`5%AbrBbhjXI(UVmjx`|Gj-s$sK_})@7MFn%61|fYKL*FTq_q!1#0POyOEbtf3w5@ zMcWS6qox`f3LY^E2l#X?;3(Pn1i=$l<0T1RQ3%>AV`Wq2m|@4O)uZAimj8Pfc?g`) zzjh1)zYpjFQny1V2=M^pMYw|a$b^2zf9H@~K$@@-n8EW4yZ>hs!2K;hnAq-auHFSq zoX0qY+SYI39gSe(Js=4t-r1~pQ^Ns4Vqrlcg!?8^rB^Thy-MLS?loD(l5nGM;)lRB zq}_F;C7b}CIB{*w*Encz5Y6*rpPKZ-M$H@E5&6HjQ*e&P+Vh)6z{pJ;>WkHia}A$UqopJw9x0l?j$pzTkZgShHNJepK(Yt;|2WBdxV)#rao`K<{@B>AdN%8)5 z3k!>#@!`qm*(D)W8Q#b@shVaBzAAYOmc@UQWw3p$2U!8F z*Pp9Ue9{|lK`uwAfBQ}RWBfc8b>H84IXm5^rqXFQdARt7QceAz0A3H1`q{G_HR;iG zq9uM;$_#>@a_IJoRlqNC&)zQ=wiAy|`w6xCgX1T0u2&vOgV_{ho?;;qCqnxEp zt~qlD7MU23{Q7%^`)xo^Mai&t;qv@u@J@$bM3pRX6Wk`iNYYY|Cpn-;1wy_=PbRJP z_4U`A@GyHicwBOuJt&OA#0aY_Uc#%c;{UI|g~NdoJvgN2!1mu^4%K>rW8wP!sb&Y6i6@{a074%4!hYrHBjCu)`K3+YL}_(-`HW*@<816*97#tg zTdjX!ldB8n&&)iwvF@unz3Y+MZ;90xDb!9CKe}UKo+;t5UN_|ArRTo0qo}4d?0un2 zL)S9<$jy3M_uEvZU4=NyEyER`iIKjZN;qK}y7N1a%v5%8h%J+l1c@Q11P_l^^exq% z>Pp!4Q!{*txSc@%VZXZ6-1evAodK}QO7X6MxXOUTQdgDud%fb6rxwE&CvU>WDt#*e ziXhq2&SH2msbn#~{&Uu5EMnS@yGR`NcStb!kINv1{fz#V3y|uOgvvcFZ^8iBb-hc~ z{Xk-Ha#kzuVD5O#Y)j!*?&W2AQh}PIu&j6YlzzRh|45GW0bT?>yQ#uwM|A-ibkk?g zJKRgy+lraP$eH#p1pcxJ1yi%w2V7XqdCNln*{o{z9$(GGFc*9jsO~gcB*v?6BH7Z zi>uw<%)ixQgn9LnGcjm`lGBd~%KS}B+M?J}TY30Gpx@79bUDv&qm6pvbpFa^{4fUN zOZKj(HB3yw4o0qjUUM@GLEWZmXqh4$62&v*tdeO8Jtp-aV_p5>`G+&?dl(wXvCL~Td?RP{PgkkMzbrCJBV4Gtp*efhPyAlA8ZOOCJhBqs-)xD9qIFN}#u z9SjJm5?TA6Oi%B7rXy;Y7iEt3E1afqVSEOu^;;D9F1_Je$Ekd&F6j8irbV1lW@96q zXa-X!5opEzxZ*Q;-|kq}unDb3x_4*3U6~cO)mC-?dm)_7$jlB1@Jh|&J@h;E3Z zt*r-w7l7xhyfK4%uef=unx7pRp}aI6$U?o6PuwG{V#$0`pW{zAAd4WSn3BV+I4}Tc z3R&V2uQ|SscqOOjhuo@R2lxusvOeQFYIK@m^#ceQ>dn1wD6Jhid};#!=VpiX@^ 
zur&IF`{OE}uqWiObz166NjJ2gj7@>xvm#H@sVQt76RqZ9-Ei%fM$tZr2TYxdMz7NwHR%`dDrsyqIPQ2#tI$r-b&b>Ls76Xx)J zxF-3YcT|@&EQJic_&w1VQ`OWA)F;NDl~Ft;$$ubPQTn+a`+)}U#nH!pwM1mUW$HDb zsW+a}{n=FIV$C4O64Hs~AIkl5eQ89BcznZ#C~a?jyr|-IQ!iO?&Rk3CaQ>~+Vy8QL z=TAAN%WG|-)(W$L-rdmgXReMJsM}1;V$3q;dSVBN|xrG{NlvJsLPh4?{dxv-MnEk(^ZvfkTBejLLf@FY=CUOFp==)jsB0TTO7& zEIY=`C~S6~PxPTkw1n&*>y2g=7Z=YBr}x-q*5o-BWH~PN#4-nr3|=SfN9aMcQr%YK zGzuqQVP-GUHxUaVeQH8kqgV|pEHhW%M->@rsXvUlg1!o)i|+H;r_0!Oxe83$Mn{tebx@M|8$dc86l&AJKt_R zKXSkLa*%r+D38hT4jVt80Di1g&2(dg*?JPnKX`O<`WU2^$(szS-3=a#i2;3vRA8kz zbD*q;1rqAvnN-)1n#T{em>^C^%{AaN-_ z_&qyo_G8!fs|@P9#QdzJO8$L^8p$=(mvtjUd8PfsiTSWSa7BnteVugb6#*NLC3nNQ zE&;9xUf3(bi`#xtCjanG`zzP7U&G%tdqsf85c|z7T)kYY#~LFGtssQ^P$W?y0R2G2-TmyG_;BWm+N08=c@-{=F>ftif;JI9BM?QL`@wPxQf1d~ z)s+0TfJZ2$yr%P}F=ikcg>OBzLT<6RT2`;(U*q6L0B;kls-K?hw4HnfE^|X=1jJr0 z!q7}^8&kr?r?tL2C|UWfuT&sC7Adm{1+1X0cKh{+jgCLFKluu%jf~u;E>3-8UO&(+ z$3F{(fTyOXvU7;bO9%J(woCiRjZV(a*?kWjaktO{V``N(M4Tk{`_)?ELI#zbVkbqN z>0gecR{7^6qlivxj|2SzoNa|KzOQZ&GE5i%ai!Cx@r(F|OF7&@ap8DiC#To#?lMHkfGlg*!2x`=;NO4<8e)RXz5$ zIsL)O3Gkp;n$phz0XRO ziVF&=>mC0HugWY$x-VT!YMQttD4k$OXL9@o6D?+%BvE5?f9-0`GZ4{!#z1o#iy*jk z*wofeUxOEf44)nXEebW^iTB2f*W4d8ym9QElAI6WzF`j1S|%I08)=QOtrL-bgomT4xnsuava11 zRPY%eJAuRY*k!5ejZer;Cy_{`0~~ab|Fw!)EDJf(_3(_evr5F7Q`dFXK{~G5=3dtm zP@~kbsV-VQ&e~Zs5U<$}QSh}GrlWo1=pSpjy)^YKc_dQwU?s8SV)RK{i8{NeC{kVW z#_ei#Ff8g+8_a{fkvEF)$bfMED`*sL3Ub)^Hm93J1xrE^*{2)|0X*ejWSRTk=;gaaK3443k%{BX;iOx^>r4-Zeif8FHnyZOm0YfVBTTxKFkN%?5$$i zPq$u=lG0^9n~&=3On*Klq*~J-_ClNa)PAZ@nZOfaMnvc@&`pd5U+&XSzxV&?64aDZ zUbU;BI1(%}vZ7iMCF`33k*Hwox7QcL*l$5zUv z+ININZR}zxTUEh|<#5BGh0@5FD2u4H>&}XD>Yjq7gDK`3t(V6#%6cI*9{E5=Awx*R z@NuM&AsdYbS>_8`@eq)-9F=AN3b0 zid#L%wM&i--4{|FSBF7d7)ZpdEHbWf?Yn);x(JorspmE0pPYKp8#@{v5~=NG(E6dl zVf&)XY(=la&!MsDmVRAY-k!y3b$h45;OX^rxT>9pxqHZ0a7}=nt z{9-8C*WM}@!_N0_{SyfANDmHa1+B7mo!9q(dsmeR3MD0Vwn>imk59__w=rXzBs#W( zSKOy}tQS8^d!{2jOP}ji&SomrfLd{#&D1;-;I*62gm`|-iz4ps9F7QK+b;1ucKOz2 zzjhcy3GZ!H#-;G^vHW!$o4INpxd$81%B#vfN08ZTa7<4&W%(^jS=CZS109yq7mw?+JMW8R*pNCYtEW`PeqIx?Zpoc0~H2D_UTq+Q0Z+Fs_6*= zqW1KztNeM@X;lZ8-VMDK1s&u>4qEq`1vWkBXC#H~szAt5o_{$>pArqr%Sbsw3-)ES zxy7!u#SDDtTxr`m0zu%Oj)6I=OUuK?3UIz>Xt=}UZye!{&$nIWQ~6(mUafzhNAQUT zmx$I&LGV8M`lE_W*4Eyhw>06uUz-j6r1{uNjK{d(?qQi_tt@ZuUn*FfQJAYrPiq=J z{_X;J4h0nzs%m0MF}Ad{bl(-~uzi1U zEi)%b)XnKQam=;j{NA1M0F0^`k7^t>b!?njJ1A^Y;GfCPmP@j_Jx{We3S9dUH~BVoL4>- zO~P_@;jj=lp4ai61(^#&>_v$@(T;DD%jO*|t_wZ)jpn@bJ3+-`FsOLkS|@%Zj#~xj zv3=|7_d49lJgl4M2Mq&{_E!o0PveM}mo)wrPRP-qE&l#g&&xJyaqV+~;+TlC?SygR zo|JLxyxs(i;MNKhJ93;m3V0X<%Au||CkV-urXM}I4PMM2YH~EjEC7Ec5d?&)I>RWqz?%U2|P8g1PIQP0LWH(mR-no@El5=Dq&@TCYNuBP1-MUI#aK2mR1CN+q=mHXNO3JNI#a(Dk6Wrh#&i&l1qF zb{%+>5)zWRj~PHTrG$!=­T@c;2TFvnDj847%N{ze^KdWXS7SL6`E}SluMv#vr8_d1<787SVn#3^Qk=m7Wzoswg$# zSJYf$@(@ws5ZPBmJO(r0qa-tM=Id z)AucMJ7RF{vm2!+lPc0f;3e(OBa_KT*8x2lSm@3g3<4f-W`Ryfzt4eKUX7ozYrN(=LQ zCN}+zZ)7AuxpLHvy<_~;?t8Fwh7|!V#i$Zq~SsvJ< z+DnfS9+_w&D7|Q&C?x&^&YEGQsJHqRFA48mF%b>G2d2elW^|>KUZ|Z;wCWq;wJPR6 zqg?;(W%^HUJdwar#A4Un9!o-W(q6K^f4xt5g6HNlhlELVmg0@nK$dI5{Z+*W0r36b zHgj~Hq5^nFo^7nlszfezUzv|W1YzImB=~NY;^y$-m}ub)SxI=J%8`hk&k<>d_FAUG zZ?ba5lVZnngDq%&fFNp9usmdcU|C@x92A68iIXaT3EgJDkfpzbc*pu*X{*kLWdlh1 zg(z%2-Q$w44aJ$0Wu}-P-embnJM4}`JS(dzQ9hQCkqkkhxtNi72t(_%BGWLVJ zlWg{??{oi=P`gEwc@FHK^Z)1m?Z8#<$?^BW&|-b<`#%wFPBn>bW~-Ntl`hngmKj8MDccJFL8#N%8OgZoOpPQFCOZp5VPWcMw-@2!jlzyXt0l z#YFl1>MX=3FUe=9LTBrl_gVLf^Ye(B@034Ky@2YnX?dAxpkBsrzP%DL5bu-2M*A(b-t_ydqv>U`kP4;Vrm+<5*J0RcU>IibG`_2~7rfrnxRq2?Q(LGbm3$CJW zmL*o&2ZFsutwTPDUUwww{L__9sVLN!F)5j6B zMhn4vNJ zeQvg^Tbx~)T%7AbDDIf`?m&ME(s=EdD?1>YrO|h4`u{31OFU2`CP+NeJg}{5R%GjX 
z*E)xG%*x!)kEkKf`{4k$(o;&aBffWQWtk=_->ldB2FLh|;e)sh`|iq4%@^W5&P5|7a;6P4dlS~u+5%nu%*kOm$V5kWlWWM)K5yKa`6 z>XB)_vvH%K)IwYAL=bdF;<|iJ4ix&Dy)S#6A@*{tUq#n_7H$Gm`F-ijpi|$Ko~zc{ z;mmP`QJN2$8hzbfi=CK=w_fcUa-W2nT0M@5P++%x(8aYRl2p}Jt#tB~v5E2&)aBT z?Zi*yclZJq6V^}ROc?Ptg*i-?7cH)IxN}+bFDUu{S#j3*^PE2-D!$oLbgL}~bVP=Q z3>-M{mmqB2g?`e2XynSOm1S-oFV5<@6=vm+6)#{3qMGC`__>To{@Bdc{+i2|m_&_b z8y7lxNnG(m8S|F<5N+WIb67b)l~;swg8OKli*Jj2+eKz7TBp83{B9jpgzHMDrHA3} z6|YOwt}Y>v;`B)W?@VdHU#ZtFQ=#QwHP6fy+f;Z6a$Fy3^EaN_q(6fdWTO_h!8{CC zoOGPKzIA98J37v9NV>5mj59-1m$vxaPnMpFq}#uHG>dYP@<9p7(Qb_y&ly{X?cX0)IV3(aHybl@Owzmdvd> zS4;cm){i*YxPB@>GWBlfpv~R5kg&4f&1t#dtiVb(VrRilT~oQ7zcOsPqM`K7s7MCX z>g9_R=_%F}sFH5InixgdajxF;-pRbNIU=GOZ9JyJqaZ7TyJ&&3-goj}S;`Qz42g8l zeL%A5(p5j|p&Y&cX(cn87scPNK)TY9uK3pY-Dgg81Rjt{kDQ>QoU(mJ3n9%^?O zF&TZ+%o=V?u(D{*7Y^4WCCE!gvu~=Y(q{KBxN|hqz;h1M9vk+hZOn(3ywS8;dzP7- z(=BdIZ!;31BG=)-a#7s_)e*nGES{(}E@)`H3A_putXCoX1gHs7g~c6Fld=9V9T#S} z7f=(Ta+3?!Pm3`;e-1K#=Yon2_P5dL(GwO5COl&~rhWY<+7!e$DbcUyJ_oYhdfP03 zS9SPNvA9lkS=inO9-k%mZOAXnM@!jXuRm|^MTup+LTFpqF&do)%FGrzOADIcWu6@O zS}rVYxZn*@m;`6eEy?Bf>!qDE7_ljqu^|5Rp;mh{)TNvouIAh&&2EDqF*_l7$SN-q zd}*s1hYdRADf0QTgfjwb9u8mZDWBK^N@VPRlb|9Z2`!%H|xJ9tHea)fAH&;v5mZ}U)g{M#kn zsr`#8g`6e79MxIH+!Ql%XFpkK>n$Rf%VU!%0@F-NLlc7C9P_1@#%aj!CR`zmh&3MD ze5uMRv<$ay)i1KEdoWt%l+7%pS}M9-PUI*_Gkw3MotGk88m}1&ra<8QZeYgk(AB1B z3qtg525#Gex*?}_g*#s`J#(&n9W+TpL$@9=WS zYY14!!gGn76$a;i5;rlWZLCLoH#gl+pN$Z(6jI7l4VQi^f4OE7nomTPC#afih?iCu z4ri45Iq9g3v0{_Vy2lmyWKp>|0RE$^y1wm*`h7(oAeEnYAo;1oJ%-1+ng z#{{XuYe~4jwJwsu!18kDQt@uj$Y3g`a!JJ)y8i?uOw`zYyVKKjKKY0_h{{|x9#YKC z&7o|%dr{f6oJC5^6@a#|={p(Ta}<7t!h-@Ze3Nuk{k>7qB53O5lFLLFXHDj%Uh_Rs zf^$vcp3-=4r|~}5-A9AsyFW^n7jLc5iUn*3*~AtbY)bDRujsG-9A7H}rJ-Pk8-n0l z$_q&mhcGsmw-cY$D}wilp-s_wGIEgpR#2c>LD#hGEV_g*8m*82dogVqyAECluJin?pn$~`s1nOT`&ir$v>sxBxGQV9r zXw;Er;^?TqkFEIL{OF!ZY#J30YRC|Y-%t?yZ>~>2uY63l@$)?xTN7Ogd6~je$55M#b1skA{c72JT^)MaMllf?5VGbe9h^5LR*w0eY$> zvMuad_RG=T_Bu~dZX!eWC15BZ zArli5^`K3GM8UC`_#Lc(%@i^(r+~|YqiOVuD_8-WOuEN(s({I01xcG!AzimygX;$XNxdBbB3y0g5O zqZKbLrc+}YFX|B-#P5!Kt9$7R*?owRzx2*{xm?g~_j7ek_X@?_N`p~M2l-eP8kGGO zwB=fsRyaOFmAK>E>qLHY+rNEeqp8clH~JzyHIE?E8R_%$&uU7Hs`rsihcMP$4Ct>^ zP7MeIYl7sT%73QM@>Oqwl%hsDaK&-*>A1**}{I{F3xfsIoBdy*4{s)}s<4 zU|7xW(#p(m{D~+)TC!V0pJ6lnL+g#O^L?);7A?m6P5CqRG}*6&a+KVX*B^jAQaEkAJaj0Ui_ZeZj9N zZ9a1fS~43@fRJhi6KH3LzA!da^4fRe{U3+KQq!%K%!m@K#w>h~?u7 zQ|tKPl+<6wna1Gp!}LdhML62m2i5)%t93E7xu5Ld87^WO=?euHrOW2LL(EkM@2Uo| z_o#-Fs4O6SMODO*KUDlUQGIX(v!&2L(#7qZq2Hm`KGJ0{g_5YkX$iLk`ui64%g){0 z^2U2h@Ek`ZpVh;1$o&PWMXqW%kqcL>YLYMGxk+PJ3GEq?jsrit^u^2{6R!|$CH(Cr zY&@HMIETIs3iHP4oAA*C`^8!4uEdl`jbawNdGTz3BKZem`}wDjGJjrd?XFm(0-5SB zcVv`#M9WqC`RCSi95V00H<`od7KohRd{DyJgM!+yja`7UAzWm2vJbJh+;X!F(br@B zJvy$h;ZE3FZrSS~5ZL{n5zhbpkI$9CE!RZLdyl|avFoiZ{#OdqP1yWSnd3sdl>Ao# zO#9r3+)>lpP`(B{cSB3j8^haq)^15sIkW`|=3{e{h{cg#%}*6c;yn#LeU6v8kgt3? z#>TaKV+pti7KTKQc9+j(epnE7O(VLe*#6$^qx&3q;f+(yHJUDZ3A<3@xUk+kyk&!0 zpE#c=MLJ)itnT_ku-ctFvkEGS$d{uyx=>M3t#4bpZa7P8I zYIb)UGHhNPTZ%wM$#gOL;4JH#FM%$g1v+B=GLRuV*&kjYH`GgWD$jte4M=6UT5JU~ z0!U?29>eshQmKNK%Ap@z(JCp{Z3ztO;yP72qu++*2t{-m_TH{DI&^0k@=HkG>qV3e zrXNTMC93cAHEAnTm+MyOf0&IW1HEQHqv@>%l6#TCc(G!~z~7~m>sx}$v=p3Q+&qx? 
zoqe6NXOf^sKzyTVA&tq7dC{*>+o)n)UQy3qk;{4{z4dx%iAg=TYJbnd>^A5DGZWfk zuhl@Pd{bm03C_YNST=V8im5vtA~5lDp4Go72RG!ar>O7c5;eS&4fQ6A6EF%d?>1#N ze-b+OVGc%U!9p1U=)O~ph6zRsVjWVad_G?c2sJDht z9I?dVrVsDXeF&)$8K7(ba4=TEm#e45ubA3Lfa2>X@(&)&1Nu&^-v(4DM5lFq^j%6>WQy!!EIX- zC^{>AtpZ|CWF4Flo&^M=Vny4ieHc@64>OwCqFg)PzcC_M#0&+=Vz_vOWDNh|kHif; znR^g%%Z-%XKp2JJ7g1%ly&5agt9tty?ibX;_H<;?^#lvo8vT5A_+85AOoP)MY2Kqr z;jD$@fh1Za=t1dy3~Hf|9%+-8o8aJ-1Ty!0 zry}014!&9Xl~6myt0s?G9wUvj5Ns*~r% zow-BG4{?f5=h37()lSeEbhfD)&%WgGE6>kmBe$SlwfMw@P(6mYe!>f!42c5eA6Zga zCJr=RTwgxMJ5LU1!hIb>^dBYVpF~m8#ATxO(&aWZJM83Vp8^7M^_$VnIeI#g%OTd* z0ra#I8f!~#l>MABv^9HnfwvA?**W7~m-l>FrZy$VsHL?~ae34z@EhFrI5=}(wM?Jt z2jJqGNxkdJzK!RmC;O6G?rrTSyhC@|E1{47i`4#OFH|t^FPEUkA}y~~8(2II9%+~4 zyFk>;rHw7dx~aH&5(4N`B$sK88I6)jf}hTt4TU|mJ^CVS+g%v5+aNpJ7<4C;1hlG_ zl)JwmdD2-e)X^m{5lsz#vYZv{EO2V`p7%>!4XEY**8DRqEv=;jS3DV+#IIj(DkMng zJh!`d{qMO%#_H}Z%UO}K#bWA**Z4H?_qmQ}*=ui)YsRbHpiFagMZs4&Eo3LrcM07S zD>w=lm!j^OKEIol`+@rTZUvifV$tGDX|=sQF3&=}-3qE;S3E-PO7N21K)v7%mGr&~ ziT6z?A}jWRW{|uBi{LH)tt4Y9R(Bbc1qDZxhBGUH5s4Pc*IJ`)&}-HR9q+~9KjW`( z`s7?5+u!FbaNOUXygpDWWm~-6YNOW=2cJrTL5FlPp=x-3H$VA4RrRDhAC)%EnMk}< zZteZ|;sQ-v0$SK&)gtF~0l%3UGHUn+OMi89%Epqw*G;~SiADPzk|J*Rj+u;g~pVbV}8m6Uo7A@b`odLIN^jHx`x;9mG%MZ86Qj z-D+c>ZE%g~uiplM7WPys@Bu7^Y>M;}JYaRj`a_#5b+Wjsmm?5-{T_V0MCtee$lr_J zzlg0QjB9k=Yh@6Ach_z2FuK5Fto4N!FJrE@5c_Mz*eiBfX}@D=tQP}9muc4*K1Vne zW$>5s!#^a@ic)Bh4c!Mv<-rn{*X-Qoe=YhSkouqc2Xwfel64Vcf4uXGKQTR}xg)e2 zowD+w!&rGPpH~N#B7?ex+S6<&BBN3UV$3rmK=ohI|6gnVVIkx!yot!Y-~+?29PkLk zN1jqLM?0_{x5pMfS1w^lOw&wM5!@No!vr2!GyHwLh6;voxJ zroW}azigU!=QTh;OM@e@Q{c42{@1Dg2F-$^fTRRGTL1-@xIUm+HYx73z*&h!MpX?y zUTdqwvTzdYo)jf({`Z3UZ#%KM1P*C8;q(xEd>Je1mmkOf+kgK1lY7g7V`8w!Oy&UX z9)^DGv;6%Bf1Syj_zIaHck2F0Ooc7|k5!Mne<{clL3 z0)X4T!4n^HRuT-7^#fjPVk=AZ_g63#tepr1|B1f9nMP zwPoxdXdodHk-^5;kBNYgepc!sI2((>7uand+ybZ6FUG>C){B#~?po=lWK9GPB zGBDf7T0+9g-KoF=^Zwi@KGWy9Wto|oyCr%DB~fC|Z>(1bH_>oc&O+mc;L_647sV#e zdU|^sz)VHWHLHT1m9tq0?rN+eE?V0IMZ6ElcExeNB@eQhQ9nSPZ@8l2U(UJ&%B zq__oO=O(pfv?0eQ{KS*FkYm>By*BHeOY!(oHQPDX4VBKDF010(J<97Ditd>h7gm;{ z*KHrbD4)BWHHRBt`T9XN+!<)>%>y8cPRc^+GK11rtVST#BgBVrlYxg3eNaa#NweyV za&_T!KO1x^;esWY@`=)b(|`v!4WFR4aGqwe;lY_3KWrNhqPDK&5#076AZ=8y-UxRB zBTdfVWy)eb!g!uw3QRhaa8M&>)Ic`?IZaKY-|z@kz5uLaS}rXK{#x6Bmk1`Xtqv0O2V-|gR*WcXwzX;~=nn>(&X2%+&PJGZtd*pPC0}7sdJ0YzUOe0<;)hE~ z?&Yym&@?qGWLOrLc#{*JjBd?9XfQdc>@oRU1B87w?p8+4_ zHsHL>kfk}Tly@f~)Y$!aZ1qT2?BsA0pM9@cz4}VOww?TUamuryTTlqlx7~8;aEP}I z*8Zl9miF;nrTPf57dT3XyRb=YXDCu+|K3bEakCgI&X|CcoIz5+ff|8Ha&a5^DB}fX z0>G=@Wu>)9MI59C*$4AvgGk*b&f&9HfT5>TSbmbMRQ#WKX@Z~Vxm!oy2qs2rq_*%o!)R3rOq~gir)+*PbJ-2h$NXZBXE$ z<}BQrW&}$~tUP9dCVl}wz;0TW<8T50F(V{?dymC)gJm6zCI-A>*V~iP29tjNugB-Z z?Ep0yf%35e6xlDE!|tqqw02`u%YO09X8ixOVNPai0bAw*@2j9vT}P(}+6WRiE7BI@#a~{Bg8Aer&1ju{ZN5p#t3O&cKG+0g^BWhzN<; zIEcICYN*5?&;+gO%W548;eYjXD;KVYCEigeg& zfq4q9RAI)x3iJn`<@dl9jEwac#9lGqd9?C+Mv|Q>PTfwUas_0wa3OP`*v#X5(O2z1 zF41=7EO21T*v+ z{9qwx+lK8GRuIlp>)b2q2Z^=8KMW+-eRL`q>?cRxJMuFM^gOn(G~qb%?d1&iAD0lE zZ5kYK-F@PA7;cDfH>M=Yo|Bx4#SW&Mz*b!u*1tlo$Up-1Tr#8&w02+AstSyiaCZ>{ z;3uXK z6qG81&U{PNJFlhOCd1DqcW(dO(kSAVs4}opkw$ax$I>jPD>OEF+OxhK?Al%E-RwHJ z-1bp=I+(Jn>bXP&M+i%9?)z+JCoNg!k)6?_(W0F91fE9DJ@UWA0|=_CL`x$!L|St; zuo`KN%5jpzD}-#JB`_7cu{C?KW9zR`@OToaae{agFG`HubB(~W!%ldDmqg6i01W_; z6JxwQRKjE0m4dT;MvIonhbIFv{@3IkY#l7G{h~dYp7EP4$GD0%dfxJBW8fkadl5Q? 
zK{ZB1mNoPn=Y1gHiVZjK>Wjl|&~m|TXc)hNL6+Qs90jj{i@KZnwT4J*ZK};eBFEAO zM$}S3+q8rRIm0Iw&{xBM67>E%6EuMUP#h3-;X@e-h)yhjpWkty$U=szU5 z2u*L|=eJ!K;cRxlv4`-Z?_&8zksaPC9c@<2f2f9>MGz%zLb^G=_fxfH%E#Osg^>}f+=d=5Q(r_|s+(ODNP}#-gI@gZ1k+>c zHwfZi>d32mxk)RV-;YHMX*`Qx;S(&j$4@TuTK{srKz}Gj#i)YUkw%y%_Mp>3yMlXR zR_uh}bznQ*ggG_@JFriU_qGK)XSh$=3Y0F3UZwY(c`>=XlJ#*> zyzB%+9f>Kl9c`?6?lJf!#Y^+V91`KX-`si8sE#;965=nBT&c*K`jf~RM zbzF1{=Gf1&^@A>+7Lp9} zW6E{DWA6f$dl3p}GN1rZ7&(-_;LM4Rm)@3e+nX}T*(R49pwRF|zieX3)w&$3*2cAC z(%}j;)cV!eg*RdwK1f##ZIG*;>D}s}uXrDju9 z)TkOjp%O6^qvoaS{8Y?#CQdti(IR^`jU2?884D`D@(X0^UV-JrCS;0$*V@kLOpxxN zE-o_8gE_R2t}I60JRUZ0dfwsuc*JA7Yj@j-*X3ZWlo7wGjN*Ki-Rd3z4vxwAnN@;; z`z0>Zb+77A#2v#SWsTC$8)iXp7GFq^BkMgXjku%k4-mteu{!sh+e2sqnDi}(j^{Ut z6^{(=3m*N*zJ|04WkkO#Gu&j%`)M&^rg z9R`tc_HmvmIk%$on53mbPZM)Za&q`Zv0z2hB~B?mW$?w{?;!&ZTfyA)kKoad;RrwWp_g;LX|g* zme+rRyGPr(F>M;X(PDIL{Tsc33%V`VJ&}S;mmq{FMkDF2fDa@dybxh9r!f`TGo?iIx+BW zxi@f`D7|zAVUOMBs}>)7q*8WF%PY>jETdeT-L3vYKO%0-3LEgAs&=3VDqvBy^(fIZ z{oPYs)@A3xTH$CawjS(UQEMXul|FjEV}kn8u_ulG{emYwEA!3$?YYTzSjWzuj}e+= zI&f9%s%+tdPG|4Muh~43VeSfx$5M#gyWkGlJY=19<~a5{yGHaTfp zW|xD;ClSwT=jG|1_q(;Q9SZ83qb;sAjH+=SrCC#3N`0&lal(vYUw|pTbh&4{1_0Yz z`-o8PCuFPTVjI?El;2eit&Wm{h|JFreAwjnzjBFA2I69^tW_UY8GMl77#A#^FicsY zIpXX+TRNLoZrO6Y(&vmbd$>1SX8m-zX1Kfhgu!_=9kL~L2U5kvf4%tze&T@^f|Ct! zIjlkcY;p}YZ*%hxE(4&hc5K-<$eYoZb#cr|jRY`HlC}zA*EnPi8-8aO1v~9mEU%j=>o%c9Xm{L5n)Y38EEe1{ z?9I_5C+%}kB|+ug6%XT|r6z_Zm~XNjMnmjn{O zq>MkuRRbIS4Wy?}UGSqOB!4SJb-ZPp2_A3`lO2H7jH^ zq|Yb;0APYI@7aaqGZymOe>5wO6-s1wqxXX4Pc}>LvrA$;F=w_vp`)*A@v~5>0 zepi2q>#0v~xIOtkYVn$qjCDn$L#dGpytqnQ<<0SZl$P4ha;mq+Uw4>C?F%a# zF7D{8M+<5p{i;6g_1%4+Hzp*RiDVh;>$;-nT6GgH)^S}hPlv25H|IT*xL#3vpVM4) zd_gL58JwsFJjPlH(Vqi83qefznIBZ}IRAk3K}~_282W^As|Mjnhx}9yiQU#xyV75t zPV$xxZj)(maX^+Wk+xWcnw7cN-aNiEXq7 z4~!h8pbe*09c;&$24gS#;Co`dbC`f`fyj!6IIo~%Zs`5vXPy)KBS&ux`@2cvRvX#- z>fN)9FWh^8SuUb2*kc)<{(R28&F^=VVET8OeHVMR0;~&z+`e{p7luoz5WG`o&{vpg zxY<}P3#%Xbs$Z(_AF~iLj$JZPQx(ZNj`K}KIk@elR$;XrZT5Tv1w0v1$K{X%J##h* zbmhX&wx*B8zTa5b?GXTt(?Ujjgxx{B_D4RSySIZO!;m1SH2}+qn(} zrdDiuaSwV(Q^_0@jb_Gv-PNKYt+w>&kx%GxWj{&TQ!%%2-qaUEq$6>+$Y!aR)2F=K zkT1 zPRB-h89700Z7N0N_8Bjl-pv%Of#9g(MasK-f}B|&$6p|A>fd%)-Ko6&&t3ooT0_&a zk~tx(v?d>b7Z-lOk^cr4@3A_FjM}?zD=-knNw|wB#0g!_$;q*uZH-oOI=V}FI&46n zlE}sI-APl3zrp*uH9**Iwtj2ATh^tTSvoNc59Be#hm@N39+q!f%D$Y>Hr*@pcY~|C z9S;lMn3^AL@;eOn15Qdg)*O(eZQcSy&H4EFe*Zv8nP!34@zea5L>2r(GJ7y-HbVhb zNf;Ie)c{4q-?MLRN#|-_)1Yz@lD_70{DYl9#f|P#OzA(u0u6i4v9-c@m*~W((iP&H zOQJ+!Rx0Z1(yFSeuTFeZ&Jbl+YQ`NrnFIs7Ilw&KX)wN|BBMP{m^Xq`N3*$GKoG>0 zlt33WI&eqKn#OVi#+5Z+p}MtsKoIjrHNkVFLwl{HA zASKsMjd#XUo=@*-*#zE&*M!{n?P)~pS;3E!v)zx}R*N~LA_{Ydh>V-gXJ@_;FxNcJ z0m(Xafc3dQ9D-Unp{;SuHs3@VQ_T%veP#^l<{4LicJe% zHhfr*ddoxfZ!hUeeE?pKs@$3NVsJ%aD?vr`t9>#2<}bTao~8;vEdr@ur(G2;C^NK3 zy{ZVfOIpA@bBhL#@G7{aaIQ>oVu_&ui1GXG_QOQ22@pA3zt6~&L4UeN^x*xzC1WYwntm0SNm*O<*7p$a z_e=mz3U<29WFFL#9(@@&A9^}kJ-!e2b5#lS>H-{L8Hvy&!@*?&VK|*PS}z~Wa83Ao z?5%#KI6ZLCfdBi}2V}V{gC6J#rB^46rI8e%5ueJo#w4!CocbTH&xQ^R9aFKP-x~+|*YF7C zUI8t9UkT>qDuDg`nObuSeY~GhehQ>=czI3^0=^~6e;)z$g1!fjypg--qjiOecfaZd zBVV_lrchqBN`PB_Nh$KF;9K>5!P=YDu&_D#1vDLEre%dmLFTpBoud_I| zZUJ6#;i9sz*n$igxM^@CxBQb+IZlOBB?a6!muHH#W<~x(GDnjf5FMUm0U!pa z?1#G%EMs`Vs=N@q&~mm|@|xn}HOqj9tnAX3S1nRtnLqOYPSZ0#E(S^-H(qNMH~@l~ z0Xc4lt+u*hv-FzUhn@h0$^;urU8!^kNdoGb9FTQ-RujmR z{e7!3e0;(P`%KPEZ4?8td!c#V3H_EWb-N!F-lej`ax$j@w1TIG~&RhmSP@EhnbHf?VyWVZ0B7 zK>Y9;I8Ivws{erS=OeiQ%i@4%*z__Z{ZU}L81NF^i3o$rvACs>DRt^1i4U>3^@>q0 zIR%SbF0OLH5N{%&q~bSAJ8#Lz0;%a5A}zXkDmnuI=6neNn9mlh!?H4_c^wBg6-#V3 zQ3E>iCtw|-;^>G6r&9PTIMh{}at0+8Ai1o8f0*n0Kr=7b_fU9q0t&m6pCU3KTKb+1 
zi10I5AL8+I0C~6hw*hBCsw{ZPRT*Klc?N_}?`lHiX)+A)z5?Knb+V18C^v76#SeU2KOmjO08I?T(%TuXu67OYu!9~R*>~!ZpnHH%VJO|``CV)xSVGljkdtr~@JVLSbdg31 zOBu3ZFpTOHm;jCupbT(~nz8Jb6KpRgB4cF)%iJ5ftIRm8x+fLtR?;XV=3ig@f=|{D;|M`%?!)CAfrGToDW;|Agu~i;n_^@>C12j zgJq@ztmg``!WWx!?-{^zHL#3DS$n;#I#x4Bsd*Vs)l5#HnNw4Tl{(kKZo&&tYEQjt zov{9BdUXNIHvkmm_J$mRr;?bkf=~|^7Y$r4iJH&Ae3O+60a^!cZVGtc0t!WW-}dx1 zqR7CJ$X`-(yu?bs^z|1Qunr-P30RnsbHe{PAavo=sT*Ovj8hSCWstC&)f8;t%NSiK zBNhumf*lqMyRHBWb*A(>@6Y7GnGlv0m)8dCyhoO)J@$fTU=ul;6{4E(z&f6|mT9S{ zNHEwgJ~$LU4JYAMU8SVJl>lMm57$t8W0C$XkF-zml~yR1E-}qbTf*I5lH|O$o}Ggey4u z#h(NFC>rJ*jJcaYed5-3Snk-l9B@O*k7lP?5j`EtzlF3 zY)|UYk-p~UVKMx5cSCW})oMn<)3WVt`2jkzek3!a3KLT1P>(H5@o?IGwJOd6B;~Jr z+YHvzt8lfRIYkozSxEp{w`fbNKa;T{AzQ^N4SQVU?xxs6jM{w|Ur8fPnWR1X?s@=%S)J?DC&BS|B%}*}!K6IxL3D zaZUyxvBwTIKewKOSch}5Lx12A^=DOpQn?MOG#b1aU*1$`sQmXDuSXxn$*?1syjwS!aHz&!eR6_akM(-1{7d&hVQ$7L;QgxP^e9rzvfgy!YrWj} z*Mi}Ap>?TMi38RF(+ftZv>g@9^Y&3ty#DS_ zT2)oD=&94!!7Sdmxvg!)Y-=nE)Ig{#W{-f8PpzQvcvO(olHmK;|zp_cTw4;crb(ISB#licxP`I8utCeX5MtfGZ)H1m*t!YstRFR z=p=1l}ZN4VI?Wo>Q-La7lI`jd3Q z19!)VBUUvf0;>jk(Myh4q@O*OH|)oJ9VhF4^z2hnj@q97^K_n2S1Xq05!2za*DpLh zevHP+<_LLLaOq*0pi8%nu;M31+-4KXFhUOe6)S@*Y!f2e6H|^(nye83;cNSu~O?6Eei zl&6#Ut?2evN?RI|(ffB_TOb~SxH_Il-oKuy&Obk&h0>p%Allhp*7hq53b55<<<%fV zUN<==n~We2nNjIYq$j!|F1Ed*l%Li8 zHL9y&!n6WWh`cfC=>w=EOCE1JwGSb?F%NR>5W?)jbe(cl$>qWBJ&BNX=mVRUnqirihNfq!* zYOd(tBHD6lU^mH~u=cwzJtx_=wu~>z#xj$EDw2^ml@9u;4P&N4^ z3uwibN-9^9J6Ti9)ul_4NvriY)zO&x;OFv|rc+)SY1g?f53}LxTnOI@gPjiV6)wDG zz4mDA?4_G_ep>+f9f6Pl7;h7eacrQkE&K>Qi%UMfrQABd`};4px3`1B z7;g0a>yD($%CQ10#FqGDzULWl=8$dtZi$-4iq!IhJ6oWd}g1bs~O5pEm&=ZSO8(QxK3iRTS zP^NwEM(#M*7BoCOJXQNu*q54t^a>l?QD~oo^-B4zcotbHr#pmo%T3&H@$}TsKF3Vd z+G4KQU=ORRwb}HerTBZ5_Ns3ZDf56Uet6Mpl`k?AS@v4Nb9$J;9!!$KyEB6{kc+21Yr_BLK(l75O1nUN5$jqzQfXDTby*_AMh z2%T{iU-Ui2dFVI$&9o{{HIJfFp#=3xMX%O{LC;iMh^Vn1Bfelc_e70r=PE#D#bg_Z z2-;-l1RLI{EoFE4%@)M(_dao{XZ#f^JhLBpue8+3=Rx0$xs9b#CxZ3Wml4Eb&L`#q zv`Wplm7gE`zrC1ksCr?Opek@~;Zr>;Kcz@4X@p-Bos85T3B8+tn z4=3zF>i4?R~Ep;YfvbuuQ`Ynfx3v}y88`+v7^-}Mkd7O$I=04VZ)?Zhnv z0}iRZ?)V_alnhajA*|n1cN+6*r?;cRZ6=?iovzN3du9%r(znI!%l><&lwb=ZNt{U*%tpxN<>UA3mQVI7PDUZ#6 zi0Xc($>J>EY$;=ZhxLFkKB&>9qblXux~BW70G=+)>fvIBU5A23u1!kRoEJ~6>vM-u zBqt@bx&-bn{S{oJ(_Bjw`%@ibS!QJ~1uoc1TPl(rzu-gIC#?RlHIDU5HhxIvf!<{R zjj|zmOr0s@!6@<9r(@~;1%~HcGd|-oN;91(k$dGg7vX>RDc`_y17$1uxAOBs=mZLX zLF=**ZZPP<7j~Rq9DO47urpmuL%#w3N;s7<@9F!tH9Y>~f(z8O*uiqyRi6UMFDaq~ z<)SP>6bH>TViB49M_T#kyTJ(nA7UNrb>y(qb)r(A5t?xy~C zesO3$<)8E=T7VRNPq>dnZm+a5(hXK)&WJRjAcNX=#9pi+L!DF0=sT$1)1N4fs$FGS zmP>B@dt^?UngW4{;!NKBn$56e-1jYS?N3q71=uP+o1)}|j*d*b#h`2hy(k{SMH)HA zA^{_^1cAk+CG;vALw|qmCIV31!*sX(Q3;}h84vetOvqqN$02)5V`8Wv1@ zF=W14l+0^fO_Kr5t~edexToD_>;(W=O-VI>Y(IsoRW=?ceuF;J{)#3)-t{+FExGkh z8t!NJ_MZ^{dmCT!>j3rvAacFDXj);WsG)4&r)eTmy6jS*FZ`A*8M?9Mgo&no>4|FW z^U=M>u5uE#`-HJGH8S+-GI#~46rIca{Y(9sSffH4w?cOVMa5k@Hcmxn*UkBNG8!j+ z9I2;d?~Fbkr%XOQ1$6P`qlClr!GDqx-!G>=8<8Pgn#+FPZ)d*P!St=6mTP31fSCwB zXt#!&>Gfe`56tqe=Uo~jZfB+Q+jqvW90vi57ujxOag}xFhTCGW0L+9TnSGZBuV9uN zx8IOcg=yY_f3z*$O4LZ*C~VR+#eRzVqzzoJj^=W9PS9SV;T2|#%gE~ksa_p-@n@Nn zf1@zr>bS2Saz>aqC%jlQK6>h6Q%->t0+a4VwW&FL1jPlW^v zzEs%KcAtZ&`!1tAew3|iyjsXGiq7bLG{nTZm-PVW^uTg7DfhO|e2-QbTyn<1=PKoQ zh`{N$uIhz<*ab%o+7RFD@!GcXGlfgLS_QkGtf@>bd-J!u%9YUv${t!Be?#>;ivnvr zKBhI3BB(oEW()85>GDL$wdZYIi?Ob@Gjp-Q}81~)5U zNyg;?NQl;D5nk;PSI}-mz`1Qk9PUuM#=26u?ge zQFAhcdKPggbbGYj(!}eTRV8G0c2A@*B5Z77a_%pzEY-7-4lKXyX=Zy)%1;wNX|B7L zkABX5HW8ht&+_zaTsz%#3NM*-s)LhG9n2FuYd^NJTyyI}qHIP`IB4L_A{tSvPIW4tK%tQ%^Y`_&8L%CYk9_WCnn9zZUb-jI8E zm*F2PnyunnLL;XGhT?r(4i>92D?Gl3*V@+z6aM%o+iz<@SBHKL^`fT|7fq%@eHl_= 
zl7kmu+x3O>u|DBJ=iZL9Rc3zsvoiK#YWFymrt@^OaS_sPZD=rwhbVKt6&>XI8_dZ{ zOg|7UjKB}PT5<+o}Q7#pP($vjZuWA?;~ zl-zEw^+7jn>L%mJrnvQiv)znRYSGaU@I$vz+v~UmPZY#bzs*|gRwtsj+{&_o9n}2@ zF!!nPFO;e4xy~@_2u0-1Ypc}>v#+_RL_{8IW!BZLxoZ@~r850RL0O9$;uxn3ZgcIo zJ5qwU>UMxtwCp3YxvviGYHlto(v%Lt89nN354JdcI;mLdAbs=5%Nt&(HJ0@~t9St3gzb&R>z1oaZcZJbwujO#I;=SJukU*DiKGM_E(K`QlKSPBq6J$U-U9RryF{CkUJ$nsG+i2H&RDMUB;Q#_Edzd zY%D@d{hJ!tUwNC_t7;2~jmt_Iy$mv{T8 zy)0|$*U*Y7L$|Nq@LET3*pJmL+?Lf=gT-tKgh163W%y&XUQKP*AMcXqgh$Ai*-tuuY&3$yx@MT=I zBr37Um>JKvlpAA|pWt#2{iz024V6^Ajx<#YESH==&F5ES5+kKc7u&<@JnVAE)} z99!)kURnx(-0euRjRKv5a38qUYu86!fAG{&>&;U5D|`2Ub_^;Rw> z^O_j+cQm{J885pvH_K@!;&Mfd318zCe9~FL zbB$34OY=*hp+zg9s7}UgmO@qIJrx^U&E4@W1|hba@VcDbuiS zNTlV0`?Zn%Hwb4-ohCn7(Y(Yg_KKPcF4ke~d6$8(B~wjRo412KfTQoPQIbOYVSNqC z3GBtZNuWvz45`rLT{#FGd*_C>wm{O(t)dgftYxwLTMx>X7kTT>J2ZxS1)vE!;TKM8 zx;7=a**WXTd0z({qw?Nsb6N*df_r17e<}bn6M=u^Ial7CVqm(?O(reUp>TL-_3vhr zZ}3?FHT$aokqpeF;8$5v%$}Vh!SPzHofnf(@g^J87<3}aQiAyRB2b-7>a#reh+n^m zcevQ%8^QK$P=4+uFO{DQ7Keqg)gbs~>aw8SJJ&|$F6fT;FzAlEX&a5YD(P^H0q=!g zjok}SFl+LwE8$smkbl3M6F2ge2n zf%2swD}{RT+W@#|ar5*ayhqqp5ZQ(~&C-!UzdVmpzk=z3;oeJOpmM<&+ZmRS%YEDH z@r!x58O&I+|2%ODwP(7A)0D2HmJ$6Mk1AhZn?f>X_9eCKDUw~EX&#cMImeR;FHwc` zS?J2TT4@Olx8kyhg!!CjNsxMRQA{82bP(@rOs)*R@JlVn>O_sC%f{%X8=nop%ydyi zwU1LiCTtYb%sbHN#asO@QxAavl`M}0HdY&(H=&X#lDdCvh#?xMXG#X?=FO*$8-O)ATi0%ZxSNMtRu)&4%&Mh?CG2*Ar% zsC3pP zf)`t~K_O{z@RrYGEB9AWDTk|R7mjzG^QvFF(l8W0=<>(Ss!;Z_wPv2lZ2o2Yh0)6v z{6f<&e@s>FWw0MdJ~(bOqo)Wpu#$G+Ow%otolP$Nwr*yu`el9Jwd9QCuxV>y^Wbn# z{E~OmX#CSyLOx7LxWZ2GEncX11yXB&&-ObUIigSVDR>XA090 zelAZz5%uqK@<$gr%K0+HJ2~?UIl+mH-}pU!p1XNG$@gYREp@3hHejq2%wgJ(u5tZv z;cZ9W8^g1Ms`qA7HqD|F*p#9ZL|r31)N@lj9Sc1i>fXAH=kGQP^t=Y+u4^NhrkZt6 zooOvi&q>L>bq|)AySB7`W%Jczk-PggIxyxUT^4-4z-fNG@Umo{`qWl+^^Xo7aV=IK=z+>Jk0f^Nj>@UX6I zXbQ__N|nS;vg(T-+ss=QQ$LGejIC8+dJ=RsZUl^gYO)VL0<=$l)8-6s zWI6U_YGy19xHcWvV?YxW+atC5+`J2O?e9Go(S{R7+w(wvwo!SYCH2i+$7;nJ|3EZ9 z1mCIbM|Y<#f9S)7xC1AFTLw4Cl9#eL3G>bJ_Fk&qO-rlCdK3+yP^%H28?0^x#n`#$ zW)_0T{X$9%omLi}IReyT>)fh}vR;Lt>nqzM5M`ZB;d7b=#k1{xxcL;;>dW>Mw6Pm` zzsELTJ1Q$n?T%$`Px32k+96@?DUJtU9JMP}9c?9@?+~^np&`$-2v4X@>SSq5@HBGx z75Bz)qiQr^Kp(*=`W3g{+_BfOMK>l2vx$guC;p0)aemX5*pC_Lcj7zn$`Nc9FTsP% zpkLGW$eBSmIR^vC45q_j1{a7PV`x^j z+VWQUVdwSW zlZ=Kow#Zmoknw9oN559IhncPR;`IW>xfs9=Ahzg#n9TpD>~w=PE^Aipph|5?fN@iJ z0m4Aa#LtwlQnC$FzLaEzz3C=08j=G9j-MCP1ZuiYa!m`#TxuvCi0p-lai~XrK`KEE5DksJ)JpQd_(h*b`G2SYeg$o>vKRqFZRnG zSwBC@DFHw@aR=0t>vT)6%2c_$Xj8`Qpb~+LsjKTgTO^ti4AGU6Jd^w;0(0iB{GNEj zFGr1xQTT%Tj~pR5Qg*+h35V2*cI)|yOesb5sADB!mi7v?>;WAeKpt7ufVY?H{Ha>C z^K+^S-NY`>Rrlf`cl+g7e&J(4veQgOcwyV^!tS9-=J8P6S>dSRdNe<2o+HJ2XNM9b zj9Rq2nX|&ul^(LC-`9pN@FjiLXcJ_SxJ<<6QC+7|#IWhj@8Am!WAZqqZFfI!4;5=Z zm2}kSQ}S7h5bc3R)Om{`OCfz2Peu&IFNM^bYF1C)bbNG&BW<9@!b#>A$x;9Y0A+w4Eka2lqXeCF~{e%ex$o)9a%7;Vn_S8CQFIw>nC3OFqP2v7zy&Wo)7Vo#x|< zNVp7_*V0i9pZgFqc-vU{{99H*cl2TCvVRu{INksf#i@eoFR;mr`ut-fIECE#{Q$w` zP|!bh?CkGMyi7OlW|2!K{Yq{GcK0qy5n0Esapo^7#%>bW)Cf|0q#d{!z@J%e%75I&UIVf2i|~h6=$%B&s+f_$ z6u8>ssyVx*Yn{ix1OAKWGbDJ}xK_k}J5z$s)~fX5qLL2!13_nX9&Na`z|8&oXUVC5 zW^9)AaJ^{_6!SBIJhVc6S-E)GLM-eK?cLrm>(Wf;*Ex611wXv#_w5}NF&7PKT0Bm^ zVo}niaJJmbjY4JY~PE<)mTF&A!p#LlMFMEbMt3Kx?By z`@e%P6T1PfZddd@>}>ucDWM7ARI{HlktjWX08AO(@$~qk|1&5dd(6CE1@H6yCs>2x zT!!dTH|wm}JMhit>(^rC6GY0S};E1)C*(?+@#dr6c+#fRG1ilZX>bH~<+#NRmb|2_c!%jSGXnVez`5PBwK7ULz4`oAv$ZK8xJi5dAP;$? 
zq<7A3Ysk!WbFq&^^5e_Bxv|kG&uDa^)U1=$oY`~j2OHs5e}REcmEej`Jtbq~LKaM> zDiM3M7kvr7MZ!=1HV61Z5@llp^&4@$b7CL&#}RW0meb%_tama(1d(EZL$Bzes%ra3 z%h}bvAF}&Ds#Xz8A2#~_$2tEOQIy*P5?882FLYflwFi9UElBH+I_G=$rzbj0wAnO5 z`k9LYe#{k;ZDVd{qEe&~4U!^`~Fq2#mg z+WgFRG6cQ21*(@zBIg|NEXrga*zwx#zCP`fPX_w?6&xMcKiWCB_=T@!q^GlcK%q3I zUrnPW5!Kb{rKP2IIXOApOw7!!&de1BwY9akCdz)`MrD`33;ajf|J#Q+Ig*KP#G8{- zxGJ zMrOA=qB%?UVBwK#BCBV zQi8|pA^H|LB-3xbGGxOOp!&TSqRdX2ioONXSxGFnN*s4@y za`Ikl;o3|5;3)t>N|9Lf;(x z@cLK_DS|p&V6NH%$Kak9w{!mLM_F)0`+aSpj@NF9{Y?7c-una#A#OrYG5iLP`~Up; zL5JebO40xd3>g*^BTKH3Ui-;z-^yS4H4EkRwORmX-+cMF7Zp`1l|X2N5+Z$*L<5?g zm&bg`8dw7Rg^buUp0dMAOz#M-b^EP=nzW}IfjH9m`BhD@$atQB7C^uI34CPGdlr)5 zM%vUw8w58Axdx~JeKhXWJ5|n9W08_?-n>csON@++jBsD*yt2<`s{QC&=x8GmuhIBU z6?v^#7JKb&MVgUF193y0-Y%&tkB-f+cSs7pZclJ-982x>^3_3i6R6dgVjsnfaAm3m7s7iXZP9MTLiM^CZ06r5*;_Pf`#3Ggo-SE{D zr0cRa=GchasP2IQ#=0%bx@RZV@pM(rOZ^Q(!N;4Lt}T^K*d3cUu&ovL-mCPfzV#bG zeyOX~@ew0L`1AUAkB)a(Jd*JGOP$=N{26xz8D+45qE8G^YSLh4U3`$sC9TsTkp1cJ zHKI17x`WpcZXJFPMmXsH94U8~Qve~Ykp87!U_9TEjAwRSOeBXd0Gkq$lX@o|n3x1) zfVQ&|_dAJKoSmKR|FNkec5oun_rnXdo?A}4qfT54cde4c2Yiy8`}O?096b1?e^I9P zUfq<&j|!cAtcKc~SCnP4t-&b}Z~1qE%P4w3n5DRL79tjNBW0S7&6D)lrh26!a*1T9 zze=hyvQ2C^i&9Izz*q7Ebt37w@6JZ$-jOEYKfVI$!KZ^7J$W|!H2C-{J6-#w+9VL0 z4Shs4u2)^w=nj@7kA+p!!BtZqn1O*wxl|ix;A~hN)j?lfn@8JdYG@|11G9Ure5<4G90hRF>1L-`zZSxSj||8D4p5TdH^z0Ka>*8tBh;|U~NM%(68%FA&e-%+iN(;vis=3@bGaWpuitF zNawEo!w{Ui3hCVQ^R!i~zJsG185u$QvF$%!9xBBIuJfOfvmGkoklFY`C$FQU(;{v0 z6A-i#>{MVfwci}qod=8<)d3^19IU>6mfb&Q&z7h1Lp}PdSA@tjMB*kG;~ti^np(Gn z?CAr-WLXankH^D%bzeJeEG&XF)uqepT8>mP_BA9qoir6rrMGg0KnWd_5VN<|3rl9sz0MKY+A)U!1@%<`p$#_dBqkH}-yK$uL-uA&8G)UbE3U&FVh8y@bq5 zR}5wJ&BberN10AUL~Th9>(a?=KOc1&v9=iSMf*~*1_FXd$H{8?BbmBr(u5>;bQy4@ zC4f9Cd2BdNuaNSWe?W*|+d;_E%1T(PKsBE?ILu8^+8sV*k>TJrU=j`^vD+TU7w8sW z?|}wKfbDZYk3zAv_Jkb_-RLtLT5!mcY}WaV5YO8r-Sf5YjiRKv*jNd`x((rd9E_2I z3jzbYI)$NlpoC8~HHEfIjuva)0l|Or!dl!Fz&K4Blc1FcOT7BN0A&gUx*Um%YI67)Y@txE#Y$TCyw?X z-T$Zz6c{kZ8alL0&Nblc@g#a~`<3#6n3a)H)qVLR}~ke?rAc)HyH(8g>#YIM18ycdb=o!LC5fE z4`I2qvzd*EW1~AoFKAG~*Ml*%tEaRFbln&4cP)gq#tI6C8fj{3UNZY9?1>3=Cm!HY z;+Q0%u9(89zufAj1%FbYY7abBMd`ryHbp~+o+g3W^tGl@O*c}o0=pD8e#2^>l#Raz&lb|8rav-# zV4r42f=rtClr(XxM-R8^UeNtSl6z}&tI0^vCKP5n!3>RZBweF$hmM7?m7vG%7?XD%t?Jq<;Lq&+Ju$nK)S`-$ARlFRSkC`d0#>!cZ zj<7&IcGCn;aY=xEzV#Y>qp5=B^CKBz@TJ)@zJ^q(#K|_e{Sdd1sfX#9+^opTN|4}z zn`-`O|Lv}rl-~m?!IkYv=V4M_E_^l?d=xOEeP6|P>}IuzE1nREM1r?_u0^U{M)y8H zN~lZvnk$$vywmnatyjm-W7JN&0%|OR9P8 zl|7Tq)j~H5)cB44QARJN$#2_LcE*x?Z>Uv?(lVq zfK%Qpi~FQv-??gJzgfB%Gxa83pTw+C{NZTPK4}Z=?vu74OHvj#AX5I#Z+$m>b$)n) zvDS-#9Q_%StY&@ftHo{%DOfvidc|JI1qT-)(ZV-~7?*(XnpU*dkpR44!D=3=#|- z*MkDm9a2X+Fa_=YmPsy+?#EE9e$dp^D@xf&I7dhpf zIA1KpR=74-8jooV3hzF`&n@bYEQ4NqCaQ$H9Cddk+(>ddN_Nyq#fz&()8PDD?*Kq7 zFi=mSKw_bC5F*ibA0=M}L%}a794@~tRBh-iQcGlssDZN^kMxPRt(_6!a>wY3 zm^ygAY-yXHUkQm1b1oJcy1m;XEXh75ju1V?^{nyJ@<&gfk_T&3(uKX+EP>%2DTBZ1 zyA2`qn#^y~;UWy;ZZ&@v6LFX#%u7adkg=5Pv?YQg9_M+DJkq9tBmVfp*YHHDLDX5S zuvzkjPOD)S*~i{1s`uL!C79ksj-_?VaEsh{f$=dcFX$3(m%^$y)JI2pg~u_tdfB zMV4Y~2=+F5CI`J)SIievdF8PzezOr=HXl(f8tO@@Jd%0A<>M-VdOfj2a0`eR2w&_| z4_-)f8oaWV4RiHVYHWX$0!pM5+zPhb?zNZ)JR{dh8SakvC!T_V7m2}nm#!$%X zCQ5$i?z0Q~cvP!ps}ws6{03RAkN~9kc3Qcxx0#KmifVug`AXuj9r`h7Lt7rHiz*RJdXzZ1+7M$2qmI=t3Ss2z`jkkcfdM;&zSIxIwzKwrwgUE*LKtqmqv9Kt~RTtgvu(|Bu=;Z2( zN*62$hDNZH8Q(pXMWLV$YUCSyVMj)3Y72hcA#+lVEJ#jR-ra`?>R!NbUYl=m7A}@O zMZdZ@4CWPAb#T*LwJrD`&Q_MWAxjUgah(ge4j-^hX{e z5N4?+ami9 zy=Ddy;NU!;64O+#9X%y7kM7B*R06R{PGyxSE}w4%JA>{sT>;Dil1pJ&ncddye!JLm z`b+8{`NI9)0{jv{!;WAqvh94^s8W|A&#sm%MA00Z_t#YRD<$qRCQc8jLeuh!Y*SUR zNkYOh4!`nsyF9CG)hB$nf2sF3AOF(0edO 
zJZfu@r??3ZdF^aIs1{@#bL7j?F|Oj1>5;hm+33n!R_}m0aeE?%P`bF+lk=$rkMpSl z%U8iKRu_N-I5^kbdU>Z8-4TLmYWT|uT@z#C1ngEGX*1HXM?UE%SJ#itd#`Io8l`r| z3vsY zLzg|CIZTYaRtw@N2)iwOO2w5EePDtbuUgqDFP1*_n2{=X4+fj+hgDg}@&StlvC^ztdGxiQ`<~D(I4HgzR ztHsD_UD|i6wlEw9PuQ5?+=L}tfp8k8-c#5JMHrNSbvz;IB4ui|D>k$hHtv?~gF{UT zgS)P(#b6}5&Qr5^yCd5KKHpJ_RL!lsXCxP%1XjwN8YcAj#QWb`FHLmeT8T1wmPGK<$EDl93B;|0dZsMi0jBcj+*wa0902C+SvcGzaI_PNr6d>6bCqRtk*YjWt)f%ytmMIbcne`nXb^=_TgK zR#4Uq->UL#+Kq&s@fNQ+0my@-5;zrrXqb#jm*-L-5C{|e>R7Su1af|5MU`NpHBOKZ z$mNM`I=oOAB$oiq&&vzR%*-?gRj1}=1p$haSJLGQfp8D})-|Aq>5E7bj7mn<%RV6D zA}JDl;Fgem*&?fOHk(l+k)MVTA#Sxq5plvuk)T>bue1NP-asv;w}hUEfkyR;W$q+8 z?`JV}5XTqi-`YsWot}WZEAJE5N5b!napU?z)Wb6@2zyoJT`gzYv8pcSDV{gOQcm}d z^)e^oUAQD|@bZc!>03#9{KaKc&u>Dneoxxq1yM4nzxoO@P-au6Mri273X$qcLp^c2 zuTENxAdTS3#&nc4>bH-R))28w`N+ROvh{7`|dwarU*B< zia$2riL^HMT>f2Qo-JL&jBCEp=7|qUoDM3NY;E|>Inpd>VG&BlHMNjYpo+_rHo`^Z zRWvPb70#;?)8CGK()`;Pmqo zJK_gf&jE)l8s^(#M5zjj$A?&fc}7rO3T{snRkXFW9Xy4AG;#S(kxCDQL4)CGx{f4f zQsVs#ZegoFx*h@o(AQ?mY@A(5A5up+2RI}Z2NZy^TP-e!zhs{uWsNnodt)e^<^7x> z=~|A05bV72nasIml*oTEavhufoGj){H-BiBXv4 z4YLdxOR;Q<=W6-(Tz}*UdUrziVynTKOn9|qA8SYrng4bzXeu%YOQ zC2TIy5r{7c`#TMt@SLQe!*%+{shuFQem*}X@Sa%@+g`_z1U4ISNEw?1brG)-?-1`+`Of^?c#n9ss&mG$Nq#Zz)@ z@BVy}1O|RIy6t_bU>6vOE3@b|0Vh5L`*!*8SEmTS_Z(EYz&(fp)pBfp(E@$^q|i6g zF&r@?yD-=072x7%lV1=Nl_k>L#HAiFrqs2g14Wp+Xd1P}Yq0TFu7>GEk!^7EtKw29 z3vj!IUL+&0JJHPm3NI&I-mAI3RYR#!sksJq;O08zW$RV#5Bx(++?S@v?7N%RvZxT~ z#E61mE*c#Jk|#$sJdk|gvkpC2@v~KR`ACo}@!Pm9JqLD!j*t5AZ#>@0{Qdh`d0v2>!%Z$1TDAdz(C_@3$6dhwF)ok?%4b-YoW}>tj;9* z2@hvWscLsur>)F_2N5}&<+wze%qFVH)Q#ZH6`UXl8MnM@{lBf}-?$}R@I?TzyK7=u zV!1K?qWO}7HPJ;9`N?JWzL?FQ3}m?)$4 zfGhHrgJZ0*vbEli=x8aLjeyS_ZIS-AMmg@e-cG`g=$E`yT4PK&L9EMO8?g01G|iBE zMg7%1@Gj%0xoa^u;3q*rahCE9^V>ot@AVX!T5dv-;~Yq@cN=WlEu)fC23(yKt=qqD zkLZY?GHa#%$sE`AYL`Dbg*a8t&mo50Am`u9R2GPnV$1Q-T0xn;3U_}pC)e|D2`w?< zRi?AM^P>~7?{j{giSBIf@2`64V%nzS;-uy`zR>+?Ij&Y@h+)rv%PYMkGsC(Uy?=SL zJMLj`C6{Ski3jG-mxY<205EG}IZzN`sZ)XKsp!}3H)NLn^nPqx;nfQ)3xe+_F=KPpIi-O&FT~Cas*U3VD}ggzZ(=b3dbi+epcMRVaS$PZ})hemse_?!PCI zw@d)<-nl6F7i!Xx%4C#t7m8aN@3T1?JpQ%j7q)_9>?{cC826K8iZzt_6^pp>XTRdI z=5)ubGL7#dVRfaYqe(q~ES@3{p!Pcngx4_Bt3Y>8WPfNwg0*Z}zWe~E2WzR0jK6j0 zTI@mr=N{^0hJiNCOA)f<)&6}pbr=hql)J*$`q_;~4>~6xmApuT?NT}~=z4xtl=zhv z)4a`w)jz**rgx#LXP#)N^Uc%Q%xr%w&wzc5a@XxI zvoTKD3$A91qX!9PdKOl0f22QrTmO9SIHE{&H!?NNbeSRsR(iJ(()Gmc`>-=JdN^7V zwfaJV^CJVW{rOrAQaq2ohj>#m^lJR~BO_I}&Xd>@&p=LoW*{wXtN@O5(P*>EkDz%i zibB3bVIa%%D))a$gWCG`gI^1;duYK8XAULw8y39@CT4iTcGFg^zxouWM3y~$xN4&; z3Y}*guW<<=>$NOsWBvsIIPgr$%Vzl>g~K!(oSY&@kFOj!xG_i~sHCRB3puyAjy0^l ztehL488E-HsdOGuyfs;6q*Uec`2$N>&Sk&weu zOn2#Bjakqn)3LEG2qp8)2Q7QEjPr9(?}0-iL1hjnG5bs+q_x|=p9=VSo%Ak-fn&WS z57gypa2~1PPD3Yg25Sm6pmc;&IQZdk zUC>lDd`N1l#*<32D%>jX!9~^p1y?xOqd(XPb_ON}RX==au=yDt50U5@SER^(~3i&o8sfRDCNK4phKVi%+vX1iVv? 
z$>1*^5-K8C2fuiWePBN-c{S`s*71TG_b}XEM`>1&`GpECq^S(59X0@+_2@M@Fj6nL ztua-+Zn}9G8W+%RWzdYxOD@+NYfi8PrPuZxvnc$l&$T{&^QMn{`~hg?VyVfb4t0gd zj4^XHR6MYr(TDhTIVmG$5(YzfDUy4mWC5?@eZRLYiYK#3mm0MRL)8VZT6X~>QdfQL zm6Cehah8%XVfTitwR`wNuXksSUoiO*alt8XiadQQI6|++Ce6}+E?O)-YrnFoxAu{H zVB=3oTh+p#(L&zKHf>s0E_ycea7;1Mv9A-{c6!v2nUJiW!^~y7n8C!z$hZ>vm%D-c zFPJa-7IE|MEz8{4unmv8WVQ@1U$`$uxEk?bU&$B4&vPbRPlcW#St=(^G*}lf?|&r5w4dsXN8lIiifYL|*p^?Or^V5Hk41@8jCs7sIx($0YraBP|La zTKY3l3f?hnlRg$#`K*8rFnp|RQl2?2h7I%))_1=7gdEu0vw#r@5k#X;hHU%VdY`f2 zER)z9CY_p4Rd+L1tPl!O%|iJ?@8War{p8MJJ0>&@h92N2#=N?^>JOG%Ao>&n$(EQ# zNf?LGkwXog*(L#qVnH^dbL!RTjx~nR2I^L4ZaKzPRo01P7=!n+3VB8^saRUj5&2jCyFNX+5 zHbIyfV1aBG4KQCx_U83MR00rR`uk;5_eP~YK5WT4Z&>r{12RBz2+l-!$bkS9&Nn9U zy(E7UU>+r82OB&@-7IJNz9KZgtDJgOXMn3?!f{5V8Ltu1&jl(%CE}1C<+7GwI{m?$ z*&i4j70@rf3~Z?24KA`z};~VD}ML7Jo+8<-gy9r`c+XQ?#-lY zpaXm;#{T<)SJ>y4JcND-1wd*!ADDwzR#m09tegko7s+yXRX z^-GhTQir2~qRLQOTDm5uM~>WKn(b^vsq?^_YCC^mWM1k>v^M*Q2E=( z=XC+6()df|#M5S{w?|LPHox!J?VncR zQnxwOkVr89?%-xN|DrBZ111o1^|?)tk#IG$7#nxxD!{Pe3W7u``Ssf^;X0Dm{8!?A zv4ve=qRe_^pYq)?Y#T_n;nwx!XK>L+oYK2@3TC2+(2OrLOMo{3+dX>gfL_E$0k+N0 z6&c??eE2Z<7XQPI3wgWs9uRfu5ZProBf%x5Iew1u!exQTB@jjL5jf~*dC5FO{ ztzTao<4wbg(H>Fj+x<4}!-7PGoPhUuQjd%wDVL_&hQ;irE`Ma1K390dT7Q#3uv%4y zm4<^>mC{!fwblU55{J1H(1_2oTn}{NkFgpbdfzQ(qm3y;$R*=ZGKj9-JF7Zw1(TBG zGN{5JO@Gs#p+BxYa(P_McYkQXRY4HhYTEm4mX@)ZQ1@NOhiW#kY)YK{2xl|e@Sr>D zciIE)hZ}LD-rjhFDmRPIkhw0{Ew-=dFl9LK8LX)Pn(-D~PE`}s@Nx6r&FPcA%x`U$ zKlKBiYr4%iI+RtBbEl@+Q>I>guLJm2Hb6Fo)hF!KXv#o#RB zGiSu%bjHGI(2HPRR-wYhFynHrTrhZr$asCpcw^9OWj7rpP+xlzKWXyHdDbdintHV& zErIo^^1j(GvQ#LHfami#bv^dLm;5P#ExlV*_0DslTHV~{uh%F@-Oy3HgRdmbqV8Y6 zrMssk=pWz4iQk?MpVnOpJ2}@>%%^Ic%9>s^bFs-i(x+} zvOt7dd!D(!&3SnzoUzyA92|sek-7Id8ytQiYe*8r%Kgx%a0!u~6fJC=Qbd;s)J~q& zeA?(%H_a0feRb|4OaPBYFHd$VZF!d5*8PK6 z$D0=QdMLTx>LwQ#k1V-ESlHBlOW$cbnSeG}jZVW`;JBD1q_1i3_bPW`LnudKJH;jK zqiV*(ueL2LW&;BQ;>gVJEio@6I{6KFB};P_A^0_DGhjxZ-`F?rIB@tIf+;#e6tXAs z!9v}~?$Pf;{X}ih=CL&y4TT}W4{0bqYvM;M_;)*3(6*AOtgFLMQUT2&e{fTga&aj} zh=ky;a=z6W;N@{$=HE}=*Cg}t-f3)?S?MZ~87d7PGuynowbHMRUTal-I%RI~ zbxDOuMo;T79ICM|8vw-en;4{>O45$Y=3CWnlkoiHGdM@D)p%e;U^tR?nT+vvcn$(_ z@>>p(a#8SG4MaKv?j$yE+(~Rt5kEtRla4nipRJ(Nk&fetXE`p~%T!62+yN5wj@D43 z7jk%KI-)y7qKs~KY{GaB*-AtuS%~o?~GDuQIpMg z^00G-M@3qxccqet^_K0oa#`fS(}FtTiiG9T6?&2~;OI_WnMB1TaQoV`w`0|cZ_xDo8V3t>kfRHB=B`ybui z5Zl^KDy^)=hZ}Hm<{%sY;Zx!$_}ydjhoAScW09XL=(k8bSrKq=881plSud)+-t0y| zh+!r5Z-iHI9P~!*(vKjGcOAhu=N49=c&dO9W+Mjz!a>k9^IEf4)J@I>fm))cgbg_R zu4y+RA=s2cPxjACgch-*sFwKwQ++t!wzkfpU~xqQnC+3<@dtIx!2|%t71mD2C$KdT z&dg)J*{jb=i+F7!Z3%>?1)UVfnx(I!vt`d_H7=}hW(>w{9$jdZH~_zE}q05t>{! 
zNQx%moG$@AoLS+N2lWh7UAr>|+NlZ4Ps$XN&X^c3zKsiStrZmfkUMB;s++O=)iU#9 z9%vtd+FSn@TBpBw`kxWCpwchw`(|t@Sj<_$rv+*DJ^dPaM6FNh&z}c}zagO;(eS5DF?{o)DED6o2sMI(KTmY$74j6v z+JVzS@!Ede9QJJeKsLvQ(HGqRwg&W-&SNPC`prSAd&em*aK1JRa2#=OOJF})@-yKL zVHD3TF4MpI^2k2>cYL|`6rZxge&sHyZzv*ITI+)(p6@~VPZ%ye-k3Mi)P6`#0ja+_ zIkwTx?AA8!V|I~gs{87E=3Bp^zpjo`<}uI>6AKQK*qinUUD%xhUp}bW0b|@u6UkB` zm;z@b>7+W(k;q)+@_9psId~tHp{G={DxDom&T?4UB|SYi~4L#>xrUT;`Q;IZKf!f0?n~j$rfe^8OM01cPFCF z*L*$$meR7j9U~*=x}Cc{L#Syq--YMpHApb5D=FHY*~`MLQ$JOY6fk;UpWvihkKVb$ z#1xosGw!v6JtB z_H7*|?82ut$FUT^LCRJyJqJ|vU!ca3xoKGT;hTA%Gh~AL zsZl&qE|t70iOi2gAFuZdb~9WVe3qk(y=uGfEGHZ~omwL48C}h{+If>#P;k0ryI8|@ zb20W)+>=@${UFA=sfBZ1CnyC9n>iRRBOZkoYb+jtVO&S|1Os#Ejv9mrqNFKdKmB`s ze4xW(2}rM=P=D&1AtouK^=nquh<4c^&|ZtYrHd@%-4s#4GSq=mrD-i=1jse-J-<8<1emy zi15&?r*5Bzjl=PzoapL?W6ik@xEe(;bzO-H!uF-OT>MlK3zUvp$GF--FQ{#B7 zz3y<$ZR2SCXhN2yKJ_qXnEo2!;&E;g4Rd!81G7nno>yb4wqSy0LH3qj1n5|ZgKf$H z2!6alh~ywlgXlpyaEyFmk&M?Kv7ap^S!5V>w_V+rq1D0cZgOIcm3PnDo_c{5JLa)n z0{RZbm8GQf)G%xLXE&yv{HN@?*G?bhbP0qr#IKCPNN>7u-nS-{O$^a|(T@OgbeI8n zd3ouA>l|$d%ySC{;2*}8?4YANSJK?7mwxB&@D(KM?J#MC!AduKX>_vs{+Z&c8Oo?S z3}gJmh&r`-F4y1oa6Y7C^mfQ^g7I$b`vNW}C+s z)gHdXJz1a^`M$NZtXGvdSn2+-P_&ANp0IG4(y@l;*(RkfHs6Yr$X}BkYS=x*97B3}~lN|Idr5pFNck%>Uw`Hsb2CZ-rt;OMG`?gNPZ zxVh6gCilG55NiSt)%i*B1W>Gvk%B%uR;TYJLQ?>Y?;7M=K^#V{h*!kFTAfb-g~{Psk^D#YDHEmlBu>j*$uD^eysGhnh78B7Vq=-!Dk zTFfo0uMd8mbMknQPWm9siY`FNRHweX@?igDAe$gxh%DEwp> zX!phvdQF`zq%HX@KBd0E!u^r3?(I}hd!M+O`@Ng8PLsM$9m=pDo;MgSk2Ui2m(>&2LF zMQnnA@&F9#8~fkF$Q%LM8|r-6cWsJ0K}>q7O)a{QezJsx$8~-*eZ~4;@3xN)e%Cbd zhLw*W6gz_N?x9~jM-Q4NlDTbx{Vie|*F08l7!D1iOi&H3>W{AjBX$7%4X;WpIBw?+ zlG(d9af+hyVRSzHIG4?Qg2;?*)b(aT-bE94OeKG~m28aKSfji~tdyK*}K%}C`Eyqk^OnLhtf062%i{^ z%L$ik!{Mwpt$?pjk4k}TXtHJj8k&*lN6PcV^=%hTY<{mTjst%HKd9&XKE=L11g~0` zK+CqTC%m%T6ILg1kTXW!k0zxk^n_)hvoiUNfz|WyPEQzmN$5ch5C(6)_QVIlkXgp2 z-5K{RIeNCmD*}cDzV=zzGi(J+QxCI5Vj%*rBNk7mAiDf3Pm~oQc=wVoWX}%ZQY-0PNe+0P?+{E zqT!TPACc8c9x^kI_T~|9Iz3(u0&+G6>q${|5+g-0oQU@MV#m>gFGEK5el~Tns%2~? 
zqD|w{Qx+b2bBQ0l)U(&^&W@up^R^RRTo!(cJ!RZTa*UIWNOT>IO`KF*T!Xwq3Z{<> zCS$!3AP%e4ahnR&o%!f4f?bP=i&J(WAla8@TOoBvTyV`kU*|tq=@efx{t^B(WZYuw zd=4siuA30APA67eAd-fQWjYucT!tJtbmXwIBmUKD`{4+BM3N0(%#-4EsQ^+}q&=>i zXz=7lbuR^;u-E$~W7%Y7HCVvEzHmmd&%W+OF@hOs^>pwU+U-2hvcJ&yp1T$~rCi|} zVP|VTMwUJO<~Lh!W7MGoBf_F14g<+KgYmOcgof4K&V`g&t`1L6`$QcxoL;eQR-sk% z_|ziV>@L&YHkfeUGPiGKto9I3c0fVuMhut;EB zqS(;}c1y@Tv&MsVO9}Me>-+t`fac~Lj-Cf8JD3gUUp`7e5eR62z{~P#YGDXO$+A&` zK8VY7@hH@WO4l?^AVoAB%gvq+wnrc3Mmp?u^I`;Vn#ev?@1`TIGQGNo!=~Au)b3u< zPY;X$aGk0<%3jHqoGT`Ey_YP-UyGbo>IA*{Yo2p}4AeSB3h1vQPTS9>f(xYcTi)AR zj7=e_KRuBSvpw~?lEdr#Ul-h5Ha?kJE{0xxTAOrlsig$-ynC1Y`RvmjOXb+9(u$_ZU-mh2N3LUEUIr8rYQ4PNJ(ez8iJ9U&;Wrwu=^Hv}n=V zzCXIPkmPT6*bwNWFUc5IqYn}EZDkCW3QUJGuiuTE6=3qv^Q6MY>zu+~j*ZWNyOPf)D>H zdy|(JRNC>^8WE7ZygU4iXU^pTAW>)*_-g>3O&$Q9Fsq?zY)hc#CcS&~wanS*J{PKRyL-jA?O03d}9CxV=h9gc2U$tXYR zCu-9q+4Z60A5t*BjkD2@N9ET?dko`U?J zsGMOvx?`*>FC$qc?Zb~)M2QpvWe``{(LKBBlsSO%4D?$9+V@6Fp$%+XWi|` z|41dI1|o35@pCs%5g+l>DYZ=gEbp~{ZbV&LX73!~HT1%1c6iG+io0Lz3oA0Eh2{vJ z8m0rnrk&wDwBHbcva9o)oQQZG02<84hZ|7VRM8hmbBo@0pPvgMOKs(Izh*^+;g@jFeJoNE&1%A%yH%vWrsos)Qo@v}dU_iHv0Ly*Ht4&#cU3lTEUp z&vBl0UFX?-J^wtv*Yo`K+<#mz_kDNe_#Wdk-=Ft;0h4#{GxL#^$qzA`Yq6c;$w`eu zUIqt*110NPva;8tGV;1Mp!&>&w86pfR3h{wh=w#>c_nO{eACeI0Uzz+u`bX9)XW|I z#e3veL84lkmUWw^u{?A;G$t(xhQB(lfV&Wz_?$=(-p=B-@Uc3RjK3gcEtCldpMmoU zMLb$b0fu^3s3=eAac~)glMNw=EoK{E1)Y=|Y7B76TFJXj<{d=2d3|JM(LKAvzEcw5 zSj;b1bW%#T-v=z&XnVUGYeu?WMneF#$1#CvUCr5u=|w)nt(HDF4$ahVKff9yzJRMvU)GlA;~=3IK2r)>fww{EF>c8c;iUiR*9ija zU9OEu)dhl&D#Lx$uQtzKN~p|XZnS#=S(yhVGP6-nY8l>PGxdKbS|32D!WJAHY-lnv zGNNc=ViG*kViE>Q5-&l+Qy~Hp!l6~Y-CA)N8di3=g6xLiWbmWpCw;H{Z6D(*2sNq${qrH7=lVY3YMAXL}{;zCd)l z_jKt;zTrJZB})nsBg5PF>?t11 zh^u_G@f}8b%*Uc^IeuZHU70Krv9yCQBD?E8S^WKbn*|SmR$J=?0rZy}2W_q>B*GZ8 zqwB#&?Fk4}ZAg*x4Zj}3+(1)FlLtK1?v4Nm-}D>c+DfhGp_sH_oM&wV2~AvP=A})T z$9(~nr$^*(dth+b`V&ao`5Y6A1t!GkDb=p}Fp(idIR#DUu?B?F1aUS^zn4B_EQ@EK zmLqJ8*({S$$S`bpx-PGDvBY}vAT`z4GQVO5mwVYy9?duX?3yN|k#$|qCma09ob0&i z0wqsthE*npoOrpa%hfA0mOb&7K{J;@)+k+zW!i@pCaPyhr`Kzo>1w(rd^SlQ2s>=t zPN;O(cYRE8V_JMLsI`o4u+BClE@j`ZQeZ<^DU+1QBA1+Ar1O5Fe2Yj}h*kbrVLw~a za#uqQrAb^hGl%k*dG-l#p<0?c_I5{EPmkN?X3BBS^0QypYFqcE3E?&N-bEvL+F_!* z$j0!k2l2ZGwMo%m-#oF%o6|Oc^NmeVr-@Y`&;Txd%w(tPp0St8;=Zso+I*q}nBTjk zXq?+a`Z|F`8@yfY00~D;WuyV5=fX0L7y#mPqLSA4@4U7t*vr2|G9}=3g%2GU$Te(# zD)S&*7T3Dow&&O>h%I;awu3fb()CoWVn_hLap#>|cy^c{!F} z-DZu#isC&5wK>A+!bIy!hlM|`{f06-ed_;RuR28hU5zKHArCoPwXKh+d&Dm(iDiY0 z7TtVMyOH}NVh^i~hJ7c8*dklyvdp)i5ABRALAR>7#ZtAgXe1|Nt7psb&U33fL)rP= z_@BEhnrW7=kGzfIDNkvfK5j5%kOv$MTL$)&vYxs18y3Hh&ujcn)6C@o@|F*xfoA|@ z2XiYN_MvxQ2Txy#V1eqvT7PJ^`V7~^iltA|-fVIvGN>RpDFCBe{aabZGHPn-n&$$& zC#Or7!YR1jA)$GvU!0UEwP=yE!c3F*=|(8k^9MF~W{7^TGrWi_fdp9sXC04gN(9>) zsWllsX~>^GG3=aEey;lR>6ReXm7Yqknn`t? 
z+>`J56Mnlas!wI z$C(@|5rh~65df_A_8*^j-S)~gQKS@=%0A)64*E%IIzH` z=F|Azq!33aHJMH3F{bv1MTZ!m$WRf(vg=5GLNR9v^!&nAhm7q(FUDjFS}{(lw`^sd zoQlyj{`bS@yp8(;QZ(HkG>m)+b12oD5P52_2mA!hV^7_bZ*%Gr(|)h3zNv0uSjqnm#dN44Ui+D4rNCX{mr?QHK!SeOnu!xnESoiB69ZX_F~~K z6p>?rw(PsSN|MB>jdwS*-z1xQ9H<;wdVaIayhr$JxqJDUI~|ug3oI;_Ep2RU;y~KH zID%T=<+6-7&0)2O@gQ6C(nU2OFHuLNo|1W{zjqOp|^&MJk~lO!e&e(cNX z`y&;PP^WqUcgjEL(m+{3JMdQE8YgxGGcf;~^0h+7Q4)S>i3XvSB zWE*{9SF_%qW@7}zyllS`_yb+rlk0EdDcz z3NiJTAn$29bjY3rH`7~Dpn+a@#nO&gn1Fqh{}x8d27TPuVroMKGR}VizZfuGwyH!H z3BXzo&{So!(sOkfiwnAmdRhJ(UjzlQ zz%-uUy?gh*6?Wy`j`>hWMnUtUyITi%N#be5T{K$CKOy~PNU+H=3<*X@yV1dkQnw> zFay(#$bNekjeiE>hK04?)7ZtJE9$kLX~}@UQ0(Risz5x>R8t*`|5wvf;No$?19v&i z{w038rVW=YpM!Rbvd-^I)>aK407O7Lzs)*)$G>Fwe;a}@Ac|dILqiq(TeFu8XO(E4 zNCxGDw>1k<;l2d3cWH2gx&poLvZVjU^_U{&TZ~JDq|;xR62J>Mph|@O)=`44YA7QP zl%rr6>_r8SDD6_*MX-nTS7DbDx2+0BS9eQe#j}V#Y%N7FjKg~J+PLsN_?S3Q3G zxU&o-4{?c!axy;0IDjEFy?MQ<5DUM$B`Q0V7qt<->(FV*UWb9_DnZDVwPollZ!h-W zMPNqIS|7olj&?YGA(*5W#L=@(c5CtGjKemjmoadlJ3)nh?(oXlex4rn#SDm6kXpm- z=Hr@0)`pj^{nLD|wrhFv2H0SE2|$7Qr=$I7Vk3l39a03My>Ek(vY875R%7 z10Xx75?kt~{P-FI>TZ|$x_1qK^N_+T`2l6(ir&y;e;H`3B%5(tu%+22GUk>Bgp4~R=lrDPKJU<5o9nZKo+r|+LeDLVAWGV`nngY#+?+E zQ8i)2&J*x1iXmuPYHBK5Ps1nt3yMuz_v*rVH7&;S;h(r=2F~D>HjwaU0G&Q&NJa-u zV_pSx17>lp@aSEh-$6SauiLS15Vp^DS{&ywVpa(tnuoZ25E>TjO-ia$Tvi5r5}+Csi~ zx}uI>f!#A2V$vUo{eUyzMkxP&1cPNIa)l_l8d@Z=Fe%G({XOZdgD8*XH_0?Qa}UVs z>Z~hh?d6c{BqW>fsvkJJ&pbRii849bI9b*oM!-m-c)PExw9E}~Bh3lQ5t-9N{|lIT zgsNJjA^? ze^yu3{KV0tzweq+R5B?V=~oVS96k(`kiO=_C==d7Mp3E1F-N@)r5{ZJ{s>CeLwAOr zQQi&*1u!17&NFDV=g!w`>G8HS)1j8zge)Hj7s3XnPL#a#kbNiNXe@Y@yHDWj z)qi}@#D9*G^g1|KQfRF+A_GsuJXP2R0r((T^G*{eEjow#Obb{5L#2U_*a<~t3+d*@$|{AO`-$_L4bg06e|(m^gZ_DMGxkCJp4U@&wn5B!92z2^ITIkOD2vUSC;6L}p|u zZvc%27EE?wBw`b6$r0>s;I8VM!dxt+@MId1^{Lzv1j)^Wv>pAjh2T9(;ohTnF>g&Je1h!oXI(gY$zO2QKNNgd zAcaxn@);3;s^X0sOmw@gPDXE#DV;rgwT2k1Wzk&qNw{t?zTEv3}_#mS)cYI zxMdax0pmZ#USL29!IkOFlf`Y)lEEh=Kd};nWnNN+`>0Aa;6%!X*mAmrh)P{o?GD3~ zQTRL<31(j2oaSZJLi7m7)p9u(Fc*FTM+hL5I}uX(qljME@n%sd)GKn8UIkVL86si5 zJzLlDJ5tET|2yQ34iSgDqZh`7x1)v4Pk6zch~%W#0>jUKR6t-<^~@4HkE`;4V%2nC zxxV_jG_4XC$GOZdkYWa%1SCuZV9Az0!#FECj{4grgIFj_1|A>Sm2chl_)G)P)n5hm z;$xTHWs#`S^CNuO*5kR)3B5S%Qvg6;(@7tFA+WfTz^w!lw4FPim+z!6m}(nj|yq1UvEfGx(RBelM!{Xr{Q{?1~)* zTuFMg;rssmIe8_ez&CG1gJDkY*--%{`?*@)hRTjCgP1-Bj=wLhhhzsb!SUY-1FOh$ zIFIr`ClrH+CcObNyTaQlG5!1y1W2IK3}Rd$u1f^&tnZvgx3lPo5CnR_$D1?MoVh;1NBJyRYCndhg&3``5t16W)n?!kj0CuRKFY^N*Nrgjl){Ii2u@lbllE z=&-+cW-_(V4}$Ak?BOYm{Hs8WZt-STW|RSS5MZjVhOb@c%6GxPxoF`g{e+J-uu$^B z-{)@^u-^)otu{3pQcz)G?_5NE=FbK5IW9-*>GwwAvcvl?E*IC~(rD&VJq!QylsUl; z_49Dy0((mA72)L_mypmua^@C&LYUp_jnG$G&j{GO(4pHS0%rBnU=IAdrNM$V?fLbu z11I5^c5d+9+y*h@C@hw;^xU(ChkuG1jQ@X8fxRmD*9p$B1B5Aa!%`|b@Izd% zC$)~-0dRGd_Ol-zYgs5togMiMWvr;dQ6O41`TwtI)heIk-$Gvb{Bihv5Y5Xd+_P7)e@9NK<2B~ag{kEa`sgJJV8`<8=P~+(A^uu=HCjX0t z4#!@_F1d;So!<@;^X@rI)X4#p8A)g=m>j@g$PK`U%bI5TNUD(5qL~l*>uL5(dV3_v zNQliwmR2XsMq`imxi0BC&b%XI)>*Qf?5tWUTM{e!{)9aif(oVAbm;?d zYq6!kNMCie0-9iefd*M+zHEIJ6-JO5jLroRkoBpydKf$#3@TrX`{h!mk#NQO6wUpM z@&gpNT1<&Y5f2*F*fnE#P{|XV<74%E-}s0G@uC>GvtS3kU;~^Be_bFrc|W#m!wldE z1A;#5K>XKfxYbY?+$aeDH1eM!m$EMQSbjhKk!S(R!U(nxgBHzlLI5U@bzYkCZDf17LYU!^Ip}{;{-TLxL%dZTMy#wYwt)v*ZmCF0586M#jdXq zO@%#uTGN8>d+F1#>%(X*kJVm>7euRe8j1Q*Kzup*Hk$ip$<9|?Yf3vL>2|D(7XloA zVZ12o86`AGslrDf8?k$x16$YM>a*_Vo|;{}@yaP3^-A5BYxM6EYO#x`>aE zD?721pesOhci`aZ`Mr?hhrBo{3T@d=sZU19aI{E+>gl(!wmP&+VejAO6{SRug8K2z zPfw^&Xn0t{?aeP2aTLpzaWoMv0J;xVTJmfI+5jrPrL!xAcvQ^!XXRJp2oARG;-ChE z(BxlM)|w9g&-Q2_$Fp+eGztE*vGno?_JXbOF6MdK0uEd0IeQw4AS&Se=yI;@fk%bg 
zpOmp6e}AZ=00ULt=lF{uu}}@0W;x0L{WDrd*5-k95#1LU1gS7!wyD-|sl-58OhF(3sY4Y$` zgzJ!&K@0%_o+Yv0{t9`HsYLc1x#tj)wcr>cLv`hFS|Xyso4lS8|% zeHg&8^+EgGEL^F^ID;&5%UusUAk(}(o3G8%PlSPXI-;`ki-@o&VBk51|H0VbE0+bo z%Vp#)5B&!S}asd9IB@s)7CVM6NnII?p5-(PmJSGy+M3e;=rd zvN{ix9!JxP$JE8^0!TaU0!brN0BiZ(3ki(m6ON@y7)3p4o~0 z{@a{HJwm`tm>X05=^9k5e9_a<q?dtvC;apq;kRMMv&7CjlN#jOG}evqEtN>KB*>e2xBvSdOetP`D_ z%!X+4@9UxhJjjqyL?|uW<1P$8To~4Zd{Z*EXRrzj&Fq`UYGU;r_#U{{C!(YF2mrv0YnG=%&6U1THU;by%)L4!aCen_ODC_ML}q#Rn06OoqK z0p#dVAwhx;sx1Cc2_0LakqHH^e=NvRV%_Q>DYZWbmHG5{wop~Mthlg4UYW% z`{`exuGsmwLJGX+48(Z4*teA4^E3yiY`DVf45R2k$~cNS`NgXO7l1HAdht+np;2l@ z9&TPQ-F7&bk%kB^sKELI#~8N64NZqJkRkhDLxSdjIkd)udxVWhE-djTEYZ406Atms# zpA^fONdqrGi~}5Lj~kbKdXPh5;#Jt}qZGfFz;y(`1qtDBMM*c0CQI$C%KQTH zlQ2gu22!U#27I|?J8}=?lE5|sb#%T9YHERylIwwkh?htS2vOaeo1Cn$d4+xaAP8^! zHctCy8B|n5{DRRaRt(~7Mlhqq#1{}KjRiz}=r^`i+M2}6!=vI!7X$isy%dt=fPA+8 zY8W)eVt~k3fjIJzvV5IXe?ZI!;}%!ghW(2+?+AL@*lSS_hH^61iCYWb-ezWC9gJC@ z7B|lX#9!JtHfeL{7)<^o=aqoml-ex;9?#ifs9oQ0dARFvw7Q+z5)h-iM!%2CGlO7> z$F%L#ZJ6CN3u0am#RXL%$yCLqg-H<=5H3z6Nvw>*j0ja}(eJw2hS;fK7)AOtxOqnfVJb$P2|1 zEkJ!MuCP#$Aq}ZkAT?p~Mrc~a0$_k^%2i#*>H%twH3P9jVSDU2#K9J1kw5`91g*dz z(34Q!w(Af*V%!0^g)m(K5G^SHn-^pIJ`e9pfAP0M%sK$;5J|te>k$6?x`T^ZyK*n> zsyPVn!0ht2ZP2|+M;?BBjz`G!_og0AR=s?g0X0y7d$AJkimeUXnenC7IA{~?HY@90 za)ued?WDP3pq?d=cWycHa?BtY?trtN+o~YyInKny#P9@s+9UF%K7c2mBNArd;Y1A| zyiJpQ8;Dk*d5};QmEr(4_M+7||1gvvHt=>#JQL~OnN+y%4T0g06O@oiR3H+zjoxhv zjg}=SCaS9X1q3idP=j!(CR?rZXdB2I`xBHNM9?`7?i&Mb^i#m$oisWpNG=$m3y+ za|g5&bL)0~oWWn?TW(y$ZlF*h$pLrIj96Hg(Zd5pe9o!gE(p*M+DZBz zB52>$XZK)s z4CFef{VNXuT|)>xqTM!xF{AV_J6*wdh04JVq+SeR*49ehUp#$V5y4f(Zo(-__~OH2 zv;>9a8d7YM7)14I?6@j;CKaLEyI}E;V2nd}8@bosTfho}csH(x?=B6&?#XUfy+Q(* z0QN8MMknhE^#y$2bnQ9rDPN==Z9NR18g`jQ1~|Qy&!8$zg6wp5Zc2q{6cyZPMnkrHA;Q8S`H057*r)&_b>^rW7pjQuc7JjU zHA4uWS}2m(u4*;-L9lifT**NR$|rW{unV4?hOpZ*0=96e)&H<2Cw5oz^N*u?gXdXY zqKF1#Glo%uOdR)%GUed;NY7vhq=nNdvDQRa{l*FZ^I%QnKgP1c!st=7_39MO`A7!b z`+0bae@zd;-;Qn|=?nZNqD({()vaFsRkwPpgger(2J7}A1iOR z4l-4s-YYk1|8#{eux0!Mbb|dL5dnS!YIZe-pCBK-cl&4r2x?g@H^>N+W!@Yx(%f7W zupXVcF0|A_Vf|U2rMi=&%W(GLMZd6MR^6SQav6=Y4qd}xB}3)cHSfoUfMWPwTsp?Ac$T4!EjIIi5wS5<_Dv^j`?-EEur3Z zI8a56b>^j9l&L#Sc)s?~-L$jE^7G)H0ou1%Wvt;)iVzbM0)RyBJkUQtesUWL~S6IZUp9@ZdoO$iya6-vU1@>X54F>FEu00+z1|DA0~G9niYa zuBN@6Th|~nn~n?E_3yuRU#SqvzFon2u`gksRprd6g5zl3r@3<{&!ptfaIZ6b4yx$W zWpG~^QE-jdosXX@(DoY3G<&9a#p#y1W%)t~|CQu(XQvzt^i+$bW$JVyE22o=xtE(2 z=H3nrYPO2YnT;+8tWF(D!2~&8%pRKDJi^Q_&K}S;T&6Plid?SidI<1K0a84(KI##&@IOJj(tghLg}MeH`%oJY5eSJ zeM;*;zAx>ieNEdmacB7x^N7MiR!y;;e^vX>r(Kd5=+FzNk~AKE-dz59rp%Md`gT)< zblRMYX1h`p_fi_S5iikt=c}W!wpa9@PlP&M`xLKIG*lF{I3tp0#$jV|Llz07khg&@GVLn-=V)hCV|6dJF2)xciSeCQ;`lp&ZU}J8U`%vw zQ{mf-H!RrFn)CGGYo8yZo$G$W4waj`D^N2`k2q@2aOo*7qRQ18bA(6};qhBuUDt(+ zSv#(A8XxR9u^lHiVdtA&&wPugwN{N?1w)RwC> zdj@sfy{=NQPMXP*MXuUUGcM`cK2EQpDvw~MilUz_+L*4UOP`MTY4vI1G?3}C4`=9? 
z_u9C->}_~}6k_1pfB(Y+b^6j;5-jj7rsYelQJ;(-=Lx!rwjlrHB+6yLaNW9czhV0s zl&ab*zyW@9fx1T3+)$0dk(PrVoE#ij{`OVS85pW!iSVCxTPor1l3#iUeYV&VMeRlE z?W7A1pr)DjIbi(Vd9SeP@)eb$xo2OJw?=-bP!aQfn4dw?p)Pume5Ea$1S7SY30foH zx-xAXsD5Zv{NVAigx|UL29V282rQ^+JVcmVe$xai8_pp{i4WMjdmLA%klz+Bq>ef+ zszSlLpc{Y?h#WQVBSSe*cEw602Cb@66*Pcixb|z-X{Z^f0>|&cSA}yMJVeT4E5!)d zg3!lZ2YkV`Z-J|Or0RXdo>q22i_Xg@B%H0`z}E{bI=|5=PAjo=4GTRNc;Tt4YTe(G zutU-vhUPDB(p9O7DYf(cSXvFvHqgJ6KJ6I)Y{L1h==)qc2x9E4g$cW3<4}CmW!XnC z^h`vu1LqWXWLf#jqUvz&%mo1SG(%P`{JaA*1RO>W`dEQVFap@lXq~2DSdg>*Pa&>i zr{r{30U{SyQ#%2^05q(I&{0-dFR@wfF@HN2a7p6samMN3e7G(*d`*qx0Z=JZ;I%;cV6H z8&{H!*~c^nhkkx8O{=c$uxOCCxl(a-u+=>y=)`lM^;FtjY5QHn%RP5(oVB-=Ki4Iz zZ}w8QPlu7tEsNQ@^z~*^+I*aPVj4q~rB#TLqpyvLtY+PY`s3P_cDAp_tP!uDwC*v*#ZzIHQdrH6J`uj!`UOIvx3 zX=R=pn9^N1l1v zGV&1>@P?x#=@EGC7vKuv|Mf#rnh^9JCH-;62Geq8!JLtJ6baUY`uF1E?@CN?odyj0 z<_c_Sq>4H{x<#8 zz6Ska#U%NP0_K6>OQN|VehoYwvPfxH-Fd*P>oP-sf<>WB{rIK5;<4o^lCxW)tV(~q zlWp}5p^FCHh|+uwJqP#aj`Amu_gp@q-<9#%YTjeqX8h=;|5tXkkc4DcwRtd8x=a<- z_s9l~d50_O=hl45BXYtruyA`@1|O3?525WrJ!rWjs9t&AffmOWZ4nHpz3@DL) zNA(KmXv=!V0^E=j^mvO=CbzNDUS19Pd)(!d9-XDm_NCuV+S4oU`CI++bL*HO9+jkz zya*kDX0x7_Nb;}AmJ1_Mk>*ObREP4k3u**by&qOgKn;W{#KLiE=m^* zWH+bpdakUwyCkI59$C&YNuk-Cs*9ToYBZ_tPm?zH*6A6*eAU-vHZ5DxYBT?ut}IO! zxp`1adO2>7gx`s%*0!bloqE(qGCHF(n#jJpEZ4C=9UStr_02I3#YC?y{4jT#+T5@X zd6Q-5sb?i5Oq6!+d%M`;8w%Q#qpF22wLVVrXXlMn7TQ$4u9!KzfiT7C!DMYB{d~uA zMpYLBSsrUpl*A7P-L5=e-FNHR8xs}_MVIF1Iwu9>7F<^zZgcxuY@2f60L{*@0<$i) zwlr-PptIh6N9gAy^w>)*f%XP3V9P|NH(+If0GDFE9~&(9T-6=+Lkt*I2jhSTQt~ZA z_dJ<7e0Lk_zQA#ynJX9;%TVzK9N-lF%EZnqYP@X9N^@$5}Qh0dlWimuAlW~Q3 zT%*g^!rdM4)2wY{l>kKqxu88CtbX|Wn+N#v>NnfBun@Vn1}D^f+@PL#kx9O+H`{UQ za*9ep{k*)@$-$#~L%#+%x^8yx&NDGxNjdCORH!~U|1O!W$a4J4O0Prz@cHz|XAX5f zp1QJ*5nKD>W?eN#8^hJK8gc(#71&4loX`_1^R+{C)wu;McLm18GLQnXj~Tu z8mfW}oiT+XPI0ETQwi>&OcB$kzVr{5_wnm=o~cP`(jSWY&@7m7Yj!7`U-+0SloL|o-;SGyL zh^*fhikq}-_ugvX57x&IWDC0_DtKR2tL$%-Sl4x>SbzB;YCg(TN*}{Sx3To?&~vNp z#H3i86bG!7l&1l~JJy;KFJYM70131k2)}X`p~zbcLcao z(k?nee!lCN?k6bnw~l|T8YgK=5vH>Mv7+#vq585?rrpV#*2w`cC3^Culy;?guT`gg z!0gqaEuZ~LJ=CY~mX=XkHWJK}KE2gttBjuUTxB)6Ks`<$B7Avx!|cHTcI%ebB(^pVtY4SrDLz3IwoAGb-E^UeqoQ_n$jQg75F)7vR&3_&8off|Qe$&e1gcp5d8OFEP{ z0krP8u{^@el`%}7>3j5?2rBi$n5?3v=GUgaE06*qT(xEZ9GfyXPt&dEV zQ^!?oHtY^De3$IA$YG@6{9~n7l#RC z1IpfY5SN2$~>sc)>0n+XK)Z@GukP`s%qnkt30l*tyZxWPi(yHJ#<4IR(Up7x$`r0(_ zwadAzd)1k5e+M#=QGPtEu0$4FJ|bQiy>Wb*Y*=aVXMM{0`pPwxQvE%qv<;|y8AE@am-z-Trl?`?=E3b?*e76PE zXcV(=%W4j4ILJQuY*9MnW!--};r6%k;%#LkxedvQFFY-34o=7pX37P6TWn}Q^i zl0v--_Jo4%pcGU^yb$2>R{x>=FjN9Wd;;V3Q2K9XzT7{&x!$)K0CuoZ;w{(vLCsFi<*6{>~S0N8UC?WKmh<@FV`l8s|9`Taaqn>%WiuRYvLMr6t#U5jB} zo-h6h`I^}5N6M9MknH7v2CsA8gdL3P!Z!xZ%~{{>eN&{zTnyAYt?7>!qb|qDL~l$2 zE*E&olenO<6Ob4XuR5c(o?<$WEv4OJEbr5&rW%PMjh3q0qu-^>IoZiQerK^~)N~;2 zpsqTRFz9q9Je4nz;6HLZs$5o>u-5nH<6NOchSF_z&tDM+usWZCoR`z6@ zE-N)gSg8AA_w=5!e2wAKa+wH;6sfzSt{?cg)$jI#Vb86+&1m*|Xw0V?^-M2|Y%Bf>~+Eg~#S%c1=m z{A+l|;R@C~{~QLQX-c?JVXtsR(~!xNZTF!GWPT zHO_Qp_IquL6>hj%vygC7Llrm-aXx^mkD4}q>(7TqNCjc|RipZ-#`OW9&crT;l|%vY zH!3PBUsEsCTeIlRHw)=@UFylc{$ZQzRJik$FHl;IuAeWz@7XxMa^AC{KOf7ZE3va; zUQD<}^jj^bb<=IPq+Mxhsr*zA)!y~b9CV$W1!X*SRW-A`g`vs}uJTL0n`?@W{auk( zg?kpoz!)avrwvK0F6?@~n3>ob;A}4}tu?5&X|Sbr8W&5@u4}5B?OEG6%(D=BY=3H^ zf12~pYLN(+6B(&`TrV>w*@Hse&a9QP7wV@w+zGsTmDW~pK*Ve)ot?|?ic)1z_BUcp z-D1^@!L@#B?VI0yxG1)U`)s`X*!(yf-Y@)1gC`St5Pr`4W=Rg!E-)GZc1pQ63uxgu7)XqI zoZ?|~{rdI6PT=I11#XT5%QU75z`DW%SR&eI-nQaY6~93otpMV<*Pdr43 zSD)|&aIB)%f-9Zp0f{2TF6K2IvH4DRPI9)yH$U3`YNb}1xa(4lMb$=ibs_OB+6x=5 zCtt0ZNs1IdAEmcmu%Z&(kUvS9)|qC;J3>jp+ivG0OxVv-ojhT7#l52` z>O&TPE1dL&Z!WlYz%rlVwj^}bg|)Sy 
z@kZ*6ypOyj{TSAYkTjyPz3<<@-@kxEXQ=D%-T*XB5Y$L3-fW`bkBIdj#?fjvN+U98 zfYdd-Ks8SLW=ri>KJ&Wj$`{|c1>`2;D@cZ>%~n**-OQuP@^3W;fJOAC1e8Qg21G7%U5rx?CkD}qF^En%^?c>ZW$5DLD;)|m_)@-$YG`p zivvuluDR(4eHr1Z$TMirumw7dx}Nv9Y(H}qS6(cRny2V1ci(hdhGwE6%$TiQ0@f1< zX~Yau(jKlpU+Tf-+qc);+(j9^yCknQ-1VGmQrEBPXJYI#Ydt4CSRmJx$hLX2$kKx#ae6W>2w`W#+Q_`?MY;CUdT_F` z`XYrR-^SIzB8b<5y?zl0>Hs{P5q>A@y*PsLw3$%c?<|t_*g=OtECX-91K>z9QV)WJ ziBF#|Zf`kpt!&UJe-9B6m8AUnvsa-7n0}FcaRm2_k9>t^d@SVHUP;OK#G}P0fh&ty z%xNKUf~TJFkhdQ{3w8AUpgBy8|IOQRRZ7u=+dsk=)Bcu6v4-X`3zA29gzF5-_G@G+ zBF?a_KD4?{8s8AXDIknmSu+7c3#p}gSi2Pgl@m&;AZQloesK^#3L+wA#s-A*I`n(d zfGz^-L&>@da*i+>;8USjfJ+4v5)TaCr9=1huB2oNjNa{B7~4-gz2iq`o}z-HVjS?+ zSh47}qJokV_*<~3h52Yg1t8uQm|Ev%IvjE-5(z0)-KyJ(q;JH-wao|!WUcDaQ|7uq)! zsP4N)d~GEO-J2bf3Fm}n`Ad&^DCuCk6$A$X{=gok4#v8{sKoV8^*>f0*C2D>J=!D$ zpA~k6T?X31N-*#<*;37IeiW5lP`mjc@hy;Q`>(bqcH-=Bj{T>&6$e)H_*_LRe!ymi z01jwc9a&;}R$SXNHnXi{AFeB$Kfk)Z$d7sdTrW6*(&XJCd~<;+d6`Hvv*d4%{VTW< zUIH)?BuK%o3uBoG-XJD|E?^>H-G|$m`$GL#j(w$H|H-i*2YYwFwD1w%Mt9}SpVR7tA+92^ z-!DB_`|%^9ddEd=AinDJee2y2JANr(mtao0OM>*-w9ts@ZWy66`x4EmqUScIW?nDx?jrV979yRz zF810r{D)1ii%V~c2gfQNqg$%M_ZiIJKe#PfEUs*~WQyd-|A6vMxAC;vqI7QviY@yi z8q|%SBh3)t2+To5ZS|%vt%wn5pvu8}+{B>ooon+(d%~lY^n7aBa(eYJAxWpTpU!i$ zfqA|L>Pv+R(-kYxZh`OH3ryXjT{gNrZW`VETzr*IdwE%5Y;k6*lRm%_7wfEFE0pNx z1#Jl+JI>CeC|=_3c_3G=p+T$ruJvEWgoyYlJY#JgqGEyz(~i3r>8u=-@`%OqKxFfA zRGLOY6MPin^e=Xy2G3*cylK^7)l0*%=D2-+fRY8F1m{5;C_C^^7f*0%mbp0ox)j!z z0OOS~ZD1GH&OAVZO=4r%iCOk5DL*VwHPaANUbL|s%XOQZN_fimuGwnyML5f5WM@{8 zd3b_UQbYHD4!&F;+e55suRhttkJOSIEp{k0iu(OzS6KT5Tu z_{04xhSPUTqo^Wn22;HFC#ayS2jd%xIRx@6aK{bdj_ak1+u(oq-J9fCtVdN#97h3a z)M-lxgBIs+>4x`@Xt;2Vx->N>y*Cb7qBC!|@Qq*6CUOFeO%;GT4}{O$g^5fBP0ip9 zgt!L8F%ky6ubi*|l^5`ju5ZiCbhZY3=pXu#dgtp}SHs}__%y@n5TEK_TPYVzYUwUG zC>K*<()dGJHfR02h3%g$h{h$XEe28>*3Or|2ol$!f0K0`iI z$AUuY=~O3sv)2u+t2Z&tDN6S2qbKMdcB-?V^{Qx!AKL-6PkhJzYRVInzCnWKL9+O(5eyw!cpIQN%vwlt58qQRE`8$E8%3`$3GAi3#j-># zL0|4TB<_0Wi-mwf$uNt|p&It;q1aiL2_l=pYoTE?vaeb#(*oW(bsn1S1-fks1ywI{3O;Q`;e)NT!V_NcV*|e#(lt%KV zPjsbflZ7jdx$;+Ny{FpKGDNK>rJ`D6N>ocu2ot$>NlOGp-}M=v-xL~Aa~zLQh9)j` zrFOZCZU(vTq|D#o0s3#KZFG!X@C!a*>8?L6>S2wEzfO%j$Ju?s0O|P$UKe^2U1DIe zOjT4+z>F3_rSi32T8EB?M&i=MQIB4jaXEgzCr*(%7^)P&+BwSqW%d(JQ+2vE!u{^-YY~tH>&ThJo z!$)62S4Q9R%3xv9yu{Awz^V)hMu9L1g$=<$D_1A%^}m$3(zbK+O8oAXFvpz{e|z7V zP&lK8fY2TIVV~p^EP!P6z}YXUh;OJQ`q>3AZ+DRQZ5z67S$_bR^7DT>dc-3=IoS^i zD35tS=`GMW+);*{oV?at1xHcrFMjRvbBnbT^m{h@n405ssh5X*%-h+yynMpCS8g8e zo31~S!d+f4CRVh#tY1kMBJs#%Y}DLg>*!9AC0|yFuv5nNf+e5%GBRAHhD#;%-MZHm zmgRj?7MEwT6if7w14{fXMnjksgG)ArOIoUCHB126O2T!at}tILF-KtAhip8+tDA{q zm-1>7Qh`ef7=1JXR)ZKFkn*MNn~`wmSmg-u+Zjo?$TDPgeIvIkAxDa(KIcuej?xkL z>>nb98A{2apM|n*t&CW6DL!#sUldqWOA}IN0sm?u=+;QE64mo`zO@*(YJJt;k?7^omn; zNo3a>tmQB3FFVv!%>8&|e6vGH$~(r86aQjkpZMKOa7US> zeFQh;?nB&_-wi@(ctETUND6XkliwcFfh=RjnHxK5Uj!V+7{mFwxxok2dM`plX%yLD z3MNKj zj^M+=W8Y)x2n-KwjSe&1Kh>d|JpURVs0v46H__}m_h=`Y`Zd#M+y>Z1 z^NQ=rq`Vz|#chBDiHil@e|?}EguJsL^%9mx@#hS!amBcCLWMBYe}tqRSCEkO`{_KQ1(?yX z4+g&C^cm1rF@&C9pjsC$&s|@4E0434vmG%H9trHxHk4f!XcnF(1rQx)>z&-cT&#x& z3xerJ*IvLEDj{vA$F&xS!#HV>5J?sYk$^uw1g6ZJm1>JEP~o7NB{&iP{tsTzb_yOG z8mijK^#8NwjY;84@!loxUf(IXdt2)rV9rs#vtjpzI3$l3A`pY|8#&iw-ZUP_KgQP zc9!pdM!=ebp2{ERNWk7Kh^t%L8POiLb%KpZs0h}BM_+>;y^iZ}I3s#F?8Ku?QGG1- zJ+?uo^viSrwPe_qs@V)R@&l|ay{Ayn@K}OFhro?t@0B^43H+SQ!-9XpnK+Q z7^ly30;AgMYQM#a_8Nf7EsXdw$tu7=NE{l; zFaU2%jd$ z-zv*8T#C5%n#Z#R9*a^K2M1a^LNm=Dr+dWjh06Cn|$d4J1hI?ZC^-yzD7UWp!5`0yUJ%fke76>2!!rKWn% zEer?^I(Mkvcn0qQArR`*>0%1v(0jm}o@oC)J;BtjcJ(Uip4^0zb#sb37xX6g6OTUf zwiVIavCY{qyiH7g;jb!t7@P16R1{N)dnd8I@)xRrkoi8vnUtPk+aJ 
z97c+oOB?_}(d<&XB?#Ev>nrPSEV~MN4Hl|JCw71QpRsWVH0A2`w4nw6zR)M((kT_k z+qHkL0{k|9gtPb%efoI9@1#HNBs$YJ@w@*4)m8&QO@1-S0)fdf$368P70qE^a@fA4 z-|Tl#BEaMV``vMAYyl?hK%0}mcbv_)6OYK~eW+B}#6FKX_a6j=c%bZmPt!6PcXwJ8 zc&0F5N0q#IT%jOR09)%^vQaC}jH@pApJwUCe!>okQ#u5-Z|t>9M=!WKn+E~1|9MQA zlDJ-U{wqzrpee52**O6rHkG$Di`H!VFni|ew9K$p!z%A61;i9N&rLySMx%de6) y|L;(R28wCN|Ki&$l*Zl_J4M4_SvJxQ2_d0l=h8+wjEUes^3qD@)6ZSI_kRJI2YIOg diff --git a/x-pack/docs/en/watcher/limitations.asciidoc b/x-pack/docs/en/watcher/limitations.asciidoc deleted file mode 100644 index 9ae7273de71db..0000000000000 --- a/x-pack/docs/en/watcher/limitations.asciidoc +++ /dev/null @@ -1,28 +0,0 @@ -[[watcher-limitations]] -== Watcher Limitations - -[float] -=== Watches Are Not Updated When File Based Scripts Change - -When you refer to a file script in a watch, the watch itself is not updated -if you change the script on the filesystem. - -Currently, the only way to reload a file script in a watch is to delete -the watch and recreate it. - -[float] -=== Watcher UI - -When you create a new watch or edit an existing watch, if you navigate away -from the page without saving your changes they will be lost without warning. -Make sure to save your changes before leaving the page. - -image::watcher-ui-edit-watch.png[] - -[float] -=== Security Integration - -When {security} is enabled, a watch stores information about what the user who -stored the watch is allowed to execute **at that time**. This means, if those -permissions change over time, the watch will still be able to execute with the -permissions that existed when the watch was created. diff --git a/x-pack/docs/en/watcher/troubleshooting.asciidoc b/x-pack/docs/en/watcher/troubleshooting.asciidoc deleted file mode 100644 index 20d599f8f5215..0000000000000 --- a/x-pack/docs/en/watcher/troubleshooting.asciidoc +++ /dev/null @@ -1,63 +0,0 @@ -[[watcher-troubleshooting]] -== {xpack} {watcher} Troubleshooting -++++ -{xpack} {watcher} -++++ - -[float] -=== Dynamic Mapping Error When Trying to Add a Watch - -If you get the _Dynamic Mapping is Disabled_ error when you try to add a watch, -verify that the index mappings for the `.watches` index are available. You can -do that by submitting the following request: - -[source,js] --------------------------------------------------- -GET .watches/_mapping --------------------------------------------------- -// CONSOLE -// TEST[setup:my_active_watch] - -If the index mappings are missing, follow these steps to restore the correct -mappings: - -. Stop the Elasticsearch node. -. Add `xpack.watcher.index.rest.direct_access : true` to `elasticsearch.yml`. -. Restart the Elasticsearch node. -. Delete the `.watches` index: -+ -[source,js] --------------------------------------------------- -DELETE .watches --------------------------------------------------- -// CONSOLE -// TEST[skip:index deletion] -+ -. Disable direct access to the `.watches` index: -.. Stop the Elasticsearch node. -.. Remove `xpack.watcher.index.rest.direct_access : true` from `elasticsearch.yml`. -.. Restart the Elasticsearch node. - -[float] -=== Unable to Send Email - -If you get an authentication error indicating that you need to continue the -sign-in process from a web browser when Watcher attempts to send email, you need -to configure Gmail to -https://support.google.com/accounts/answer/6010255?hl=en[Allow Less Secure Apps to access your account]. - -If you have two-step verification enabled for your email account, you must -generate and use an App Specific password to send email from {watcher}. 
For more -information, see: - -- Gmail: https://support.google.com/accounts/answer/185833?hl=en[Sign in using App Passwords] -- Outlook.com: http://windows.microsoft.com/en-us/windows/app-passwords-two-step-verification[App passwords and two-step verification] - -[float] -=== {watcher} Not Responsive - -Keep in mind that there's no built-in validation of scripts that you add to a -watch. Buggy or deliberately malicious scripts can negatively impact {watcher} -performance. For example, if you add multiple watches with buggy script -conditions in a short period of time, {watcher} might be temporarily unable to -process watches until the bad watches time out. From 6a3adbd93565299c9e2c09b3781a2d051d22550d Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Thu, 6 Sep 2018 14:11:27 -0500 Subject: [PATCH 24/91] HLRC: split tasks request converters (#33441) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the TasksClient request converters. --- .../client/RequestConverters.java | 30 ----- .../org/elasticsearch/client/TasksClient.java | 8 +- .../client/TasksRequestConverters.java | 55 +++++++++ .../client/RequestConvertersTests.java | 81 ------------ .../client/TasksRequestConvertersTests.java | 115 ++++++++++++++++++ 5 files changed, 174 insertions(+), 115 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/TasksRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 9cb853c5e5091..c86cf397621c8 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -30,8 +30,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -111,7 +109,6 @@ import org.elasticsearch.index.reindex.UpdateByQueryRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackUsageRequest; -import org.elasticsearch.protocol.xpack.graph.GraphExploreRequest; import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; @@ -138,17 +135,6 @@ private RequestConverters() { // Contains only status utility methods } - static Request cancelTasks(CancelTasksRequest cancelTasksRequest) { - Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); - Params params = new Params(request); - params.withTimeout(cancelTasksRequest.getTimeout()) - .withTaskId(cancelTasksRequest.getTaskId()) - .withNodes(cancelTasksRequest.getNodes()) - .withParentTaskId(cancelTasksRequest.getParentTaskId()) - 
.withActions(cancelTasksRequest.getActions()); - return request; - } - static Request delete(DeleteRequest deleteRequest) { String endpoint = endpoint(deleteRequest.index(), deleteRequest.type(), deleteRequest.id()); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); @@ -760,22 +746,6 @@ static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) { return request; } - static Request listTasks(ListTasksRequest listTaskRequest) { - if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) { - throw new IllegalArgumentException("TaskId cannot be used for list tasks request"); - } - Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); - Params params = new Params(request); - params.withTimeout(listTaskRequest.getTimeout()) - .withDetailed(listTaskRequest.getDetailed()) - .withWaitForCompletion(listTaskRequest.getWaitForCompletion()) - .withParentTaskId(listTaskRequest.getParentTaskId()) - .withNodes(listTaskRequest.getNodes()) - .withActions(listTaskRequest.getActions()) - .putParam("group_by", "none"); - return request; - } - static Request reindex(ReindexRequest reindexRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPart("_reindex").build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java index ebba636b8fa05..3b957b2defb0d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java @@ -51,7 +51,7 @@ public final class TasksClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public ListTasksResponse list(ListTasksRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, options, + return restHighLevelClient.performRequestAndParseEntity(request, TasksRequestConverters::listTasks, options, ListTasksResponse::fromXContent, emptySet()); } @@ -64,7 +64,7 @@ public ListTasksResponse list(ListTasksRequest request, RequestOptions options) * @param listener the listener to be notified upon request completion */ public void listAsync(ListTasksRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, options, + restHighLevelClient.performRequestAsyncAndParseEntity(request, TasksRequestConverters::listTasks, options, ListTasksResponse::fromXContent, listener, emptySet()); } @@ -82,7 +82,7 @@ public void listAsync(ListTasksRequest request, RequestOptions options, ActionLi public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options ) throws IOException { return restHighLevelClient.performRequestAndParseEntity( cancelTasksRequest, - RequestConverters::cancelTasks, + TasksRequestConverters::cancelTasks, options, CancelTasksResponse::fromXContent, emptySet() @@ -101,7 +101,7 @@ public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, Request public void cancelAsync(CancelTasksRequest cancelTasksRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity( cancelTasksRequest, - RequestConverters::cancelTasks, + TasksRequestConverters::cancelTasks, options, 
CancelTasksResponse::fromXContent, listener, diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java new file mode 100644 index 0000000000000..93b407a82fe51 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; + +public class TasksRequestConverters { + + static Request cancelTasks(CancelTasksRequest cancelTasksRequest) { + Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withTimeout(cancelTasksRequest.getTimeout()) + .withTaskId(cancelTasksRequest.getTaskId()) + .withNodes(cancelTasksRequest.getNodes()) + .withParentTaskId(cancelTasksRequest.getParentTaskId()) + .withActions(cancelTasksRequest.getActions()); + return request; + } + + static Request listTasks(ListTasksRequest listTaskRequest) { + if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) { + throw new IllegalArgumentException("TaskId cannot be used for list tasks request"); + } + Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withTimeout(listTaskRequest.getTimeout()) + .withDetailed(listTaskRequest.getDetailed()) + .withWaitForCompletion(listTaskRequest.getWaitForCompletion()) + .withParentTaskId(listTaskRequest.getParentTaskId()) + .withNodes(listTaskRequest.getNodes()) + .withActions(listTaskRequest.getActions()) + .putParam("group_by", "none"); + return request; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 32c374501073c..12a285fec8887 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -29,8 +29,6 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import 
org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; @@ -145,7 +143,6 @@ import org.elasticsearch.search.rescore.QueryRescorerBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; @@ -184,7 +181,6 @@ import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class RequestConvertersTests extends ESTestCase { @@ -2000,83 +1996,6 @@ public void testIndexPutSettings() throws IOException { assertEquals(expectedParams, request.getParameters()); } - public void testCancelTasks() { - CancelTasksRequest request = new CancelTasksRequest(); - Map expectedParams = new HashMap<>(); - TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); - TaskId parentTaskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); - request.setTaskId(taskId); - request.setParentTaskId(parentTaskId); - expectedParams.put("task_id", taskId.toString()); - expectedParams.put("parent_task_id", parentTaskId.toString()); - Request httpRequest = RequestConverters.cancelTasks(request); - assertThat(httpRequest, notNullValue()); - assertThat(httpRequest.getMethod(), equalTo(HttpPost.METHOD_NAME)); - assertThat(httpRequest.getEntity(), nullValue()); - assertThat(httpRequest.getEndpoint(), equalTo("/_tasks/_cancel")); - assertThat(httpRequest.getParameters(), equalTo(expectedParams)); - } - - public void testListTasks() { - { - ListTasksRequest request = new ListTasksRequest(); - Map expectedParams = new HashMap<>(); - if (randomBoolean()) { - request.setDetailed(randomBoolean()); - if (request.getDetailed()) { - expectedParams.put("detailed", "true"); - } - } - if (randomBoolean()) { - request.setWaitForCompletion(randomBoolean()); - if (request.getWaitForCompletion()) { - expectedParams.put("wait_for_completion", "true"); - } - } - if (randomBoolean()) { - String timeout = randomTimeValue(); - request.setTimeout(timeout); - expectedParams.put("timeout", timeout); - } - if (randomBoolean()) { - if (randomBoolean()) { - TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); - request.setParentTaskId(taskId); - expectedParams.put("parent_task_id", taskId.toString()); - } else { - request.setParentTask(TaskId.EMPTY_TASK_ID); - } - } - if (randomBoolean()) { - String[] nodes = generateRandomStringArray(10, 8, false); - request.setNodes(nodes); - if (nodes.length > 0) { - expectedParams.put("nodes", String.join(",", nodes)); - } - } - if (randomBoolean()) { - String[] actions = generateRandomStringArray(10, 8, false); - request.setActions(actions); - if (actions.length > 0) { - expectedParams.put("actions", String.join(",", actions)); - } - } - expectedParams.put("group_by", "none"); - Request httpRequest = RequestConverters.listTasks(request); - assertThat(httpRequest, notNullValue()); - assertThat(httpRequest.getMethod(), equalTo(HttpGet.METHOD_NAME)); - assertThat(httpRequest.getEntity(), nullValue()); - 
assertThat(httpRequest.getEndpoint(), equalTo("/_tasks")); - assertThat(httpRequest.getParameters(), equalTo(expectedParams)); - } - { - ListTasksRequest request = new ListTasksRequest(); - request.setTaskId(new TaskId(randomAlphaOfLength(5), randomNonNegativeLong())); - IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> RequestConverters.listTasks(request)); - assertEquals("TaskId cannot be used for list tasks request", exception.getMessage()); - } - } - public void testGetRepositories() { Map expectedParams = new HashMap<>(); StringBuilder endpoint = new StringBuilder("/_snapshot"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksRequestConvertersTests.java new file mode 100644 index 0000000000000..ff6726faee18d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/TasksRequestConvertersTests.java @@ -0,0 +1,115 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class TasksRequestConvertersTests extends ESTestCase { + + public void testCancelTasks() { + CancelTasksRequest request = new CancelTasksRequest(); + Map expectedParams = new HashMap<>(); + TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); + TaskId parentTaskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); + request.setTaskId(taskId); + request.setParentTaskId(parentTaskId); + expectedParams.put("task_id", taskId.toString()); + expectedParams.put("parent_task_id", parentTaskId.toString()); + Request httpRequest = TasksRequestConverters.cancelTasks(request); + assertThat(httpRequest, notNullValue()); + assertThat(httpRequest.getMethod(), equalTo(HttpPost.METHOD_NAME)); + assertThat(httpRequest.getEntity(), nullValue()); + assertThat(httpRequest.getEndpoint(), equalTo("/_tasks/_cancel")); + assertThat(httpRequest.getParameters(), equalTo(expectedParams)); + } + + public void testListTasks() { + { + ListTasksRequest request = new ListTasksRequest(); + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + request.setDetailed(randomBoolean()); + if (request.getDetailed()) { + expectedParams.put("detailed", "true"); + } + } + if (randomBoolean()) { + request.setWaitForCompletion(randomBoolean()); + if (request.getWaitForCompletion()) { + expectedParams.put("wait_for_completion", "true"); + } + } + if (randomBoolean()) { + String timeout = randomTimeValue(); + request.setTimeout(timeout); + expectedParams.put("timeout", timeout); + } + if (randomBoolean()) { + if (randomBoolean()) { + TaskId taskId = new TaskId(randomAlphaOfLength(5), randomNonNegativeLong()); + request.setParentTaskId(taskId); + expectedParams.put("parent_task_id", taskId.toString()); + } else { + request.setParentTask(TaskId.EMPTY_TASK_ID); + } + } + if (randomBoolean()) { + String[] nodes = generateRandomStringArray(10, 8, false); + request.setNodes(nodes); + if (nodes.length > 0) { + expectedParams.put("nodes", String.join(",", nodes)); + } + } + if (randomBoolean()) { + String[] actions = generateRandomStringArray(10, 8, false); + request.setActions(actions); + if (actions.length > 0) { + expectedParams.put("actions", String.join(",", actions)); + } + } + expectedParams.put("group_by", "none"); + Request httpRequest = TasksRequestConverters.listTasks(request); + assertThat(httpRequest, notNullValue()); + assertThat(httpRequest.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(httpRequest.getEntity(), nullValue()); + assertThat(httpRequest.getEndpoint(), equalTo("/_tasks")); + assertThat(httpRequest.getParameters(), equalTo(expectedParams)); + } + { + ListTasksRequest request = new ListTasksRequest(); + request.setTaskId(new TaskId(randomAlphaOfLength(5), randomNonNegativeLong())); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () + -> TasksRequestConverters.listTasks(request)); + assertEquals("TaskId cannot be used for list tasks 
request", exception.getMessage()); + } + } +}
From 0d45752e50b7b7dd84a397fb95e133bcd28dd945 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 6 Sep 2018 17:55:24 -0400 Subject: [PATCH 25/91] Fix IndexMetaData loads after rollover (#33394)
When we roll over an index we write the conditions of the rollover that the old index met into the old index. Loading this index metadata requires a working `NamedXContentRegistry` that has been populated with parsers from the rollover infrastructure. We had a few loads that didn't use a working `NamedXContentRegistry` and so would fail if they ever encountered an index that had been rolled over. Here are the locations of the loads and how I fixed them:
* IndexFolderUpgrader - removed entirely. It existed to support opening indices made in Elasticsearch 2.x. Since we only need this change as far back as 6.4.1, which supports reading from indices created as far back as 5.0.0, we should be good here.
* TransportNodesListGatewayStartedShards - wired the `NamedXContentRegistry` into place.
* TransportNodesListShardStoreMetaData - wired the `NamedXContentRegistry` into place.
* OldIndexUtils - removed entirely. It existed to support the zip based index backwards compatibility tests, which we've since replaced with code that actually runs old versions of Elasticsearch.
In addition to fixing the actual problem, I added full cluster restart integration tests for rollover, which would have caught this problem, and I added an extra assertion to IndexMetaData's deserialization code which will trip if we try to deserialize an index's metadata without a fully formed `NamedXContentRegistry`. It won't catch it if we use the *wrong* `NamedXContentRegistry`, but it is better than nothing.
Closes #33316
--- .../upgrades/FullClusterRestartIT.java | 58 +++- .../cluster/metadata/IndexMetaData.java | 3 + .../common/util/IndexFolderUpgrader.java | 134 --------- .../gateway/GatewayMetaState.java | 2 - ...ransportNodesListGatewayStartedShards.java | 9 +- .../TransportNodesListShardStoreMetaData.java | 8 +- .../common/util/IndexFolderUpgraderTests.java | 270 ------------------ .../org/elasticsearch/test/OldIndexUtils.java | 131 --------- 8 files changed, 71 insertions(+), 544 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java delete mode 100644 server/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java
diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index d7111f64a1baf..80bed9db5f3da 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -477,6 +477,62 @@ public void testShrinkAfterUpgrade() throws IOException { } }
+    /**
+     * Test upgrading after a rollover. Specifically:
+     * <ol>
+     *  <li>Create an index with a write alias
+     *  <li>Write some documents to the write alias
+     *  <li>Roll over the index
+     *  <li>Make sure the document count is correct
+     *  <li>Upgrade
+     *  <li>Write some more documents to the write alias
+     *  <li>Make sure the document count is correct
+     * </ol>
+ */ + public void testRollover() throws IOException { + if (runningAgainstOldCluster) { + Request createIndex = new Request("PUT", "/" + index + "-000001"); + createIndex.setJsonEntity("{" + + " \"aliases\": {" + + " \"" + index + "_write\": {}" + + " }" + + "}"); + client().performRequest(createIndex); + } + + int bulkCount = 10; + StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < bulkCount; i++) { + bulk.append("{\"index\":{}}\n"); + bulk.append("{\"test\":\"test\"}\n"); + } + Request bulkRequest = new Request("POST", "/" + index + "_write/doc/_bulk"); + bulkRequest.setJsonEntity(bulk.toString()); + bulkRequest.addParameter("refresh", ""); + assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false")); + + if (runningAgainstOldCluster) { + Request rolloverRequest = new Request("POST", "/" + index + "_write/_rollover"); + rolloverRequest.setJsonEntity("{" + + " \"conditions\": {" + + " \"max_docs\": 5" + + " }" + + "}"); + client().performRequest(rolloverRequest); + + assertThat(EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices?v")).getEntity()), + containsString("testrollover-000002")); + } + + Request countRequest = new Request("POST", "/" + index + "-*/_search"); + countRequest.addParameter("size", "0"); + Map count = entityAsMap(client().performRequest(countRequest)); + assertNoFailures(count); + + int expectedCount = bulkCount + (runningAgainstOldCluster ? 0 : bulkCount); + assertEquals(expectedCount, (int) XContentMapValues.extractValue("hits.total", count)); + } + void assertBasicSearchWorks(int count) throws IOException { logger.info("--> testing basic search"); { @@ -947,7 +1003,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion Request writeToRestoredRequest = new Request("POST", "/restored_" + index + "/doc/_bulk"); writeToRestoredRequest.addParameter("refresh", "true"); writeToRestoredRequest.setJsonEntity(bulk.toString()); - client().performRequest(writeToRestoredRequest); + assertThat(EntityUtils.toString(client().performRequest(writeToRestoredRequest).getEntity()), containsString("\"errors\":false")); // And count to make sure the add worked // Make sure search finds all documents diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index ece729474d16b..54089abae7e03 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -1346,6 +1347,8 @@ public void toXContent(XContentBuilder builder, IndexMetaData state) throws IOEx @Override public IndexMetaData fromXContent(XContentParser parser) throws IOException { + assert parser.getXContentRegistry() != NamedXContentRegistry.EMPTY + : "loading index metadata requires a working named xcontent registry"; return Builder.fromXContent(parser); } }; diff --git a/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java 
b/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java deleted file mode 100644 index b709c48d8c26c..0000000000000 --- a/server/src/main/java/org/elasticsearch/common/util/IndexFolderUpgrader.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.util; - -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; - -/** - * Renames index folders from {index.name} to {index.uuid} - */ -public class IndexFolderUpgrader { - private final NodeEnvironment nodeEnv; - private final Settings settings; - private final Logger logger = Loggers.getLogger(IndexFolderUpgrader.class); - - /** - * Creates a new upgrader instance - * @param settings node settings - * @param nodeEnv the node env to operate on - */ - IndexFolderUpgrader(Settings settings, NodeEnvironment nodeEnv) { - this.settings = settings; - this.nodeEnv = nodeEnv; - } - - /** - * Moves the index folder found in source to target - */ - void upgrade(final Index index, final Path source, final Path target) throws IOException { - boolean success = false; - try { - Files.move(source, target, StandardCopyOption.ATOMIC_MOVE); - success = true; - } catch (NoSuchFileException | FileNotFoundException exception) { - // thrown when the source is non-existent because the folder was renamed - // by another node (shared FS) after we checked if the target exists - logger.error(() -> new ParameterizedMessage("multiple nodes trying to upgrade [{}] in parallel, retry " + - "upgrading with single node", target), exception); - throw exception; - } finally { - if (success) { - logger.info("{} moved from [{}] to [{}]", index, source, target); - logger.trace("{} syncing directory [{}]", index, target); - IOUtils.fsync(target, true); - } - } - } - - /** - * Renames indexFolderName index folders found in node paths and custom path - * iff {@link #needsUpgrade(Index, String)} is true. - * Index folder in custom paths are renamed first followed by index folders in each node path. 
- */ - void upgrade(final String indexFolderName) throws IOException { - for (NodeEnvironment.NodePath nodePath : nodeEnv.nodePaths()) { - final Path indexFolderPath = nodePath.indicesPath.resolve(indexFolderName); - final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, indexFolderPath); - if (indexMetaData != null) { - final Index index = indexMetaData.getIndex(); - if (needsUpgrade(index, indexFolderName)) { - logger.info("{} upgrading [{}] to new naming convention", index, indexFolderPath); - final IndexSettings indexSettings = new IndexSettings(indexMetaData, settings); - if (indexSettings.hasCustomDataPath()) { - // we rename index folder in custom path before renaming them in any node path - // to have the index state under a not-yet-upgraded index folder, which we use to - // continue renaming after a incomplete upgrade. - final Path customLocationSource = nodeEnv.resolveBaseCustomLocation(indexSettings) - .resolve(indexFolderName); - final Path customLocationTarget = customLocationSource.resolveSibling(index.getUUID()); - // we rename the folder in custom path only the first time we encounter a state - // in a node path, which needs upgrading, it is a no-op for subsequent node paths - if (Files.exists(customLocationSource) // might not exist if no data was written for this index - && Files.exists(customLocationTarget) == false) { - upgrade(index, customLocationSource, customLocationTarget); - } else { - logger.info("[{}] no upgrade needed - already upgraded", customLocationTarget); - } - } - upgrade(index, indexFolderPath, indexFolderPath.resolveSibling(index.getUUID())); - } else { - logger.debug("[{}] no upgrade needed - already upgraded", indexFolderPath); - } - } else { - logger.warn("[{}] no index state found - ignoring", indexFolderPath); - } - } - } - - /** - * Upgrades all indices found under nodeEnv. Already upgraded indices are ignored. 
- */ - public static void upgradeIndicesIfNeeded(final Settings settings, final NodeEnvironment nodeEnv) throws IOException { - final IndexFolderUpgrader upgrader = new IndexFolderUpgrader(settings, nodeEnv); - for (String indexFolderName : nodeEnv.availableIndexFolders()) { - upgrader.upgrade(indexFolderName); - } - } - - static boolean needsUpgrade(Index index, String indexFolderName) { - return indexFolderName.equals(index.getUUID()) == false; - } -} diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 719626b7e1870..46ff2f960e7cf 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -35,7 +35,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.IndexFolderUpgrader; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.plugins.MetaDataUpgrader; @@ -84,7 +83,6 @@ public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateSer if (DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings)) { try { ensureNoPre019State(); - IndexFolderUpgrader.upgradeIndicesIfNeeded(settings, nodeEnv); final MetaData metaData = metaStateService.loadFullState(); final MetaData upgradedMetaData = upgradeMetaData(metaData, metaDataIndexUpgradeService, metaDataUpgrader); // We finished global state validation and successfully checked all indices for backward compatibility diff --git a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java index 95ecc4183165b..c3cbfea9141ec 100644 --- a/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/elasticsearch/gateway/TransportNodesListGatewayStartedShards.java @@ -69,15 +69,18 @@ public class TransportNodesListGatewayStartedShards extends public static final String ACTION_NAME = "internal:gateway/local/started_shards"; private final NodeEnvironment nodeEnv; private final IndicesService indicesService; + private final NamedXContentRegistry namedXContentRegistry; @Inject public TransportNodesListGatewayStartedShards(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, - NodeEnvironment env, IndicesService indicesService) { + NodeEnvironment env, IndicesService indicesService, + NamedXContentRegistry namedXContentRegistry) { super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters, Request::new, NodeRequest::new, ThreadPool.Names.FETCH_SHARD_STARTED, NodeGatewayStartedShards.class); this.nodeEnv = env; this.indicesService = indicesService; + this.namedXContentRegistry = namedXContentRegistry; } @Override @@ -112,7 +115,7 @@ protected NodeGatewayStartedShards nodeOperation(NodeRequest request) { try { final ShardId shardId = request.getShardId(); logger.trace("{} loading local shard state info", shardId); - ShardStateMetaData shardStateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, + ShardStateMetaData shardStateMetaData = ShardStateMetaData.FORMAT.loadLatestState(logger, 
namedXContentRegistry, nodeEnv.availableShardPaths(request.shardId)); if (shardStateMetaData != null) { IndexMetaData metaData = clusterService.state().metaData().index(shardId.getIndex()); @@ -120,7 +123,7 @@ protected NodeGatewayStartedShards nodeOperation(NodeRequest request) { // we may send this requests while processing the cluster state that recovered the index // sometimes the request comes in before the local node processed that cluster state // in such cases we can load it from disk - metaData = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, + metaData = IndexMetaData.FORMAT.loadLatestState(logger, namedXContentRegistry, nodeEnv.indexPaths(shardId.getIndex())); } if (metaData == null) { diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java index 3dee58febbd1b..373edfc3b468b 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetaData.java @@ -67,17 +67,19 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction> indexSettingsMap = new HashMap<>(); - for (int i = 0; i < randomIntBetween(2, 5); i++) { - final Index index = new Index(randomAlphaOfLength(10), UUIDs.randomBase64UUID()); - Settings settings = Settings.builder() - .put(nodeSettings) - .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .build(); - IndexMetaData indexState = IndexMetaData.builder(index.getName()).settings(settings).build(); - Tuple fileCounts = new Tuple<>(randomIntBetween(1, 5), randomIntBetween(1, 5)); - IndexSettings indexSettings = new IndexSettings(indexState, nodeSettings); - indexSettingsMap.put(indexSettings, fileCounts); - writeIndex(nodeEnv, indexSettings, fileCounts.v1(), fileCounts.v2()); - } - IndexFolderUpgrader.upgradeIndicesIfNeeded(nodeSettings, nodeEnv); - for (Map.Entry> entry : indexSettingsMap.entrySet()) { - checkIndex(nodeEnv, entry.getKey(), entry.getValue().v1(), entry.getValue().v2()); - } - } - } - - public void testNeedsUpgrade() throws IOException { - final Index index = new Index("foo", UUIDs.randomBase64UUID()); - IndexMetaData indexState = IndexMetaData.builder(index.getName()) - .settings(Settings.builder() - .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - IndexMetaData.FORMAT.write(indexState, nodeEnvironment.indexPaths(index)); - assertFalse(IndexFolderUpgrader.needsUpgrade(index, index.getUUID())); - } - } - - private void checkIndex(NodeEnvironment nodeEnv, IndexSettings indexSettings, - int numIdxFiles, int numTranslogFiles) throws IOException { - final Index index = indexSettings.getIndex(); - // ensure index state can be loaded - IndexMetaData loadLatestState = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, - nodeEnv.indexPaths(index)); - assertNotNull(loadLatestState); - assertEquals(loadLatestState.getIndex(), index); - for (int shardId = 0; shardId < indexSettings.getNumberOfShards(); shardId++) { - // ensure shard 
path can be loaded - ShardPath targetShardPath = ShardPath.loadShardPath(logger, nodeEnv, new ShardId(index, shardId), indexSettings); - assertNotNull(targetShardPath); - // ensure shard contents are copied over - final Path translog = targetShardPath.resolveTranslog(); - final Path idx = targetShardPath.resolveIndex(); - - // ensure index and translog files are copied over - assertEquals(numTranslogFiles, FileSystemUtils.files(translog).length); - assertEquals(numIdxFiles, FileSystemUtils.files(idx).length); - Path[] files = FileSystemUtils.files(translog); - final HashSet translogFiles = new HashSet<>(Arrays.asList(files)); - for (int i = 0; i < numTranslogFiles; i++) { - final String name = Integer.toString(i); - translogFiles.contains(translog.resolve(name + ".translog")); - byte[] content = Files.readAllBytes(translog.resolve(name + ".translog")); - assertEquals(name , new String(content, StandardCharsets.UTF_8)); - } - Path[] indexFileList = FileSystemUtils.files(idx); - final HashSet idxFiles = new HashSet<>(Arrays.asList(indexFileList)); - for (int i = 0; i < numIdxFiles; i++) { - final String name = Integer.toString(i); - idxFiles.contains(idx.resolve(name + ".tst")); - byte[] content = Files.readAllBytes(idx.resolve(name + ".tst")); - assertEquals(name, new String(content, StandardCharsets.UTF_8)); - } - } - } - - private void writeIndex(NodeEnvironment nodeEnv, IndexSettings indexSettings, - int numIdxFiles, int numTranslogFiles) throws IOException { - NodeEnvironment.NodePath[] nodePaths = nodeEnv.nodePaths(); - Path[] oldIndexPaths = new Path[nodePaths.length]; - for (int i = 0; i < nodePaths.length; i++) { - oldIndexPaths[i] = nodePaths[i].indicesPath.resolve(indexSettings.getIndex().getName()); - } - IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), oldIndexPaths); - for (int id = 0; id < indexSettings.getNumberOfShards(); id++) { - Path oldIndexPath = randomFrom(oldIndexPaths); - ShardId shardId = new ShardId(indexSettings.getIndex(), id); - if (indexSettings.hasCustomDataPath()) { - Path customIndexPath = nodeEnv.resolveBaseCustomLocation(indexSettings).resolve(indexSettings.getIndex().getName()); - writeShard(shardId, customIndexPath, numIdxFiles, numTranslogFiles); - } else { - writeShard(shardId, oldIndexPath, numIdxFiles, numTranslogFiles); - } - ShardStateMetaData state = new ShardStateMetaData(true, indexSettings.getUUID(), AllocationId.newInitializing()); - ShardStateMetaData.FORMAT.write(state, oldIndexPath.resolve(String.valueOf(shardId.getId()))); - } - } - - private void writeShard(ShardId shardId, Path indexLocation, - final int numIdxFiles, final int numTranslogFiles) throws IOException { - Path oldShardDataPath = indexLocation.resolve(String.valueOf(shardId.getId())); - final Path translogPath = oldShardDataPath.resolve(ShardPath.TRANSLOG_FOLDER_NAME); - final Path idxPath = oldShardDataPath.resolve(ShardPath.INDEX_FOLDER_NAME); - Files.createDirectories(translogPath); - Files.createDirectories(idxPath); - for (int i = 0; i < numIdxFiles; i++) { - String filename = Integer.toString(i); - try (BufferedWriter w = Files.newBufferedWriter(idxPath.resolve(filename + ".tst"), - StandardCharsets.UTF_8)) { - w.write(filename); - } - } - for (int i = 0; i < numTranslogFiles; i++) { - String filename = Integer.toString(i); - try (BufferedWriter w = Files.newBufferedWriter(translogPath.resolve(filename + ".translog"), - StandardCharsets.UTF_8)) { - w.write(filename); - } - } - } -} diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java deleted file mode 100644 index b9a0e4a9b1ea0..0000000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.test; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.IndexWriter; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.index.MergePolicyConfig; - -import java.io.IOException; -import java.nio.file.DirectoryStream; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertTrue; -import static org.elasticsearch.test.ESTestCase.randomInt; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertNotNull; - - -public class OldIndexUtils { - - public static List loadDataFilesList(String prefix, Path bwcIndicesPath) throws IOException { - List indexes = new ArrayList<>(); - try (DirectoryStream stream = Files.newDirectoryStream(bwcIndicesPath, prefix + "-*.zip")) { - for (Path path : stream) { - indexes.add(path.getFileName().toString()); - } - } - Collections.sort(indexes); - return indexes; - } - - public static Settings getSettings() { - return Settings.builder() - .put(MergePolicyConfig.INDEX_MERGE_ENABLED, false) // disable merging so no segments will be upgraded - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), 30) // - // speed up recoveries - .put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), 30) - .build(); - } - - public static Path getIndexDir( - final Logger logger, - final String indexName, - final String indexFile, - final Path dataDir) throws IOException { - final Version version = Version.fromString(indexName.substring("index-".length())); - final List indexFolders = new ArrayList<>(); - try (DirectoryStream stream = Files.newDirectoryStream(dataDir.resolve("0/indices"), - (p) -> p.getFileName().toString().startsWith("extra") == false)) { 
// extra FS can break this... - for (final Path path : stream) { - indexFolders.add(path); - } - } - assertThat(indexFolders.toString(), indexFolders.size(), equalTo(1)); - final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, - indexFolders.get(0)); - assertNotNull(indexMetaData); - assertThat(indexFolders.get(0).getFileName().toString(), equalTo(indexMetaData.getIndexUUID())); - assertThat(indexMetaData.getCreationVersion(), equalTo(version)); - return indexFolders.get(0); - } - - // randomly distribute the files from src over dests paths - public static void copyIndex(final Logger logger, final Path src, final String folderName, final Path... dests) throws IOException { - Path destinationDataPath = dests[randomInt(dests.length - 1)]; - for (Path dest : dests) { - Path indexDir = dest.resolve(folderName); - assertFalse(Files.exists(indexDir)); - Files.createDirectories(indexDir); - } - Files.walkFileTree(src, new SimpleFileVisitor() { - @Override - public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { - Path relativeDir = src.relativize(dir); - for (Path dest : dests) { - Path destDir = dest.resolve(folderName).resolve(relativeDir); - Files.createDirectories(destDir); - } - return FileVisitResult.CONTINUE; - } - - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - if (file.getFileName().toString().equals(IndexWriter.WRITE_LOCK_NAME)) { - // skip lock file, we don't need it - logger.trace("Skipping lock file: {}", file); - return FileVisitResult.CONTINUE; - } - - Path relativeFile = src.relativize(file); - Path destFile = destinationDataPath.resolve(folderName).resolve(relativeFile); - logger.trace("--> Moving {} to {}", relativeFile, destFile); - Files.move(file, destFile); - assertFalse(Files.exists(file)); - assertTrue(Files.exists(destFile)); - return FileVisitResult.CONTINUE; - } - }); - } -} From ee73bc2f3fbf8a47d8e68f09dadccb92e5bc2fd9 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Fri, 7 Sep 2018 08:46:49 +1000 Subject: [PATCH 26/91] [SECURITY] Set Auth-scheme preference (#33156) Some browsers (e.g. Firefox) behave differently when presented with multiple auth schemes in the 'WWW-Authenticate' header. The expected behavior is that the browser selects the most secure auth-scheme before trying others, but Firefox picks the first presented auth-scheme and tries the rest sequentially. As the browser's interpretation is something we do not control, we can at least present the auth schemes ordered from most to least secure, as the server's preference. This commit modifies the code to collect the auth schemes and sort them from most to least secure. The priority of the auth schemes is fixed, with a lower number denoting a more secure, preferred auth-scheme. The current order, based on the auth-schemes ES supports, is [Negotiate, Bearer, Basic]; when we add support for other schemes we will need to update the code. If need be, we will make this ordering configurable in the future. A unit test verifies that the 'WWW-Authenticate' header values are sorted by server preference, from most to least secure auth schemes. Tested with Firefox, Chrome, Internet Explorer 11.
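As an illustration of the ordering logic described above, here is a minimal standalone sketch (the class name, method shapes, and demo values are invented for this sketch; the production code lives in DefaultAuthenticationFailureHandler, shown in the diff below):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Comparator;
    import java.util.List;

    public class AuthSchemeOrderingSketch {

        // Lower number denotes a more secure, preferred auth-scheme (illustrative priority table).
        private static int authSchemePriority(String headerValue) {
            if (headerValue.regionMatches(true, 0, "negotiate", 0, "negotiate".length())) {
                return 0;
            } else if (headerValue.regionMatches(true, 0, "bearer", 0, "bearer".length())) {
                return 1;
            } else if (headerValue.regionMatches(true, 0, "basic", 0, "basic".length())) {
                return 2;
            } else {
                return 3; // unknown schemes sort last
            }
        }

        // Returns the 'WWW-Authenticate' values sorted from most to least secure.
        static List<String> sortByServerPreference(List<String> wwwAuthenticateValues) {
            List<String> sorted = new ArrayList<>(wwwAuthenticateValues);
            sorted.sort(Comparator.comparingInt(AuthSchemeOrderingSketch::authSchemePriority));
            return sorted;
        }

        public static void main(String[] args) {
            List<String> presented = Arrays.asList(
                "Basic realm=\"security\" charset=\"UTF-8\"",
                "Bearer realm=\"security\"",
                "Negotiate");
            // Prints the schemes reordered as [Negotiate, Bearer ..., Basic ...]
            System.out.println(sortByServerPreference(presented));
        }
    }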
Closes #32699 --- .../DefaultAuthenticationFailureHandler.java | 38 +++++++++++++++++-- ...aultAuthenticationFailureHandlerTests.java | 27 +++++++++++-- 2 files changed, 58 insertions(+), 7 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java index d6f678a2dcb90..736b9378e3876 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandler.java @@ -12,9 +12,11 @@ import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.xpack.core.XPackField; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; @@ -44,12 +46,42 @@ public DefaultAuthenticationFailureHandler() { * be sent as failure response. * @see Realm#getAuthenticationFailureHeaders() */ - public DefaultAuthenticationFailureHandler(Map> failureResponseHeaders) { + public DefaultAuthenticationFailureHandler(final Map> failureResponseHeaders) { if (failureResponseHeaders == null || failureResponseHeaders.isEmpty()) { - failureResponseHeaders = Collections.singletonMap("WWW-Authenticate", + this.defaultFailureResponseHeaders = Collections.singletonMap("WWW-Authenticate", Collections.singletonList("Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\"")); + } else { + this.defaultFailureResponseHeaders = Collections.unmodifiableMap(failureResponseHeaders.entrySet().stream().collect(Collectors + .toMap(entry -> entry.getKey(), entry -> { + if (entry.getKey().equalsIgnoreCase("WWW-Authenticate")) { + List values = new ArrayList<>(entry.getValue()); + Collections.sort(values, (o1, o2) -> authSchemePriority(o1).compareTo(authSchemePriority(o2))); + return Collections.unmodifiableList(values); + } else { + return Collections.unmodifiableList(entry.getValue()); + } + }))); + } + } + + /** + * For given 'WWW-Authenticate' header value returns the priority based on + * the auth-scheme. Lower number denotes more secure and preferred + * auth-scheme than the higher number. + * + * @param headerValue string starting with auth-scheme name + * @return integer value denoting priority for given auth scheme.
+ */ + private static Integer authSchemePriority(final String headerValue) { + if (headerValue.regionMatches(true, 0, "negotiate", 0, "negotiate".length())) { + return 0; + } else if (headerValue.regionMatches(true, 0, "bearer", 0, "bearer".length())) { + return 1; + } else if (headerValue.regionMatches(true, 0, "basic", 0, "basic".length())) { + return 2; + } else { + return 3; } - this.defaultFailureResponseHeaders = Collections.unmodifiableMap(failureResponseHeaders); } @Override diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java index 2598461c37280..15593f0b82ea5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authc/DefaultAuthenticationFailureHandlerTests.java @@ -50,7 +50,7 @@ public void testAuthenticationRequired() { if (testDefault) { assertWWWAuthenticateWithSchemes(ese, basicAuthScheme); } else { - assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme); + assertWWWAuthenticateWithSchemes(ese, bearerAuthScheme, basicAuthScheme); } } @@ -83,12 +83,12 @@ public void testExceptionProcessingRequest() { assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); assertThat(ese, is(sameInstance(cause))); if (withAuthenticateHeader == false) { - assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme); + assertWWWAuthenticateWithSchemes(ese, negotiateAuthScheme, bearerAuthScheme, basicAuthScheme); } else { if (selectedScheme.contains("Negotiate ")) { assertWWWAuthenticateWithSchemes(ese, selectedScheme); } else { - assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme); + assertWWWAuthenticateWithSchemes(ese, negotiateAuthScheme, bearerAuthScheme, basicAuthScheme); } } assertThat(ese.getMessage(), equalTo("unauthorized")); @@ -102,11 +102,30 @@ public void testExceptionProcessingRequest() { assertThat(ese, is(notNullValue())); assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); assertThat(ese.getMessage(), equalTo("error attempting to authenticate request")); - assertWWWAuthenticateWithSchemes(ese, basicAuthScheme, bearerAuthScheme, negotiateAuthScheme); + assertWWWAuthenticateWithSchemes(ese, negotiateAuthScheme, bearerAuthScheme, basicAuthScheme); } } + public void testSortsWWWAuthenticateHeaderValues() { + final String basicAuthScheme = "Basic realm=\"" + XPackField.SECURITY + "\" charset=\"UTF-8\""; + final String bearerAuthScheme = "Bearer realm=\"" + XPackField.SECURITY + "\""; + final String negotiateAuthScheme = randomFrom("Negotiate", "Negotiate Ijoijksdk"); + final Map> failureResponeHeaders = new HashMap<>(); + final List supportedSchemes = Arrays.asList(basicAuthScheme, bearerAuthScheme, negotiateAuthScheme); + Collections.shuffle(supportedSchemes, random()); + failureResponeHeaders.put("WWW-Authenticate", supportedSchemes); + final DefaultAuthenticationFailureHandler failuerHandler = new DefaultAuthenticationFailureHandler(failureResponeHeaders); + + final ElasticsearchSecurityException ese = failuerHandler.exceptionProcessingRequest(Mockito.mock(RestRequest.class), null, + new ThreadContext(Settings.builder().build())); + + assertThat(ese, is(notNullValue())); + 
assertThat(ese.getHeader("WWW-Authenticate"), is(notNullValue())); + assertThat(ese.getMessage(), equalTo("error attempting to authenticate request")); + assertWWWAuthenticateWithSchemes(ese, negotiateAuthScheme, bearerAuthScheme, basicAuthScheme); + } + private void assertWWWAuthenticateWithSchemes(final ElasticsearchSecurityException ese, final String... schemes) { assertThat(ese.getHeader("WWW-Authenticate").size(), is(schemes.length)); assertThat(ese.getHeader("WWW-Authenticate"), contains(schemes)); From 7b923ea60459eab3546db0684b5d53322655cde3 Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Fri, 7 Sep 2018 07:24:43 +0300 Subject: [PATCH 27/91] SQL: Improve alias vs index resolution (#33393) Improve pattern resolution for aliases vs indices --- .../xpack/sql/analysis/index/IndexResolver.java | 9 +++++++-- x-pack/qa/sql/src/main/resources/command.csv-spec | 8 ++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java index 2b5f6111b6e01..0382729aa9f01 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/index/IndexResolver.java @@ -16,6 +16,8 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest.Feature; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.IndicesOptions.Option; +import org.elasticsearch.action.support.IndicesOptions.WildcardStates; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -117,6 +119,10 @@ public boolean equals(Object obj) { } } + private static final IndicesOptions INDICES_ONLY_OPTIONS = new IndicesOptions( + EnumSet.of(Option.ALLOW_NO_INDICES, Option.IGNORE_UNAVAILABLE, Option.IGNORE_ALIASES), EnumSet.of(WildcardStates.OPEN)); + + private final Client client; private final String clusterName; @@ -144,7 +150,6 @@ public void resolveNames(String indexWildcard, String javaRegex, EnumSet filterResults(javaRegex, aliases, response, listener), diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 8c56ca8609029..77d397fa2b5be 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -174,6 +174,14 @@ test_emp_copy |BASE TABLE test_emp_with_nulls|BASE TABLE ; +showTablesIdentifierPatternOnAliases +SHOW TABLES "test*,-test_emp*"; + + name:s | type:s +test_alias |ALIAS +test_alias_emp |ALIAS +; + // DESCRIBE describeSimpleLike From 34859414a070332ca97ca51dc98f1a94d2ea527d Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 7 Sep 2018 10:30:53 +0200 Subject: [PATCH 28/91] Fix bwc serialization of total hits when track_total_hits is false --- .../src/main/java/org/elasticsearch/common/lucene/Lucene.java | 2 +- .../elasticsearch/search/query/TopDocsCollectorContext.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 6016c7cb4c45f..47453aa8a41db 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java 
+++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -399,7 +399,7 @@ private static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws out.writeVLong(totalHits.value); if (out.getVersion().onOrAfter(org.elasticsearch.Version.V_7_0_0_alpha1)) { out.writeEnum(totalHits.relation); - } else if (totalHits.relation != TotalHits.Relation.EQUAL_TO) { + } else if (totalHits.value > 0 && totalHits.relation != TotalHits.Relation.EQUAL_TO) { throw new IllegalArgumentException("Cannot serialize approximate total hit counts to nodes that are on a version < 7.0.0"); } } diff --git a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java index d1b115ff68006..3aaa640f62fe4 100644 --- a/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java +++ b/server/src/main/java/org/elasticsearch/search/query/TopDocsCollectorContext.java @@ -238,7 +238,8 @@ private SimpleTopDocsCollectorContext(IndexReader reader, } } else { // total hit count is not needed - this.totalHitsSupplier = () -> topDocsSupplier.get().totalHits; + // for bwc hit count is set to 0, it will be converted to -1 by the coordinating node + this.totalHitsSupplier = () -> new TotalHits(0, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } MaxScoreCollector maxScoreCollector = null; if (trackMaxScore) { From 79cd6385fe68e92645ec10c5b05ec75c0277cd4a Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 7 Sep 2018 10:58:06 +0200 Subject: [PATCH 29/91] Collapse package structure for metrics aggs (#33463) This change collapses all metrics aggregation classes into the single package `org.elasticsearch.search.aggregations.metrics`. It also restricts the visibility of some classes (aggregators and factories) that should not be used outside of the package.
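For code compiled against these classes, the visible effect is an import change; a minimal sketch, assuming a post-#33463 elasticsearch server jar on the classpath (the wrapper class and the field/aggregation names are illustrative):

    // Before this change each metric lived in its own sub-package, e.g.
    //   org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder
    // After it, everything resolves from the single collapsed package.
    import org.elasticsearch.search.aggregations.AggregationBuilders;
    import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;

    public class CollapsedPackageExample {
        public static void main(String[] args) {
            AvgAggregationBuilder avg = AggregationBuilders.avg("avg_price").field("price");
            System.out.println(avg.getName()); // prints: avg_price
        }
    }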
Relates #22868 --- .../resources/checkstyle_suppressions.xml | 10 +-- .../client/RestHighLevelClient.java | 64 +++++++++---------- .../documentation/SearchDocumentationIT.java | 2 +- .../ml/datafeed/DatafeedConfigTests.java | 2 +- .../migration/migrate_7_0/java.asciidoc | 6 ++ .../expression/MoreExpressionTests.java | 2 +- .../join/aggregations/ChildrenIT.java | 4 +- .../ParentToChildrenAggregatorTests.java | 4 +- .../elasticsearch/search/SearchModule.java | 64 +++++++++---------- .../aggregations/AggregationBuilders.java | 58 ++++++++--------- .../AbstractHDRPercentilesAggregator.java | 6 +- .../AbstractInternalHDRPercentiles.java | 3 +- .../AbstractInternalTDigestPercentiles.java | 3 +- .../AbstractTDigestPercentilesAggregator.java | 7 +- .../aggregations/metrics/{avg => }/Avg.java | 4 +- .../{avg => }/AvgAggregationBuilder.java | 2 +- .../metrics/{avg => }/AvgAggregator.java | 7 +- .../{avg => }/AvgAggregatorFactory.java | 6 +- .../{cardinality => }/Cardinality.java | 4 +- .../CardinalityAggregationBuilder.java | 2 +- .../CardinalityAggregator.java | 26 +++++--- .../CardinalityAggregatorFactory.java | 6 +- .../{stats/extended => }/ExtendedStats.java | 4 +- .../ExtendedStatsAggregationBuilder.java | 2 +- .../ExtendedStatsAggregator.java | 9 ++- .../ExtendedStatsAggregatorFactory.java | 6 +- .../metrics/{geobounds => }/GeoBounds.java | 2 +- .../GeoBoundsAggregationBuilder.java | 2 +- .../{geobounds => }/GeoBoundsAggregator.java | 13 ++-- .../GeoBoundsAggregatorFactory.java | 6 +- .../{geocentroid => }/GeoCentroid.java | 4 +- .../GeoCentroidAggregationBuilder.java | 2 +- .../GeoCentroidAggregator.java | 3 +- .../GeoCentroidAggregatorFactory.java | 2 +- .../HDRPercentileRanksAggregator.java | 6 +- .../HDRPercentileRanksAggregatorFactory.java | 6 +- .../hdr => }/HDRPercentilesAggregator.java | 6 +- .../HDRPercentilesAggregatorFactory.java | 6 +- .../HyperLogLogPlusPlus.java | 2 +- .../metrics/{avg => }/InternalAvg.java | 3 +- .../InternalCardinality.java | 3 +- .../extended => }/InternalExtendedStats.java | 3 +- .../{geobounds => }/InternalGeoBounds.java | 2 +- .../InternalGeoCentroid.java | 5 +- .../hdr => }/InternalHDRPercentileRanks.java | 6 +- .../hdr => }/InternalHDRPercentiles.java | 6 +- .../metrics/{max => }/InternalMax.java | 7 +- .../metrics/{min => }/InternalMin.java | 3 +- .../InternalScriptedMetric.java | 4 +- .../metrics/{stats => }/InternalStats.java | 6 +- .../metrics/{sum => }/InternalSum.java | 7 +- .../InternalTDigestPercentileRanks.java | 8 +-- .../InternalTDigestPercentiles.java | 6 +- .../{tophits => }/InternalTopHits.java | 2 +- .../{valuecount => }/InternalValueCount.java | 5 +- .../InternalWeightedAvg.java | 7 +- .../aggregations/metrics/{max => }/Max.java | 4 +- .../{max => }/MaxAggregationBuilder.java | 2 +- .../metrics/{max => }/MaxAggregator.java | 7 +- .../{max => }/MaxAggregatorFactory.java | 6 +- .../aggregations/metrics/{min => }/Min.java | 4 +- .../{min => }/MinAggregationBuilder.java | 3 +- .../metrics/{min => }/MinAggregator.java | 7 +- .../{min => }/MinAggregatorFactory.java | 6 +- .../metrics/{avg => }/ParsedAvg.java | 5 +- .../{cardinality => }/ParsedCardinality.java | 4 +- .../extended => }/ParsedExtendedStats.java | 5 +- .../{geobounds => }/ParsedGeoBounds.java | 12 ++-- .../{geocentroid => }/ParsedGeoCentroid.java | 4 +- .../hdr => }/ParsedHDRPercentileRanks.java | 5 +- .../hdr => }/ParsedHDRPercentiles.java | 4 +- .../metrics/{max => }/ParsedMax.java | 5 +- .../metrics/{min => }/ParsedMin.java | 5 +- .../ParsedPercentileRanks.java | 4 +- 
.../{percentiles => }/ParsedPercentiles.java | 2 +- .../{scripted => }/ParsedScriptedMetric.java | 2 +- .../metrics/{stats => }/ParsedStats.java | 4 +- .../metrics/{sum => }/ParsedSum.java | 5 +- .../ParsedTDigestPercentileRanks.java | 5 +- .../ParsedTDigestPercentiles.java | 4 +- .../metrics/{tophits => }/ParsedTopHits.java | 2 +- .../{valuecount => }/ParsedValueCount.java | 4 +- .../{weighted_avg => }/ParsedWeightedAvg.java | 5 +- .../metrics/{percentiles => }/Percentile.java | 4 +- .../{percentiles => }/PercentileRanks.java | 4 +- .../PercentileRanksAggregationBuilder.java | 4 +- .../{percentiles => }/Percentiles.java | 4 +- .../PercentilesAggregationBuilder.java | 4 +- .../{percentiles => }/PercentilesMethod.java | 2 +- .../{scripted => }/ScriptedMetric.java | 2 +- .../ScriptedMetricAggregationBuilder.java | 2 +- .../ScriptedMetricAggregator.java | 18 ++++-- .../ScriptedMetricAggregatorFactory.java | 18 +++--- .../metrics/{stats => }/Stats.java | 3 +- .../{stats => }/StatsAggregationBuilder.java | 2 +- .../metrics/{stats => }/StatsAggregator.java | 12 ++-- .../{stats => }/StatsAggregatorFactory.java | 6 +- .../aggregations/metrics/{sum => }/Sum.java | 4 +- .../{sum => }/SumAggregationBuilder.java | 2 +- .../metrics/{sum => }/SumAggregator.java | 5 +- .../{sum => }/SumAggregatorFactory.java | 6 +- .../TDigestPercentileRanksAggregator.java | 21 ++++-- ...igestPercentileRanksAggregatorFactory.java | 6 +- .../TDigestPercentilesAggregator.java | 21 ++++-- .../TDigestPercentilesAggregatorFactory.java | 6 +- .../tdigest => }/TDigestState.java | 2 +- .../metrics/{tophits => }/TopHits.java | 2 +- .../TopHitsAggregationBuilder.java | 2 +- .../{tophits => }/TopHitsAggregator.java | 5 +- .../TopHitsAggregatorFactory.java | 4 +- .../metrics/{valuecount => }/ValueCount.java | 4 +- .../ValueCountAggregationBuilder.java | 2 +- .../ValueCountAggregator.java | 7 +- .../ValueCountAggregatorFactory.java | 6 +- .../{weighted_avg => }/WeightedAvg.java | 4 +- .../WeightedAvgAggregationBuilder.java | 2 +- .../WeightedAvgAggregator.java | 15 ++--- .../WeightedAvgAggregatorFactory.java | 12 ++-- .../percentile/InternalPercentilesBucket.java | 4 +- .../percentile/ParsedPercentilesBucket.java | 4 +- .../percentile/PercentilesBucket.java | 2 +- .../stats/InternalStatsBucket.java | 2 +- .../stats/ParsedStatsBucket.java | 2 +- .../bucketmetrics/stats/StatsBucket.java | 2 +- .../stats/extended/ExtendedStatsBucket.java | 4 +- .../extended/InternalExtendedStatsBucket.java | 5 +- .../extended/ParsedExtendedStatsBucket.java | 2 +- .../search/SearchPhaseControllerTests.java | 2 +- .../aggregations/AggregationsTests.java | 22 +++---- .../search/aggregations/EquivalenceIT.java | 2 +- .../search/aggregations/MetaDataIT.java | 2 +- .../search/aggregations/MissingValueIT.java | 10 +-- .../bucket/AdjacencyMatrixIT.java | 2 +- .../aggregations/bucket/DateHistogramIT.java | 4 +- .../aggregations/bucket/DateRangeIT.java | 2 +- .../bucket/DiversifiedSamplerIT.java | 2 +- .../aggregations/bucket/DoubleTermsIT.java | 10 +-- .../search/aggregations/bucket/FilterIT.java | 2 +- .../search/aggregations/bucket/FiltersIT.java | 2 +- .../bucket/GlobalAggregatorTests.java | 4 +- .../search/aggregations/bucket/GlobalIT.java | 2 +- .../aggregations/bucket/HistogramIT.java | 8 +-- .../aggregations/bucket/LongTermsIT.java | 10 +-- .../search/aggregations/bucket/MissingIT.java | 2 +- .../aggregations/bucket/NaNSortingIT.java | 8 +-- .../search/aggregations/bucket/NestedIT.java | 6 +- .../search/aggregations/bucket/RangeIT.java | 2 +- 
.../aggregations/bucket/ReverseNestedIT.java | 2 +- .../search/aggregations/bucket/SamplerIT.java | 2 +- .../composite/CompositeAggregatorTests.java | 8 +-- .../AutoDateHistogramAggregatorTests.java | 2 +- .../bucket/nested/NestedAggregatorTests.java | 14 ++-- .../nested/ReverseNestedAggregatorTests.java | 4 +- .../sampler/SamplerAggregatorTests.java | 4 +- .../bucket/terms/StringTermsIT.java | 8 +-- .../bucket/terms/TermsAggregatorTests.java | 4 +- .../metrics/AbstractGeoTestCase.java | 35 +++++++--- .../AbstractPercentilesTestCase.java | 4 +- .../metrics/{avg => }/AvgAggregatorTests.java | 5 +- .../search/aggregations/metrics/AvgIT.java | 1 - .../search/aggregations/metrics/AvgTests.java | 2 - .../metrics/CardinalityAggregatorTests.java | 3 - .../aggregations/metrics/CardinalityIT.java | 1 - .../{cardinality => }/CardinalityTests.java | 3 +- .../metrics/ExtendedStatsAggregatorTests.java | 3 - .../aggregations/metrics/ExtendedStatsIT.java | 3 +- .../metrics/ExtendedStatsTests.java | 2 - .../GeoBoundsAggregatorTests.java | 4 +- .../aggregations/metrics/GeoBoundsIT.java | 2 - .../aggregations/metrics/GeoBoundsTests.java | 1 - .../GeoCentroidAggregatorTests.java | 4 +- .../aggregations/metrics/GeoCentroidIT.java | 1 - .../metrics/GeoCentroidTests.java | 1 - .../HDRPercentileRanksAggregatorTests.java | 10 +-- .../metrics/HDRPercentileRanksIT.java | 3 - .../HDRPercentilesAggregatorTests.java | 8 ++- .../metrics/HDRPercentilesIT.java | 3 - .../HyperLogLogPlusPlusTests.java | 7 +- .../metrics/{avg => }/InternalAvgTests.java | 4 +- .../InternalCardinalityTests.java | 5 +- .../metrics/InternalExtendedStatsTests.java | 4 +- .../InternalGeoBoundsTests.java | 4 +- .../InternalGeoCentroidTests.java | 4 +- .../InternalHDRPercentilesRanksTests.java | 8 ++- .../hdr => }/InternalHDRPercentilesTests.java | 10 +-- .../metrics/InternalMaxTests.java | 2 - .../metrics/InternalMinTests.java | 2 - .../InternalPercentilesRanksTestCase.java | 2 +- .../InternalPercentilesTestCase.java | 2 +- .../InternalScriptedMetricTests.java | 4 +- .../metrics/InternalStatsBucketTests.java | 1 - .../metrics/InternalStatsTests.java | 2 - .../metrics/InternalSumTests.java | 2 - .../InternalTDigestPercentilesRanksTests.java | 9 ++- .../InternalTDigestPercentilesTests.java | 9 ++- .../{tophits => }/InternalTopHitsTests.java | 4 +- .../InternalValueCountTests.java | 4 +- .../metrics/MaxAggregatorTests.java | 3 - .../search/aggregations/metrics/MaxIT.java | 1 - .../search/aggregations/metrics/MaxTests.java | 2 - .../metrics/MinAggregatorTests.java | 3 - .../search/aggregations/metrics/MinIT.java | 1 - .../search/aggregations/metrics/MinTests.java | 2 - .../metrics/PercentileRanksTests.java | 1 - .../PercentilesMethodTests.java | 3 +- .../metrics/PercentilesTests.java | 1 - .../ScriptedMetricAggregatorTests.java | 4 +- .../metrics/ScriptedMetricIT.java | 1 - .../metrics/ScriptedMetricTests.java | 1 - .../metrics/StatsAggregatorTests.java | 2 - .../search/aggregations/metrics/StatsIT.java | 1 - .../aggregations/metrics/StatsTests.java | 2 - .../metrics/SumAggregatorTests.java | 3 - .../search/aggregations/metrics/SumIT.java | 1 - .../search/aggregations/metrics/SumTests.java | 2 - ...TDigestPercentileRanksAggregatorTests.java | 10 +-- .../metrics/TDigestPercentileRanksIT.java | 4 -- .../TDigestPercentilesAggregatorTests.java | 8 ++- .../metrics/TDigestPercentilesIT.java | 4 -- .../{tophits => }/TopHitsAggregatorTests.java | 3 +- .../aggregations/metrics/TopHitsIT.java | 49 ++++++++++---- .../aggregations/metrics/TopHitsTests.java | 1 - 
.../ValueCountAggregatorTests.java | 5 +- .../aggregations/metrics/ValueCountIT.java | 1 - .../aggregations/metrics/ValueCountTests.java | 1 - .../WeightedAvgAggregatorTests.java | 5 +- .../aggregations/pipeline/AvgBucketIT.java | 2 +- .../aggregations/pipeline/BucketScriptIT.java | 2 +- .../pipeline/BucketSelectorIT.java | 2 +- .../CumulativeSumAggregatorTests.java | 8 +-- .../pipeline/DateDerivativeIT.java | 2 +- .../aggregations/pipeline/DerivativeIT.java | 4 +- .../pipeline/ExtendedStatsBucketIT.java | 4 +- .../aggregations/pipeline/MaxBucketIT.java | 2 +- .../aggregations/pipeline/MinBucketIT.java | 2 +- .../pipeline/PercentilesBucketIT.java | 4 +- .../PipelineAggregationHelperTests.java | 8 +-- .../aggregations/pipeline/StatsBucketIT.java | 2 +- .../aggregations/pipeline/SumBucketIT.java | 2 +- .../avg/AvgBucketAggregatorTests.java | 4 +- .../InternalPercentilesBucketTests.java | 4 +- .../InternalExtendedStatsBucketTests.java | 2 +- .../pipeline/bucketsort/BucketSortIT.java | 2 +- .../pipeline/movfn/MovFnUnitTests.java | 2 +- .../pipeline/moving/avg/MovAvgIT.java | 2 +- .../aggregation/AggregationProfilerIT.java | 20 +++--- ...ternalSingleBucketAggregationTestCase.java | 4 +- .../test/InternalAggregationTestCase.java | 64 +++++++++---------- .../core/ml/datafeed/DatafeedConfig.java | 2 +- .../xpack/core/ml/stats/StatsAccumulator.java | 2 +- .../xpack/core/rollup/RollupField.java | 10 +-- .../core/ml/datafeed/DatafeedConfigTests.java | 4 +- .../core/ml/datafeed/DatafeedUpdateTests.java | 2 +- .../extractor/ExtractorUtilsTests.java | 4 +- .../core/ml/stats/StatsAccumulatorTests.java | 4 +- .../TransportGetOverallBucketsAction.java | 4 +- .../AggregationToJsonProcessor.java | 6 +- .../chunked/ChunkedDataExtractor.java | 4 +- .../job/persistence/JobResultsProvider.java | 4 +- .../OverallBucketsProvider.java | 2 +- .../TransportPreviewDatafeedActionTests.java | 4 +- .../datafeed/DatafeedJobValidatorTests.java | 2 +- .../extractor/DataExtractorFactoryTests.java | 4 +- .../aggregation/AggregationTestUtils.java | 6 +- .../AggregationToJsonProcessorTests.java | 4 +- .../chunked/ChunkedDataExtractorTests.java | 4 +- .../integration/BasicDistributedJobsIT.java | 2 +- .../local/LocalExporterIntegTests.java | 2 +- .../xpack/rollup/RollupRequestTranslator.java | 4 +- .../rollup/RollupResponseTranslator.java | 10 +-- .../xpack/rollup/job/RollupIndexer.java | 10 +-- .../rollup/RollupJobIdentifierUtilTests.java | 8 +-- .../rollup/RollupRequestTranslationTests.java | 10 +-- .../RollupResponseTranslationTests.java | 22 +++---- .../rollup/action/SearchActionTests.java | 8 +-- .../xpack/rollup/job/IndexerUtilsTests.java | 6 +- .../bench/WatcherScheduleEngineBenchmark.java | 2 +- 277 files changed, 780 insertions(+), 820 deletions(-) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/AbstractHDRPercentilesAggregator.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/AbstractInternalHDRPercentiles.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/AbstractInternalTDigestPercentiles.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/AbstractTDigestPercentilesAggregator.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/Avg.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/AvgAggregationBuilder.java 
(98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/AvgAggregator.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/AvgAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/Cardinality.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/CardinalityAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/CardinalityAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/CardinalityAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/ExtendedStats.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/ExtendedStatsAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/ExtendedStatsAggregator.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/ExtendedStatsAggregatorFactory.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/GeoBounds.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/GeoBoundsAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/GeoBoundsAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/GeoBoundsAggregatorFactory.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/GeoCentroid.java (85%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/GeoCentroidAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/GeoCentroidAggregator.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/GeoCentroidAggregatorFactory.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentileRanksAggregator.java (90%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentileRanksAggregatorFactory.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentilesAggregator.java (90%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentilesAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/HyperLogLogPlusPlus.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/InternalAvg.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/InternalCardinality.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/InternalExtendedStats.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/InternalGeoBounds.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/InternalGeoCentroid.java (97%) rename 
server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/InternalHDRPercentileRanks.java (90%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/InternalHDRPercentiles.java (90%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/InternalMax.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/InternalMin.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/InternalScriptedMetric.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/InternalStats.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/InternalSum.java (91%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/InternalTDigestPercentileRanks.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/InternalTDigestPercentiles.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/InternalTopHits.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/InternalValueCount.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/InternalWeightedAvg.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/Max.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/MaxAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/MaxAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/MaxAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/Min.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/MinAggregationBuilder.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/MinAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/MinAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{avg => }/ParsedAvg.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/ParsedCardinality.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats/extended => }/ParsedExtendedStats.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/ParsedGeoBounds.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/ParsedGeoCentroid.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/ParsedHDRPercentileRanks.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/ParsedHDRPercentiles.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{max => }/ParsedMax.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{min => }/ParsedMin.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/ParsedPercentileRanks.java (85%) rename 
server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/ParsedPercentiles.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ParsedScriptedMetric.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/ParsedStats.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/ParsedSum.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/ParsedTDigestPercentileRanks.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/ParsedTDigestPercentiles.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/ParsedTopHits.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ParsedValueCount.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/ParsedWeightedAvg.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/Percentile.java (95%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/PercentileRanks.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/PercentileRanksAggregationBuilder.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/Percentiles.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/PercentilesAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/PercentilesMethod.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ScriptedMetric.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ScriptedMetricAggregationBuilder.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ScriptedMetricAggregator.java (83%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ScriptedMetricAggregatorFactory.java (86%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/Stats.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/StatsAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/StatsAggregator.java (93%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{stats => }/StatsAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/Sum.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/SumAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/SumAggregator.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{sum => }/SumAggregatorFactory.java (89%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentileRanksAggregator.java (71%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentileRanksAggregatorFactory.java (92%) rename 
server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentilesAggregator.java (72%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentilesAggregatorFactory.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestState.java (97%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/TopHits.java (94%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/TopHitsAggregationBuilder.java (99%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/TopHitsAggregator.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/TopHitsAggregatorFactory.java (96%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ValueCount.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ValueCountAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ValueCountAggregator.java (92%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ValueCountAggregatorFactory.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/WeightedAvg.java (87%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/WeightedAvgAggregationBuilder.java (98%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/WeightedAvgAggregator.java (88%) rename server/src/main/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/WeightedAvgAggregatorFactory.java (82%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/AbstractPercentilesTestCase.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{avg => }/AvgAggregatorTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/CardinalityTests.java (90%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/GeoBoundsAggregatorTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/GeoCentroidAggregatorTests.java (97%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentileRanksAggregatorTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/HDRPercentilesAggregatorTests.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/HyperLogLogPlusPlusTests.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{avg => }/InternalAvgTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{cardinality => }/InternalCardinalityTests.java (95%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{geobounds => }/InternalGeoBoundsTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{geocentroid => }/InternalGeoCentroidTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/InternalHDRPercentilesRanksTests.java (91%) rename 
server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/hdr => }/InternalHDRPercentilesTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/InternalPercentilesRanksTestCase.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/InternalPercentilesTestCase.java (97%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/InternalScriptedMetricTests.java (98%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/InternalTDigestPercentilesRanksTests.java (91%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/InternalTDigestPercentilesTests.java (91%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/InternalTopHitsTests.java (98%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/InternalValueCountTests.java (94%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles => }/PercentilesMethodTests.java (95%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{scripted => }/ScriptedMetricAggregatorTests.java (98%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentileRanksAggregatorTests.java (92%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{percentiles/tdigest => }/TDigestPercentilesAggregatorTests.java (95%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{tophits => }/TopHitsAggregatorTests.java (98%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{valuecount => }/ValueCountAggregatorTests.java (96%) rename server/src/test/java/org/elasticsearch/search/aggregations/metrics/{weighted_avg => }/WeightedAvgAggregatorTests.java (98%) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index e9dc4918d6932..a83aed602e17d 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -360,13 +360,7 @@ - - - - - - - + @@ -641,8 +635,6 @@ - - diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index a959e349c151d..17f8f65943012 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -120,38 +120,38 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.ParsedAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geobounds.ParsedGeoBounds; -import 
org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geocentroid.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.ParsedMin; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.scripted.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ParsedValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; +import 
org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 2f743c786bab8..d9d4f665f9d7c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -86,7 +86,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java index 462eb1466511d..8ed51415521af 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/datafeed/DatafeedConfigTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractXContentTestCase; diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc 
b/docs/reference/migration/migrate_7_0/java.asciidoc index 169943a16ac03..4a3040507c961 100644 --- a/docs/reference/migration/migrate_7_0/java.asciidoc +++ b/docs/reference/migration/migrate_7_0/java.asciidoc @@ -12,3 +12,9 @@ The `prepareExecute` method which created a request builder has been removed from the client api. Instead, construct a builder for the appropriate request directly. + +=== Some Aggregation classes have moved packages + +* All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages +were moved to a single `org.elasticsearch.search.aggregations.metrics` package. + diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java index f4095b3f68ada..932e5979c0f9a 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java index 869019ac0ffce..f7f3b89773b35 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -30,8 +30,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; +import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.TopHits; import org.elasticsearch.search.sort.SortOrder; import org.junit.Before; diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index d6557256ce002..452fe1b490b02 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -49,8 +49,8 @@ import org.elasticsearch.join.mapper.MetaJoinFieldMapper; import org.elasticsearch.join.mapper.ParentJoinFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import java.io.IOException; 
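// The package flattening described in the migration note above reduces, for callers,
// to rewriting import paths; a minimal sketch of the change, using
// MaxAggregationBuilder (one of the classes renamed by this patch) as the example:
//
//   before: import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
//   after:  import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;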
import java.util.Arrays; diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index eae88322a1266..3032f618c2f30 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -151,38 +151,38 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.InternalStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.weighted_avg.InternalWeightedAvg; -import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalCardinality; +import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds; +import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalStats; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java index b4e416f4d7789..7363ec8306d97 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilders.java @@ -54,35 +54,35 @@ import org.elasticsearch.search.aggregations.bucket.significant.SignificantTextAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import 
org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.Min; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Cardinality; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.GeoBounds; +import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.GeoCentroid; +import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.PercentileRanks; +import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Percentiles; +import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetric; +import 
org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.TopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; import java.util.Map; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java index 56cd7eefbf203..0848a494c7454 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractHDRPercentilesAggregator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; +package org.elasticsearch.search.aggregations.metrics; import org.HdrHistogram.DoubleHistogram; import org.apache.lucene.index.LeafReaderContext; @@ -40,7 +40,7 @@ import java.util.List; import java.util.Map; -public abstract class AbstractHDRPercentilesAggregator extends NumericMetricsAggregator.MultiValue { +abstract class AbstractHDRPercentilesAggregator extends NumericMetricsAggregator.MultiValue { private static int indexOfKey(double[] keys, double key) { return ArrayUtils.binarySearch(keys, key, 0.001); @@ -53,7 +53,7 @@ private static int indexOfKey(double[] keys, double key) { protected final int numberOfSignificantValueDigits; protected final boolean keyed; - public AbstractHDRPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, SearchContext context, Aggregator parent, + AbstractHDRPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, SearchContext context, Aggregator parent, double[] keys, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter, List pipelineAggregators, Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java index a7b359d59373c..7050254f2793f 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; +package org.elasticsearch.search.aggregations.metrics; import org.HdrHistogram.DoubleHistogram; import org.elasticsearch.common.io.stream.StreamInput; @@ -25,7 +25,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java index 0938710406a7b..6e6ff3cf3a88b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java @@ -17,14 +17,13 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java similarity index 91% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java index 802e1b0257cea..15ad622fce58c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractTDigestPercentilesAggregator.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; @@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -39,7 +38,7 @@ import java.util.List; import java.util.Map; -public abstract class AbstractTDigestPercentilesAggregator extends NumericMetricsAggregator.MultiValue { +abstract class AbstractTDigestPercentilesAggregator extends NumericMetricsAggregator.MultiValue { private static int indexOfKey(double[] keys, double key) { return ArrayUtils.binarySearch(keys, key, 0.001); @@ -52,7 +51,7 @@ private static int indexOfKey(double[] keys, double key) { protected final double compression; protected final boolean keyed; - public AbstractTDigestPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, SearchContext context, Aggregator parent, + AbstractTDigestPercentilesAggregator(String name, ValuesSource.Numeric valuesSource, SearchContext context, Aggregator parent, double[] keys, double compression, boolean keyed, DocValueFormat formatter, List pipelineAggregators, Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/Avg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Avg.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/Avg.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Avg.java index e36b8df7debc2..1b9f02c527032 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/Avg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Avg.java @@ -16,9 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.avg; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An aggregation that computes the average of the values in the current bucket. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java index f0d917715ace4..1f57964f667fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.avg; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java index 042618011f16d..22142799a9358 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.avg; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; @@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -39,7 +38,7 @@ import java.util.List; import java.util.Map; -public class AvgAggregator extends NumericMetricsAggregator.SingleValue { +class AvgAggregator extends NumericMetricsAggregator.SingleValue { final ValuesSource.Numeric valuesSource; @@ -48,7 +47,7 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue { DoubleArray compensations; DocValueFormat format; - public AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext context, + AvgAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSource = valuesSource; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java index f1fc12ef4e505..817e40db26e18 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. 
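 * A pattern repeated throughout these hunks: once all the metrics classes share one
 * package, implementation classes and their constructors no longer need public
 * visibility and are narrowed to package-private. Sketched for AvgAggregator:
 *
 *   -public class AvgAggregator extends NumericMetricsAggregator.SingleValue {
 *   +class AvgAggregator extends NumericMetricsAggregator.SingleValue {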
*/ -package org.elasticsearch.search.aggregations.metrics.avg; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -33,9 +33,9 @@ import java.util.List; import java.util.Map; -public class AvgAggregatorFactory extends ValuesSourceAggregatorFactory { +class AvgAggregatorFactory extends ValuesSourceAggregatorFactory { - public AvgAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context, + AvgAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/Cardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Cardinality.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/Cardinality.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Cardinality.java index 92f3b8bb2615e..f85070d135964 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/Cardinality.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Cardinality.java @@ -17,9 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.cardinality; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An aggregation that computes approximate numbers of unique terms. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java index 17b3849c5eb16..244aa1dda3fe6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.cardinality; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 0df6b69681937..80dd9beac9298 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.cardinality; +package org.elasticsearch.search.aggregations.metrics; import com.carrotsearch.hppc.BitMixer; @@ -40,7 +40,6 @@ import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -52,7 +51,7 @@ /** * An aggregator that computes approximate counts of unique values. */ -public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue { +class CardinalityAggregator extends NumericMetricsAggregator.SingleValue { private final int precision; private final ValuesSource valuesSource; @@ -63,8 +62,13 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue private Collector collector; - public CardinalityAggregator(String name, ValuesSource valuesSource, int precision, - SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { + CardinalityAggregator(String name, + ValuesSource valuesSource, + int precision, + SearchContext context, + Aggregator parent, + List pipelineAggregators, + Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSource = valuesSource; this.precision = precision; @@ -83,7 +87,8 @@ private Collector pickCollector(LeafReaderContext ctx) throws IOException { if (valuesSource instanceof ValuesSource.Numeric) { ValuesSource.Numeric source = (ValuesSource.Numeric) valuesSource; - MurmurHash3Values hashValues = source.isFloatingPoint() ? MurmurHash3Values.hash(source.doubleValues(ctx)) : MurmurHash3Values.hash(source.longValues(ctx)); + MurmurHash3Values hashValues = source.isFloatingPoint() ? + MurmurHash3Values.hash(source.doubleValues(ctx)) : MurmurHash3Values.hash(source.longValues(ctx)); return new DirectCollector(counts, hashValues); } @@ -270,7 +275,8 @@ public void postCollect() throws IOException { final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128(); try (LongArray hashes = bigArrays.newLongArray(maxOrd, false)) { - for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? 
allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) { + for (int ord = allVisitedOrds.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; + ord = ord + 1 < maxOrd ? allVisitedOrds.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) { final BytesRef value = values.lookupOrd(ord); org.elasticsearch.common.hash.MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, hash); hashes.set(ord, hash.h1); @@ -279,7 +285,8 @@ public void postCollect() throws IOException { for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) { final FixedBitSet bits = visitedOrds.get(bucket); if (bits != null) { - for (int ord = bits.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) { + for (int ord = bits.nextSetBit(0); ord < DocIdSetIterator.NO_MORE_DOCS; + ord = ord + 1 < maxOrd ? bits.nextSetBit(ord + 1) : DocIdSetIterator.NO_MORE_DOCS) { counts.collect(bucket, hashes.get(ord)); } } @@ -376,7 +383,8 @@ public long nextValue() throws IOException { private static class Bytes extends MurmurHash3Values { - private final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = new org.elasticsearch.common.hash.MurmurHash3.Hash128(); + private final org.elasticsearch.common.hash.MurmurHash3.Hash128 hash = + new org.elasticsearch.common.hash.MurmurHash3.Hash128(); private final SortedBinaryDocValues values; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java index 0d2d32f04697c..413c896fbcb3f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. 
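 * Note on the rewrapped long lines in CardinalityAggregator above: this patch also
 * drops these files' entries from checkstyle_suppressions.xml (see the buildSrc hunk
 * earlier), so the relocated sources presumably have to pass the line-length check
 * directly, hence the cosmetic reflowing alongside the package move.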
*/ -package org.elasticsearch.search.aggregations.metrics.cardinality; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -32,11 +32,11 @@ import java.util.List; import java.util.Map; -public class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory { +class CardinalityAggregatorFactory extends ValuesSourceAggregatorFactory { private final Long precisionThreshold; - public CardinalityAggregatorFactory(String name, ValuesSourceConfig config, Long precisionThreshold, + CardinalityAggregatorFactory(String name, ValuesSourceConfig config, Long precisionThreshold, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStats.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java index 8a198a5825a3d..68dac3e373d1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStats.java @@ -16,9 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.stats.extended; - -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +package org.elasticsearch.search.aggregations.metrics; /** * Statistics over a set of values (either aggregated over field data or scripts) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index 09a12fb188fe3..33caa5f840028 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.stats.extended; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java index 1089d2e1b9796..1d383a2ae1946 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.stats.extended; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; @@ -31,7 +31,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -40,9 +39,9 @@ import java.util.List; import java.util.Map; -public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue { +class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue { - public static final ParseField SIGMA_FIELD = new ParseField("sigma"); + static final ParseField SIGMA_FIELD = new ParseField("sigma"); final ValuesSource.Numeric valuesSource; final DocValueFormat format; @@ -56,7 +55,7 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue DoubleArray sumOfSqrs; DoubleArray compensationOfSqrs; - public ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, + ExtendedStatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext context, Aggregator parent, double sigma, List pipelineAggregators, Map metaData) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java index 521ea8f68a67d..890f3199498b4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.stats.extended; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -33,11 +33,11 @@ import java.util.List; import java.util.Map; -public class ExtendedStatsAggregatorFactory extends ValuesSourceAggregatorFactory { +class ExtendedStatsAggregatorFactory extends ValuesSourceAggregatorFactory { private final double sigma; - public ExtendedStatsAggregatorFactory(String name, ValuesSourceConfig config, double sigma, + ExtendedStatsAggregatorFactory(String name, ValuesSourceConfig config, double sigma, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBounds.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBounds.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBounds.java index 76b8ed11fc971..22fd5b501f9dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBounds.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.geobounds; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.search.aggregations.Aggregation; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java index 2d616ebe07168..9955f62f80ab7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.geobounds; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java index 5c0cb4ba60a1f..e6d591482be2b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregator.java @@ -17,7 +17,7 @@ * under the License. 
 */
 
-package org.elasticsearch.search.aggregations.metrics.geobounds;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.common.ParseField;
@@ -30,7 +30,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
@@ -39,7 +38,7 @@
 import java.util.List;
 import java.util.Map;
 
-public final class GeoBoundsAggregator extends MetricsAggregator {
+final class GeoBoundsAggregator extends MetricsAggregator {
 
     static final ParseField WRAP_LONGITUDE_FIELD = new ParseField("wrap_longitude");
 
@@ -52,7 +51,7 @@ public final class GeoBoundsAggregator extends MetricsAggregator {
     DoubleArray negLefts;
     DoubleArray negRights;
 
-    protected GeoBoundsAggregator(String name, SearchContext aggregationContext, Aggregator parent,
+    GeoBoundsAggregator(String name, SearchContext aggregationContext, Aggregator parent,
             ValuesSource.GeoPoint valuesSource, boolean wrapLongitude, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
         super(name, aggregationContext, parent, pipelineAggregators, metaData);
@@ -154,13 +153,15 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) {
         double posRight = posRights.get(owningBucketOrdinal);
         double negLeft = negLefts.get(owningBucketOrdinal);
         double negRight = negRights.get(owningBucketOrdinal);
-        return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, pipelineAggregators(), metaData());
+        return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude,
+            pipelineAggregators(), metaData());
     }
 
     @Override
     public InternalAggregation buildEmptyAggregation() {
         return new InternalGeoBounds(name, Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY,
-            Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, wrapLongitude, pipelineAggregators(), metaData());
+            Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, wrapLongitude,
+            pipelineAggregators(), metaData());
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
similarity index 88%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
index e67ad49115ac6..e6080d16cbfd4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.geobounds;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -32,11 +32,11 @@
 import java.util.List;
 import java.util.Map;
 
-public class GeoBoundsAggregatorFactory extends ValuesSourceAggregatorFactory {
+class GeoBoundsAggregatorFactory extends ValuesSourceAggregatorFactory {
 
     private final boolean wrapLongitude;
 
-    public GeoBoundsAggregatorFactory(String name, ValuesSourceConfig config, boolean wrapLongitude,
+    GeoBoundsAggregatorFactory(String name, ValuesSourceConfig config, boolean wrapLongitude,
             SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder,
             Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java
similarity index 85%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroid.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java
index 2cdf462f0429d..7276bf400dd4b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroid.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroid.java
@@ -17,13 +17,13 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.search.aggregations.Aggregation;
 
 /**
- * Interface for {@link org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregator}
+ * Interface for {@link GeoCentroidAggregator}
  */
 public interface GeoCentroid extends Aggregation {
     GeoPoint centroid();
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java
index 32fcaf32775c9..088483656f8ff 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java
index 795524e5a0fd3..f0f570ebaced6 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregator.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -29,7 +29,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java
index d153da3afa346..2bfb31c49930c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java
similarity index 90%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java
index 1360999d86610..881d7a4bf4f4d 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.search.DocValueFormat;
@@ -30,9 +30,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregator {
+class HDRPercentileRanksAggregator extends AbstractHDRPercentilesAggregator {
 
-    public HDRPercentileRanksAggregator(String name, Numeric valuesSource, SearchContext context, Aggregator parent,
+    HDRPercentileRanksAggregator(String name, Numeric valuesSource, SearchContext context, Aggregator parent,
             double[] percents, int numberOfSignificantValueDigits, boolean keyed, DocValueFormat format,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
         super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, format, pipelineAggregators,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorFactory.java
similarity index 92%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorFactory.java
index d89a9a85b28a9..1bb96e17da7a5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,14 +33,14 @@
 import java.util.List;
 import java.util.Map;
 
-public class HDRPercentileRanksAggregatorFactory
+class HDRPercentileRanksAggregatorFactory
         extends ValuesSourceAggregatorFactory {
 
     private final double[] values;
     private final int numberOfSignificantValueDigits;
     private final boolean keyed;
 
-    public HDRPercentileRanksAggregatorFactory(String name, ValuesSourceConfig config, double[] values,
+    HDRPercentileRanksAggregatorFactory(String name, ValuesSourceConfig config, double[] values,
             int numberOfSignificantValueDigits, boolean keyed, SearchContext context, AggregatorFactory parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java
similarity index 90%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java
index 93fd92e4fbfb5..f1a4a03b24bb1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.search.DocValueFormat;
@@ -30,9 +30,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
+class HDRPercentilesAggregator extends AbstractHDRPercentilesAggregator {
 
-    public HDRPercentilesAggregator(String name, Numeric valuesSource, SearchContext context, Aggregator parent, double[] percents,
+    HDRPercentilesAggregator(String name, Numeric valuesSource, SearchContext context, Aggregator parent, double[] percents,
             int numberOfSignificantValueDigits, boolean keyed, DocValueFormat formatter,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
         super(name, valuesSource, context, parent, percents, numberOfSignificantValueDigits, keyed, formatter,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorFactory.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorFactory.java
index 1074b6e142db6..fe53f32889a7e 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,13 +33,13 @@
 import java.util.List;
 import java.util.Map;
 
-public class HDRPercentilesAggregatorFactory extends ValuesSourceAggregatorFactory {
+class HDRPercentilesAggregatorFactory extends ValuesSourceAggregatorFactory {
 
     private final double[] percents;
     private final int numberOfSignificantValueDigits;
     private final boolean keyed;
 
-    public HDRPercentilesAggregatorFactory(String name, ValuesSourceConfig config, double[] percents,
+    HDRPercentilesAggregatorFactory(String name, ValuesSourceConfig config, double[] percents,
             int numberOfSignificantValueDigits, boolean keyed, SearchContext context, AggregatorFactory parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java
similarity index 99%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java
index 1dfe70d4b7f66..e8989868b0767 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlus.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlus.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LongBitSet;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvg.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java
index 285ea469aed9e..1b30afc087459 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvg.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.avg;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
index ce1e9fc89396f..b3fcb33a4fb84 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinality.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalCardinality.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -25,7 +25,6 @@
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java
index 1f259fbe87d9f..608fd1de435c8 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/InternalExtendedStats.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.stats.extended;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java
similarity index 99%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBounds.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java
index 69fc6fcaffe9e..7f259baca693f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBounds.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.geobounds;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.geo.GeoPoint;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java
index b8d317ff787de..d5d537ab66e5a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.geo.GeoEncodingUtils;
 import org.elasticsearch.common.ParseField;
@@ -41,7 +41,8 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr
     private final long count;
 
     public static long encodeLatLon(double lat, double lon) {
-        return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) | Integer.toUnsignedLong(GeoEncodingUtils.encodeLongitude(lon));
+        return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) |
+            Integer.toUnsignedLong(GeoEncodingUtils.encodeLongitude(lon));
     }
 
     public static double decodeLatitude(long encodedLatLon) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java
similarity index 90%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentileRanks.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java
index cb058128c5a49..bfe483d0e3c47 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentileRanks.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentileRanks.java
@@ -16,13 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -33,7 +31,7 @@
 public class InternalHDRPercentileRanks extends AbstractInternalHDRPercentiles implements PercentileRanks {
     public static final String NAME = "hdr_percentile_ranks";
 
-    public InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
+    InternalHDRPercentileRanks(String name, double[] cdfValues, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, cdfValues, state, keyed, formatter, pipelineAggregators, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java
similarity index 90%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentiles.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java
index a153e497f7bc8..5a62de8a964ec 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentiles.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentiles.java
@@ -16,13 +16,11 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -33,7 +31,7 @@
 public class InternalHDRPercentiles extends AbstractInternalHDRPercentiles implements Percentiles {
     public static final String NAME = "hdr_percentiles";
 
-    public InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
+    InternalHDRPercentiles(String name, double[] percents, DoubleHistogram state, boolean keyed, DocValueFormat formatter,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/InternalMax.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/InternalMax.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java
index 449351b88b169..300c82710f6d1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/InternalMax.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.max;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -34,8 +33,8 @@
 public class InternalMax extends InternalNumericMetricsAggregation.SingleValue implements Max {
     private final double max;
 
-    public InternalMax(String name, double max, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) {
+    public InternalMax(String name, double max, DocValueFormat formatter,
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.format = formatter;
         this.max = max;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/InternalMin.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java
similarity index 95%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/InternalMin.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java
index 886642c222baf..60ed785edfe91 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/InternalMin.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
index db0993d12967d..ec2419e03ab3c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java
@@ -17,7 +17,7 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.scripted;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -40,7 +40,7 @@ public class InternalScriptedMetric extends InternalAggregation implements Scrip
     final Script reduceScript;
     private final List<Object> aggregation;
 
-    public InternalScriptedMetric(String name, Object aggregation, Script reduceScript, List<PipelineAggregator> pipelineAggregators,
+    InternalScriptedMetric(String name, Object aggregation, Script reduceScript, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) {
         this(name, Collections.singletonList(aggregation), reduceScript, pipelineAggregators, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/InternalStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/InternalStats.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java
index 19f74cd72c821..a05d6db7024e6 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/InternalStats.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.stats;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -47,8 +46,7 @@ public static Metrics resolve(String name) {
     protected final double sum;
 
     public InternalStats(String name, long count, double sum, double min, double max, DocValueFormat formatter,
-            List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) {
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.count = count;
         this.sum = sum;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/InternalSum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java
similarity index 91%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/InternalSum.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java
index cedcdd4aab07d..c3bb7173b3f07 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/InternalSum.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalSum.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.sum;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -34,8 +33,8 @@
 public class InternalSum extends InternalNumericMetricsAggregation.SingleValue implements Sum {
     private final double sum;
 
-    public InternalSum(String name, double sum, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) {
+    InternalSum(String name, double sum, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.sum = sum;
         this.format = formatter;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java
similarity index 88%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentileRanks.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java
index 666993f41fda3..aa82ac5ba6add 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentileRanks.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentileRanks.java
@@ -16,12 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -32,8 +30,8 @@
 public class InternalTDigestPercentileRanks extends AbstractInternalTDigestPercentiles implements PercentileRanks {
     public static final String NAME = "tdigest_percentile_ranks";
 
-    public InternalTDigestPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, DocValueFormat formatter,
-            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
+    InternalTDigestPercentileRanks(String name, double[] cdfValues, TDigestState state, boolean keyed, DocValueFormat formatter,
+            List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, cdfValues, state, keyed, formatter, pipelineAggregators, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentiles.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java
index 5a62f24933b40..28f1230bec713 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentiles.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentiles.java
@@ -16,12 +16,10 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -32,7 +30,7 @@
 public class InternalTDigestPercentiles extends AbstractInternalTDigestPercentiles implements Percentiles {
     public static final String NAME = "tdigest_percentiles";
 
-    public InternalTDigestPercentiles(String name, double[] percents, TDigestState state, boolean keyed, DocValueFormat formatter,
+    InternalTDigestPercentiles(String name, double[] percents, TDigestState state, boolean keyed, DocValueFormat formatter,
             List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, percents, state, keyed, formatter, pipelineAggregators, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
similarity index 99%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
index 8b6fa373212b5..0c85191379fa9 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHits.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.tophits;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.ScoreDoc;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java
similarity index 92%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCount.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java
index 0ac42ff9f45d6..36f2749c791f4 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCount.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java
@@ -16,13 +16,12 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.valuecount;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -36,7 +35,7 @@
 public class InternalValueCount extends InternalNumericMetricsAggregation.SingleValue implements ValueCount {
     private final long value;
 
-    public InternalValueCount(String name, long value, List<PipelineAggregator> pipelineAggregators,
+    InternalValueCount(String name, long value, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.value = value;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java
index 9ad1a1df78aec..e06ffbc7b4a5a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/InternalWeightedAvg.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java
@@ -16,14 +16,13 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.weighted_avg;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
-import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 
 import java.io.IOException;
@@ -35,8 +34,8 @@ public class InternalWeightedAvg extends InternalNumericMetricsAggregation.Singl
     private final double sum;
     private final double weight;
 
-    public InternalWeightedAvg(String name, double sum, double weight, DocValueFormat format, List<PipelineAggregator> pipelineAggregators,
-            Map<String, Object> metaData) {
+    InternalWeightedAvg(String name, double sum, double weight, DocValueFormat format, List<PipelineAggregator> pipelineAggregators,
+            Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.sum = sum;
         this.weight = weight;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/Max.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Max.java
similarity index 87%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/Max.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Max.java
index bee808d16a12a..ee592fd75fbeb 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/Max.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Max.java
@@ -16,9 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.max;
-
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
+package org.elasticsearch.search.aggregations.metrics;
 
 /**
  * An aggregation that computes the maximum of the values in the current bucket.
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java
index 7135aceba95c8..0c3229f08fd51 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.max;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
index bd73470ff407d..c65277d389c13 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.max;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
@@ -31,7 +31,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
@@ -40,14 +39,14 @@
 import java.util.List;
 import java.util.Map;
 
-public class MaxAggregator extends NumericMetricsAggregator.SingleValue {
+class MaxAggregator extends NumericMetricsAggregator.SingleValue {
 
     final ValuesSource.Numeric valuesSource;
     final DocValueFormat formatter;
 
     DoubleArray maxes;
 
-    public MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
+    MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
             SearchContext context,
             Aggregator parent, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
index aedba76e0c7d1..314e1106b373f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.max;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,9 +33,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class MaxAggregatorFactory extends ValuesSourceAggregatorFactory {
+class MaxAggregatorFactory extends ValuesSourceAggregatorFactory {
 
-    public MaxAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context,
+    MaxAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context,
            AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
        super(name, config, context, parent, subFactoriesBuilder, metaData);
    }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/Min.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Min.java
similarity index 87%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/Min.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Min.java
index 3b5488199e875..5fd1984da880a 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/Min.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Min.java
@@ -16,9 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.min;
-
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
+package org.elasticsearch.search.aggregations.metrics;
 
 /**
  * An aggregation that computes the minimum of the values in the current bucket.
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java
similarity index 96%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java
index 380569f18969a..2d23539189dc8 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -27,7 +27,6 @@
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
-import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
index 0f5dd36cb4930..ea8e160e1382f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
@@ -31,7 +31,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
@@ -40,14 +39,14 @@
 import java.util.List;
 import java.util.Map;
 
-public class MinAggregator extends NumericMetricsAggregator.SingleValue {
+class MinAggregator extends NumericMetricsAggregator.SingleValue {
 
     final ValuesSource.Numeric valuesSource;
     final DocValueFormat format;
 
     DoubleArray mins;
 
-    public MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
+    MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
             SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
         super(name, context, parent, pipelineAggregators, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
index 8f5538fb7a2bb..d08b8199a3307 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.min;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,9 +33,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class MinAggregatorFactory extends ValuesSourceAggregatorFactory {
+class MinAggregatorFactory extends ValuesSourceAggregatorFactory {
 
-    public MinAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context,
+    MinAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context,
            AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
        super(name, config, context, parent, subFactoriesBuilder, metaData);
    }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/ParsedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/ParsedAvg.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java
index 16d91bd08f0d3..0e15d417f87fc 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/ParsedAvg.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedAvg.java
@@ -17,12 +17,11 @@
  * under the License.
 */
 
-package org.elasticsearch.search.aggregations.metrics.avg;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation;
 
 import java.io.IOException;
 
@@ -61,4 +60,4 @@ public static ParsedAvg fromXContent(XContentParser parser, final String name) {
         avg.setName(name);
         return avg;
     }
-}
\ No newline at end of file
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/ParsedCardinality.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/ParsedCardinality.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java
index 5a615f61a4ae6..848f2e6fd0101 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/ParsedCardinality.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedCardinality.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -70,4 +70,4 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params)
         builder.field(CommonFields.VALUE.getPreferredName(), cardinalityValue);
         return builder;
     }
-}
\ No newline at end of file
+}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ParsedExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ParsedExtendedStats.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java
index 59311127368f5..cee96c07a2471 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ParsedExtendedStats.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedExtendedStats.java
@@ -17,7 +17,7 @@
  * under the License.
*/ -package org.elasticsearch.search.aggregations.metrics.stats.extended; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.collect.Tuple; @@ -26,8 +26,7 @@ import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats.Fields; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats.Fields; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/ParsedGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/ParsedGeoBounds.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java index 70abe15d29099..11d36d2ceeead 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/ParsedGeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoBounds.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.geobounds; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.geo.GeoPoint; @@ -30,11 +30,11 @@ import java.io.IOException; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.BOTTOM_RIGHT_FIELD; -import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.BOUNDS_FIELD; -import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.LAT_FIELD; -import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.LON_FIELD; -import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds.TOP_LEFT_FIELD; +import static org.elasticsearch.search.aggregations.metrics.InternalGeoBounds.BOTTOM_RIGHT_FIELD; +import static org.elasticsearch.search.aggregations.metrics.InternalGeoBounds.BOUNDS_FIELD; +import static org.elasticsearch.search.aggregations.metrics.InternalGeoBounds.LAT_FIELD; +import static org.elasticsearch.search.aggregations.metrics.InternalGeoBounds.LON_FIELD; +import static org.elasticsearch.search.aggregations.metrics.InternalGeoBounds.TOP_LEFT_FIELD; public class ParsedGeoBounds extends ParsedAggregation implements GeoBounds { private GeoPoint topLeft; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/ParsedGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/ParsedGeoCentroid.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java index 7ce1f5d86feb3..ff40d33de42e6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/ParsedGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedGeoCentroid.java @@ -17,14 
+17,14 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.geocentroid; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid.Fields; +import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid.Fields; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentileRanks.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java index f5fd7717e04bf..eac1f2109056c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentileRanks.java @@ -17,13 +17,10 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import java.io.IOException; import java.util.Iterator; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java index 1b1ba906aa087..bb34d8550d0ee 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/ParsedHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedHDRPercentiles.java @@ -17,12 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/ParsedMax.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/ParsedMax.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java index f6a3190cd04d4..4a284c2d20441 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/max/ParsedMax.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMax.java @@ -17,12 +17,11 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.max; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation; import java.io.IOException; @@ -59,4 +58,4 @@ public static ParsedMax fromXContent(XContentParser parser, final String name) { max.setName(name); return max; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/ParsedMin.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java similarity index 93% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/ParsedMin.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java index 9b214bb346201..51a53d50d7a33 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/min/ParsedMin.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedMin.java @@ -17,12 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.min; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation; import java.io.IOException; @@ -59,4 +58,4 @@ public static ParsedMin fromXContent(XContentParser parser, final String name) { min.setName(name); return min; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java similarity index 85% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentileRanks.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java index 2c80d0328dd86..5c38bc684a8a7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentileRanks.java @@ -17,9 +17,9 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; -public abstract class ParsedPercentileRanks extends ParsedPercentiles implements PercentileRanks { +abstract class ParsedPercentileRanks extends ParsedPercentiles implements PercentileRanks { @Override public double percent(double value) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java similarity index 99% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java index 2c7da76446d5a..2742050862c0e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedPercentiles.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ParsedScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ParsedScriptedMetric.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java index f2aae9f5e8aa5..696c12219a4d6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ParsedScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedScriptedMetric.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.scripted; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.ObjectParser; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/ParsedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/ParsedStats.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java index 4c676cf227838..e45dd3c87c1e4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/ParsedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedStats.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.stats; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; @@ -25,7 +25,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.metrics.stats.InternalStats.Fields; +import org.elasticsearch.search.aggregations.metrics.InternalStats.Fields; import java.io.IOException; import java.util.HashMap; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/ParsedSum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/ParsedSum.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java index a51f03d356549..514edaa750b6e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/ParsedSum.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedSum.java @@ -17,12 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.sum; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation; import java.io.IOException; @@ -58,4 +57,4 @@ public static ParsedSum fromXContent(XContentParser parser, final String name) { sum.setName(name); return sum; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentileRanks.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java index 01929f374d486..f17bc8784aef4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentileRanks.java @@ -17,13 +17,10 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import java.io.IOException; import java.util.Iterator; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java index cbae25d61e046..2453c702b9608 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/ParsedTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTDigestPercentiles.java @@ -17,12 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/ParsedTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/ParsedTopHits.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java index 362423abca8a3..321ed5709e82f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/ParsedTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedTopHits.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ObjectParser; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ParsedValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ParsedValueCount.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java index 7430bca08de32..0f60b145fd119 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ParsedValueCount.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedValueCount.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.valuecount; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -71,4 +71,4 @@ public static ParsedValueCount fromXContent(XContentParser parser, final String sum.setName(name); return sum; } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java index dcda79ce33e92..984b8509db755 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/ParsedWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ParsedWeightedAvg.java @@ -17,16 +17,15 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.ParsedSingleValueNumericMetricsAggregation; import java.io.IOException; -public class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg { +class ParsedWeightedAvg extends ParsedSingleValueNumericMetricsAggregation implements WeightedAvg { @Override public double getValue() { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentile.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java similarity index 95% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentile.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java index ca62ca6b2007e..85c1184cc062e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentile.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentile.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; import java.util.Objects; @@ -56,4 +56,4 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(percent, value); } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanks.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanks.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanks.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanks.java index 8a2dc9d902644..468045a14f4c2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanks.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanks.java @@ -17,9 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An aggregation that computes approximate percentiles given values. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java similarity index 97% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java index 6bb956452ef01..3bf70d20989d3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentileRanksAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -29,8 +29,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentileRanksAggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentileRanksAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentiles.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentiles.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentiles.java index a9052536dc46b..213eede90bf07 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/Percentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Percentiles.java @@ -16,9 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An aggregation that computes approximate percentiles. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java index 5c90832bb150a..3a6f5f89622f0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,8 +28,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.HDRPercentilesAggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.TDigestPercentilesAggregatorFactory; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java index 3b8085793dc0a..3797e01e899ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethod.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethod.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetric.java similarity index 94% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetric.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetric.java index 9733e5f497923..4043e98ba69b4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetric.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.scripted; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregation; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java similarity index 99% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java index 8b6d834184d73..6a25c51737b73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
 */
-package org.elasticsearch.search.aggregations.metrics.scripted;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java
similarity index 83%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java
index 8a49530f0d3da..a0c287f6eac51 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.scripted;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
@@ -29,7 +29,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.MetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.internal.SearchContext;
@@ -37,17 +36,22 @@
 import java.util.List;
 import java.util.Map;
 
-public class ScriptedMetricAggregator extends MetricsAggregator {
+class ScriptedMetricAggregator extends MetricsAggregator {
 
     private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript;
     private final ScriptedMetricAggContexts.CombineScript combineScript;
     private final Script reduceScript;
     private Map<String, Object> aggState;
 
-    protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript,
-                                       Script reduceScript, Map<String, Object> aggState, SearchContext context, Aggregator parent,
-                                       List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
-            throws IOException {
+    ScriptedMetricAggregator(String name,
+                             ScriptedMetricAggContexts.MapScript.LeafFactory mapScript,
+                             ScriptedMetricAggContexts.CombineScript combineScript,
+                             Script reduceScript,
+                             Map<String, Object> aggState,
+                             SearchContext context,
+                             Aggregator parent,
+                             List<PipelineAggregator> pipelineAggregators,
+                             Map<String, Object> metaData) throws IOException {
         super(name, context, parent, pipelineAggregators, metaData);
         this.aggState = aggState;
         this.mapScript = mapScript;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java
similarity index 86%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java
index 3b8f8321deaa8..e08835f0bea14 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.scripted;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.script.ScriptedMetricAggContexts;
 import org.elasticsearch.common.util.CollectionUtils;
@@ -36,7 +36,7 @@
 import java.util.List;
 import java.util.Map;
 
-public class ScriptedMetricAggregatorFactory extends AggregatorFactory<ScriptedMetricAggregatorFactory> {
+class ScriptedMetricAggregatorFactory extends AggregatorFactory<ScriptedMetricAggregatorFactory> {
 
     private final ScriptedMetricAggContexts.MapScript.Factory mapScript;
     private final Map<String, Object> mapScriptParams;
@@ -48,13 +48,13 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory<ScriptedMetricAggregatorFactory>
     private final Map<String, Object> initScriptParams;
 
-    public ScriptedMetricAggregatorFactory(String name,
-            ScriptedMetricAggContexts.MapScript.Factory mapScript, Map<String, Object> mapScriptParams,
-            ScriptedMetricAggContexts.InitScript.Factory initScript, Map<String, Object> initScriptParams,
-            ScriptedMetricAggContexts.CombineScript.Factory combineScript,
-            Map<String, Object> combineScriptParams, Script reduceScript, Map<String, Object> aggParams,
-            SearchLookup lookup, SearchContext context, AggregatorFactory<?> parent,
-            AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
+    ScriptedMetricAggregatorFactory(String name,
+            ScriptedMetricAggContexts.MapScript.Factory mapScript, Map<String, Object> mapScriptParams,
+            ScriptedMetricAggContexts.InitScript.Factory initScript, Map<String, Object> initScriptParams,
+            ScriptedMetricAggContexts.CombineScript.Factory combineScript,
+            Map<String, Object> combineScriptParams, Script reduceScript, Map<String, Object> aggParams,
+            SearchLookup lookup, SearchContext context, AggregatorFactory<?> parent,
+            AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
         super(name, context, parent, subFactories, metaData);
         this.mapScript = mapScript;
         this.mapScriptParams = mapScriptParams;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/Stats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/Stats.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java
index 46620f51dc2fc..5b8be9390fd2c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/Stats.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Stats.java
@@ -16,9 +16,8 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.stats;
+package org.elasticsearch.search.aggregations.metrics;
 
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
 
 /**
  * Statistics over a set of values (either aggregated over field data or scripts)
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java
index 3d9d9e6c030a1..d96bbba447580 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.stats;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java
similarity index 93%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java
index 42d14d05fecb4..1093ecb0692ab 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.stats;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
@@ -30,7 +30,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
@@ -39,7 +38,7 @@
 import java.util.List;
 import java.util.Map;
 
-public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
+class StatsAggregator extends NumericMetricsAggregator.MultiValue {
 
     final ValuesSource.Numeric valuesSource;
     final DocValueFormat format;
@@ -51,10 +50,9 @@ public class StatsAggregator extends NumericMetricsAggregator.MultiValue {
     DoubleArray maxes;
 
-    public StatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat format,
-                           SearchContext context,
-                           Aggregator parent, List<PipelineAggregator> pipelineAggregators,
-                           Map<String, Object> metaData) throws IOException {
+    StatsAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat format,
+                    SearchContext context, Aggregator parent,
+                    List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
         super(name, context, parent, pipelineAggregators, metaData);
         this.valuesSource = valuesSource;
         if (valuesSource != null) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java
index a6e59d7c75bf0..82dce359037c2 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.stats;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,9 +33,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class StatsAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, StatsAggregatorFactory> {
+class StatsAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, StatsAggregatorFactory> {
 
-    public StatsAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
+    StatsAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
             AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/Sum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java
similarity index 87%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/Sum.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java
index d9cacdba114e9..f499b3ecc6ebd 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/Sum.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java
@@ -16,9 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.sum;
-
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation;
+package org.elasticsearch.search.aggregations.metrics;
 
 /**
  * An aggregation that computes the sum of the values in the current bucket.
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java
similarity index 98%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java
index ed47f245111ee..8035a3ad671f7 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.sum;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.ScoreMode;
@@ -29,7 +29,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
-import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.internal.SearchContext;
@@ -38,7 +37,7 @@
 import java.util.List;
 import java.util.Map;
 
-public class SumAggregator extends NumericMetricsAggregator.SingleValue {
+class SumAggregator extends NumericMetricsAggregator.SingleValue {
 
     private final ValuesSource.Numeric valuesSource;
     private final DocValueFormat format;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java
similarity index 89%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java
index 8b6103214a754..d8fa88541cb9b 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.search.aggregations.metrics.sum;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,9 +33,9 @@
 import java.util.List;
 import java.util.Map;
 
-public class SumAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, SumAggregatorFactory> {
+class SumAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, SumAggregatorFactory> {
 
-    public SumAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
+    SumAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, SearchContext context,
            AggregatorFactory<?> parent, AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
     }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java
similarity index 71%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java
index 0e86eea6364b1..69e385151eae3 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -29,12 +29,18 @@
 import java.util.List;
 import java.util.Map;
 
-public class TDigestPercentileRanksAggregator extends AbstractTDigestPercentilesAggregator {
+class TDigestPercentileRanksAggregator extends AbstractTDigestPercentilesAggregator {
 
-    public TDigestPercentileRanksAggregator(String name, Numeric valuesSource, SearchContext context, Aggregator parent, double[] percents,
-                                            double compression, boolean keyed, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
-                                            Map<String, Object> metaData)
-            throws IOException {
+    TDigestPercentileRanksAggregator(String name,
+                                     Numeric valuesSource,
+                                     SearchContext context,
+                                     Aggregator parent,
+                                     double[] percents,
+                                     double compression,
+                                     boolean keyed,
+                                     DocValueFormat formatter,
+                                     List<PipelineAggregator> pipelineAggregators,
+                                     Map<String, Object> metaData) throws IOException {
         super(name, valuesSource, context, parent, percents, compression, keyed, formatter, pipelineAggregators, metaData);
     }
 
@@ -50,7 +56,8 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) {
     @Override
     public InternalAggregation buildEmptyAggregation() {
-        return new InternalTDigestPercentileRanks(name, keys, new TDigestState(compression), keyed, formatter, pipelineAggregators(), metaData());
+        return new InternalTDigestPercentileRanks(name, keys, new TDigestState(compression), keyed,
+            formatter, pipelineAggregators(), metaData());
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorFactory.java
similarity index 92%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorFactory.java
index 223d25216bca2..10913bf59d14f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,14 +33,14 @@
 import java.util.List;
 import java.util.Map;
 
-public class TDigestPercentileRanksAggregatorFactory
+class TDigestPercentileRanksAggregatorFactory
         extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, TDigestPercentileRanksAggregatorFactory> {
 
     private final double[] percents;
     private final double compression;
     private final boolean keyed;
 
-    public TDigestPercentileRanksAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, double[] percents,
+    TDigestPercentileRanksAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, double[] percents,
             double compression, boolean keyed, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java
similarity index 72%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java
index b7c1134e935d4..81bbe15e82150 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregator.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -29,12 +29,18 @@
 import java.util.List;
 import java.util.Map;
 
-public class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggregator {
+class TDigestPercentilesAggregator extends AbstractTDigestPercentilesAggregator {
 
-    public TDigestPercentilesAggregator(String name, Numeric valuesSource, SearchContext context,
-                                        Aggregator parent, double[] percents,
-                                        double compression, boolean keyed, DocValueFormat formatter, List<PipelineAggregator> pipelineAggregators,
-                                        Map<String, Object> metaData) throws IOException {
+    TDigestPercentilesAggregator(String name,
+                                 Numeric valuesSource,
+                                 SearchContext context,
+                                 Aggregator parent,
+                                 double[] percents,
+                                 double compression,
+                                 boolean keyed,
+                                 DocValueFormat formatter,
+                                 List<PipelineAggregator> pipelineAggregators,
+                                 Map<String, Object> metaData) throws IOException {
         super(name, valuesSource, context, parent, percents, compression, keyed, formatter, pipelineAggregators, metaData);
     }
 
@@ -60,6 +66,7 @@ public double metric(String name, long bucketOrd) {
     @Override
     public InternalAggregation buildEmptyAggregation() {
-        return new InternalTDigestPercentiles(name, keys, new TDigestState(compression), keyed, formatter, pipelineAggregators(), metaData());
+        return new InternalTDigestPercentiles(name, keys, new TDigestState(compression), keyed,
+            formatter, pipelineAggregators(), metaData());
     }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorFactory.java
similarity index 92%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorFactory.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorFactory.java
index 47b17d84f3b6b..0c1396196fb62 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorFactory.java
@@ -17,7 +17,7 @@
  * under the License.
 */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
@@ -33,14 +33,14 @@
 import java.util.List;
 import java.util.Map;
 
-public class TDigestPercentilesAggregatorFactory
+class TDigestPercentilesAggregatorFactory
         extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, TDigestPercentilesAggregatorFactory> {
 
     private final double[] percents;
     private final double compression;
     private final boolean keyed;
 
-    public TDigestPercentilesAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, double[] percents,
+    TDigestPercentilesAggregatorFactory(String name, ValuesSourceConfig<Numeric> config, double[] percents,
             double compression, boolean keyed, SearchContext context, AggregatorFactory<?> parent,
             AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData) throws IOException {
         super(name, config, context, parent, subFactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestState.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
similarity index 97%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestState.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
index bcf000e5e09ea..33b967fca86ac 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestState.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TDigestState.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;
 
 import com.tdunning.math.stats.AVLTreeDigest;
 import com.tdunning.math.stats.Centroid;
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHits.java
similarity index 94%
rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHits.java
rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHits.java
index 565a80a13c804..7c1b84b750ac1 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHits.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHits.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
*/ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java similarity index 99% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 6b8ae8d79cac4..38b783e6b9519 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParsingException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 48a42b74292c2..ddd62b82500ac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import com.carrotsearch.hppc.LongObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; @@ -48,7 +48,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.MetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; @@ -61,7 +60,7 @@ import java.util.List; import java.util.Map; -public class TopHitsAggregator extends MetricsAggregator { +class TopHitsAggregator extends MetricsAggregator { private static class Collectors { public final TopDocsCollector topDocsCollector; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java similarity index 96% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java index 416c984610503..6086942955122 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -38,7 +38,7 @@ import java.util.Map; import java.util.Optional; -public class TopHitsAggregatorFactory extends AggregatorFactory { +class TopHitsAggregatorFactory extends AggregatorFactory { private final int from; private final int size; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCount.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCount.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCount.java index a66d982749883..2c25254d65b58 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCount.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCount.java @@ -16,9 +16,7 @@ * specific language governing permissions and limitations * under the License. 
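(The TopHitsAggregator renamed above keeps one top-docs collector per bucket ordinal, the LongObjectHashMap inside its Collectors holder, so that for example a terms aggregation can return the best-scoring documents of each bucket. A rough standalone sketch of that idea, a bounded min-heap per bucket, using only the JDK; every name here is illustrative, not Elasticsearch API:)

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;

public class TopNPerBucket {
    private final int n;
    // One bounded min-heap per bucket ordinal, analogous to one collector per bucket.
    private final Map<Long, PriorityQueue<double[]>> heaps = new HashMap<>();

    TopNPerBucket(int n) { this.n = n; }

    void collect(long bucket, int docId, double score) {
        PriorityQueue<double[]> heap = heaps.computeIfAbsent(
            bucket, b -> new PriorityQueue<>((a, c) -> Double.compare(a[1], c[1])));
        heap.offer(new double[] { docId, score });
        if (heap.size() > n) {
            heap.poll(); // evict the current worst hit, keeping the heap bounded at n
        }
    }

    List<double[]> top(long bucket) {
        List<double[]> hits = new ArrayList<>(heaps.getOrDefault(bucket, new PriorityQueue<>()));
        hits.sort((a, c) -> Double.compare(c[1], a[1])); // best score first
        return hits;
    }

    public static void main(String[] args) {
        TopNPerBucket topHits = new TopNPerBucket(2);
        topHits.collect(0, 1, 0.3);
        topHits.collect(0, 2, 0.9);
        topHits.collect(0, 3, 0.5);
        topHits.top(0).forEach(h -> System.out.println("doc=" + (int) h[0] + " score=" + h[1]));
        // prints doc=2 score=0.9 then doc=3 score=0.5
    }
}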
*/ -package org.elasticsearch.search.aggregations.metrics.valuecount; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An get that holds the number of values that the current document set has for a specific diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java index a69efd76e4227..70243cb8bc47a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.valuecount; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java similarity index 92% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java index 99e7bdf769aa7..96a4cfe930582 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.valuecount; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.lease.Releasables; @@ -27,7 +27,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -42,14 +41,14 @@ * This aggregator works in a multi-bucket mode, that is, when serves as a sub-aggregator, a single aggregator instance aggregates the * counts for all buckets owned by the parent aggregator) */ -public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { +class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { final ValuesSource valuesSource; // a count per bucket LongArray counts; - public ValueCountAggregator(String name, ValuesSource valuesSource, + ValueCountAggregator(String name, ValuesSource valuesSource, SearchContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java index 80c8001b93c97..26f1760940d20 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. 
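(Note the pattern in the hunk above and throughout this patch: implementation classes such as ValueCountAggregator drop from public to package-private, leaving the public builders as the supported entry points. A hedged usage sketch of that surface; the index field and aggregation names are invented for illustration, the builder calls are the standard ones:)

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class MetricsViaBuilders {
    public static void main(String[] args) {
        // The aggregator and factory classes are no longer reachable from outside
        // org.elasticsearch.search.aggregations.metrics; requests go through builders.
        SearchSourceBuilder source = new SearchSourceBuilder()
            .aggregation(AggregationBuilders.count("num_prices").field("price"))
            .aggregation(AggregationBuilders.avg("avg_price").field("price"))
            .aggregation(AggregationBuilders.max("max_price").field("price"));
        System.out.println(source); // renders the search body, aggregations included
    }
}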
*/ -package org.elasticsearch.search.aggregations.metrics.valuecount; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -32,9 +32,9 @@ import java.util.List; import java.util.Map; -public class ValueCountAggregatorFactory extends ValuesSourceAggregatorFactory { +class ValueCountAggregatorFactory extends ValuesSourceAggregatorFactory { - public ValueCountAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context, + ValueCountAggregatorFactory(String name, ValuesSourceConfig config, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException { super(name, config, context, parent, subFactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvg.java similarity index 87% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvg.java index 7af48f677c1f6..cf52a8b6fe994 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvg.java @@ -16,9 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; - -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +package org.elasticsearch.search.aggregations.metrics; /** * An aggregation that computes the average of the values in the current bucket. diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java similarity index 98% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java index be06f792a5e89..c3f67fb905254 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java @@ -17,7 +17,7 @@ * under the License. 
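(The WeightedAvgAggregator in the next hunk carries compensation arrays, weightCompensations among them, alongside its running sums: Kahan-compensated summation, which bounds floating-point error when many terms are folded into one accumulator. A self-contained sketch of that technique applied to the weighted-average formula sum(value * weight) / sum(weight); the variable names are illustrative, not the aggregator's actual per-bucket fields:)

public class WeightedAvgSketch {
    public static void main(String[] args) {
        double[] values  = {1.0, 2.0, 3.0};
        double[] weights = {3.0, 1.0, 1.0};
        // Kahan-compensated running sums for both numerator and denominator.
        double sum = 0, sumComp = 0, weightSum = 0, weightComp = 0;
        for (int i = 0; i < values.length; i++) {
            double v = values[i] * weights[i];
            double y = v - sumComp;              // corrected term
            double t = sum + y;
            sumComp = (t - sum) - y;             // lost low-order bits, fed back next round
            sum = t;
            double wy = weights[i] - weightComp;
            double wt = weightSum + wy;
            weightComp = (wt - weightSum) - wy;
            weightSum = wt;
        }
        System.out.println("weighted avg = " + sum / weightSum); // 1.6
    }
}

(With the compensation terms, the rounding error stays roughly constant regardless of how many values are accumulated, instead of growing with the count.)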
*/ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java similarity index 88% rename from server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java index 0d9c2b1bc3b83..08d06cf21eda3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregator.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ScoreMode; @@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.support.MultiValuesSource; import org.elasticsearch.search.internal.SearchContext; @@ -39,10 +38,10 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.VALUE_FIELD; -import static org.elasticsearch.search.aggregations.metrics.weighted_avg.WeightedAvgAggregationBuilder.WEIGHT_FIELD; +import static org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder.VALUE_FIELD; +import static org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder.WEIGHT_FIELD; -public class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue { +class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue { private final MultiValuesSource.NumericMultiValuesSource valuesSources; @@ -52,9 +51,9 @@ public class WeightedAvgAggregator extends NumericMetricsAggregator.SingleValue private DoubleArray weightCompensations; private DocValueFormat format; - public WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, DocValueFormat format, - SearchContext context, Aggregator parent, List pipelineAggregators, - Map metaData) throws IOException { + WeightedAvgAggregator(String name, MultiValuesSource.NumericMultiValuesSource valuesSources, DocValueFormat format, + SearchContext context, Aggregator parent, + List pipelineAggregators, Map metaData) throws IOException { super(name, context, parent, pipelineAggregators, metaData); this.valuesSources = valuesSources; this.format = format; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java similarity index 82% rename from 
server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java rename to server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java index c7aab73af2867..afdb727c512b0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorFactory.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -34,12 +34,12 @@ import java.util.List; import java.util.Map; -public class WeightedAvgAggregatorFactory extends MultiValuesSourceAggregatorFactory { +class WeightedAvgAggregatorFactory extends MultiValuesSourceAggregatorFactory { - public WeightedAvgAggregatorFactory(String name, Map> configs, - DocValueFormat format, SearchContext context, AggregatorFactory parent, - AggregatorFactories.Builder subFactoriesBuilder, - Map metaData) throws IOException { + WeightedAvgAggregatorFactory(String name, Map> configs, + DocValueFormat format, SearchContext context, AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { super(name, configs, format, context, parent, subFactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java index 5d13638f70a34..97b43e2606907 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java @@ -25,8 +25,8 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java index eebe296e531fe..c635ff82735b3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/ParsedPercentilesBucket.java @@ -22,8 +22,8 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles; -import 
org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles; +import org.elasticsearch.search.aggregations.metrics.Percentiles; import java.io.IOException; import java.util.Map.Entry; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java index 64424ac5abc3c..0dfe9d24582f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; +import org.elasticsearch.search.aggregations.metrics.Percentiles; public interface PercentilesBucket extends Percentiles { } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java index 371e5bf5e846d..352402fff827f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/InternalStatsBucket.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.stats.InternalStats; +import org.elasticsearch.search.aggregations.metrics.InternalStats; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java index c7ddcc6ee9686..84ec05f4eef9f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/ParsedStatsBucket.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; public class ParsedStatsBucket extends ParsedStats implements StatsBucket { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java index 0e158d2a12270..c29a27b8446ab 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/StatsBucket.java @@ -19,7 +19,7 @@ * under the License. 
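(The stats and extended-stats types repackaged in this stretch derive their outputs from a handful of per-bucket accumulators; extended_stats additionally tracks a sum of squares so that variance and standard deviation can be computed at read time as population statistics. A small sketch of that derivation, with example data chosen so the numbers come out round:)

public class ExtendedStatsSketch {
    public static void main(String[] args) {
        double[] values = {2, 4, 4, 4, 5, 5, 7, 9};
        long count = 0;
        double min = Double.POSITIVE_INFINITY, max = Double.NEGATIVE_INFINITY;
        double sum = 0, sumOfSquares = 0;
        for (double v : values) {           // the accumulators kept per bucket
            count++;
            min = Math.min(min, v);
            max = Math.max(max, v);
            sum += v;
            sumOfSquares += v * v;
        }
        double avg = sum / count;
        double variance = (sumOfSquares - sum * sum / count) / count; // population variance
        double stdDeviation = Math.sqrt(variance);
        System.out.println("avg=" + avg + " variance=" + variance + " std=" + stdDeviation);
        // avg=5.0 variance=4.0 std=2.0 for this data set
    }
}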
*/ -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; /** * Statistics over a set of buckets diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java index f252cae37e9af..9e3c7cf88f670 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ExtendedStatsBucket.java @@ -19,10 +19,10 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; /** * Extended Statistics over a set of buckets */ public interface ExtendedStatsBucket extends ExtendedStats { -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java index 5589a9ebbcb37..c7f2943bfcfcf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucket.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; @@ -49,8 +49,7 @@ public String getWriteableName() { } @Override - public org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats doReduce( - List aggregations, ReduceContext reduceContext) { + public InternalExtendedStats doReduce(List aggregations, ReduceContext reduceContext) { throw new UnsupportedOperationException("Not supported"); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java index d292249242396..caa014c9b4944 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/ParsedExtendedStatsBucket.java @@ -21,7 +21,7 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; public class ParsedExtendedStatsBucket extends ParsedExtendedStats implements ExtendedStatsBucket { diff --git 
a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 04fd258fa1596..b109e82beefee 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; +import org.elasticsearch.search.aggregations.metrics.InternalMax; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.SearchHit; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index fcafce3936e90..626a2264e1f07 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -59,17 +59,17 @@ import org.elasticsearch.search.aggregations.metrics.InternalStatsBucketTests; import org.elasticsearch.search.aggregations.metrics.InternalStatsTests; import org.elasticsearch.search.aggregations.metrics.InternalSumTests; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvgTests; -import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinalityTests; -import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBoundsTests; -import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroidTests; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesRanksTests; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentilesTests; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesRanksTests; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentilesTests; -import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetricTests; -import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHitsTests; -import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCountTests; +import org.elasticsearch.search.aggregations.metrics.InternalAvgTests; +import org.elasticsearch.search.aggregations.metrics.InternalCardinalityTests; +import org.elasticsearch.search.aggregations.metrics.InternalGeoBoundsTests; +import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroidTests; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentilesRanksTests; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentilesTests; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentilesRanksTests; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentilesTests; +import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetricTests; +import org.elasticsearch.search.aggregations.metrics.InternalTopHitsTests; +import org.elasticsearch.search.aggregations.metrics.InternalValueCountTests; import 
org.elasticsearch.search.aggregations.pipeline.InternalSimpleValueTests; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValueTests; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.InternalPercentilesBucketTests; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 2d9f462d86274..28e77e0b9db1d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.junit.After; import org.junit.Before; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java index dfdaa7d9fb2ac..365b6ddc218f1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MetaDataIT.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java index 5b0b0378e463a..2fdacd63d3d28 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MissingValueIT.java @@ -25,11 +25,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; -import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Cardinality; +import org.elasticsearch.search.aggregations.metrics.GeoBounds; +import org.elasticsearch.search.aggregations.metrics.GeoCentroid; +import org.elasticsearch.search.aggregations.metrics.Percentiles; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.search.aggregations.AggregationBuilders.cardinality; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java index 81dce8002e84d..b86fd279b31cc 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrix; import org.elasticsearch.search.aggregations.bucket.adjacency.AdjacencyMatrix.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index c40e3b73c6606..58d0ca09ff203 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -40,8 +40,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index 98f73b34b5677..c076fa827d072 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -31,7 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTime; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index a8bc97682f0db..ac601022c78d1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.BucketOrder; import 
org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 2876fbbaa252d..aad828f95dbb4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -35,11 +35,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 91c098ff85acc..bcc14f09ed8b9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 2c9ca8fb447ec..860a2d662b88a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filters; import org.elasticsearch.search.aggregations.bucket.filter.FiltersAggregator.KeyedFilter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java index fc7a24cf7985b..232c9f07510a5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalAggregatorTests.java @@ -33,8 +33,8 @@ import 
org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregator; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; -import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import java.io.IOException; import java.util.function.BiConsumer; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java index 4878398c98ca8..429b8c71f723d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java @@ -24,7 +24,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.global.Global; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.test.ESIntegTestCase; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index d7bd069f2ba3d..38f373f131aa5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -34,10 +34,10 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index e7e6402727449..1e67b59ee326d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -34,11 +34,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import 
org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java index ac4d8ac315f7c..d51a4a59ff3a0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.missing.Missing; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java index 5b8c3b878c19a..22b6e25252217 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java @@ -26,10 +26,10 @@ import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index d5f93f0daa704..10fa2231807c6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -34,9 +34,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import 
org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 99aeac167e06e..894834882f9f9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index 4a69f9d537934..6a3a9731612c2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCount; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index 81034a0355061..c135f284dd21b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index b0263cb2dbd80..52f6e4227e7cd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -53,10 +53,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import 
org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.TopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.sort.SortOrder; import org.joda.time.DateTimeZone; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 7cf29e3aa9cc5..1194e6c69d834 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -38,7 +38,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index e1206cb8d1552..0abfe871e6e4f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -56,13 +56,13 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.Min; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import 
org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.VersionUtils; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index fd831e5076caa..e0601cbe2f542 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -35,8 +35,8 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.InternalAggregation; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import java.io.IOException; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java index 2b217f4ff6e6b..e446dfb3d2b9a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/SamplerAggregatorTests.java @@ -37,8 +37,8 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.min.Min; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Min; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 160e51a67b2c8..c92681d99a9b0 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -37,10 +37,10 @@ import org.elasticsearch.search.aggregations.bucket.AbstractTermsTestCase; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.Matchers; import org.junit.After; diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 1b33ed478709b..819d39cb62bdf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -67,8 +67,8 @@ import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.sort.FieldSortBuilder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index b899c86d0981a..49442e3fbc01a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -64,7 +64,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { protected static int numDocs; protected static int numUniqueGeoPoints; protected static GeoPoint[] singleValues, multiValues; - protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid, unmappedCentroid; + protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, + singleCentroid, multiCentroid, unmappedCentroid; protected static ObjectIntMap expectedDocCountsForGeoHash = null; protected static ObjectObjectMap expectedCentroidsForGeoHash = null; protected static final double GEOHASH_TOLERANCE = 1E-5D; @@ -135,7 +136,10 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate(EMPTY_IDX_NAME).addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point")); assertAcked(prepareCreate(DATELINE_IDX_NAME) - .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long", "tag", "type=keyword")); + .addMapping("type", SINGLE_VALUED_FIELD_NAME, + "type=geo_point", MULTI_VALUED_FIELD_NAME, + "type=geo_point", NUMBER_FIELD_NAME, + "type=long", "tag", "type=keyword")); GeoPoint[] geoValues = new GeoPoint[5]; geoValues[0] = new GeoPoint(38, 178); @@ -153,7 +157,11 @@ public void setupSuiteScopeCluster() throws Exception { .endObject())); } assertAcked(prepareCreate(HIGH_CARD_IDX_NAME).setSettings(Settings.builder().put("number_of_shards", 2)) - .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=geo_point", MULTI_VALUED_FIELD_NAME, "type=geo_point", NUMBER_FIELD_NAME, "type=long,store=true", "tag", "type=keyword")); + .addMapping("type", SINGLE_VALUED_FIELD_NAME, + "type=geo_point", MULTI_VALUED_FIELD_NAME, + "type=geo_point", NUMBER_FIELD_NAME, + 
"type=long,store=true", + "tag", "type=keyword")); for (int i = 0; i < 2000; i++) { singleVal = singleValues[i % numUniqueGeoPoints]; @@ -161,8 +169,14 @@ public void setupSuiteScopeCluster() throws Exception { .startObject() .array(SINGLE_VALUED_FIELD_NAME, singleVal.lon(), singleVal.lat()) .startArray(MULTI_VALUED_FIELD_NAME) - .startArray().value(multiValues[i % numUniqueGeoPoints].lon()).value(multiValues[i % numUniqueGeoPoints].lat()).endArray() - .startArray().value(multiValues[(i + 1) % numUniqueGeoPoints].lon()).value(multiValues[(i + 1) % numUniqueGeoPoints].lat()).endArray() + .startArray() + .value(multiValues[i % numUniqueGeoPoints].lon()) + .value(multiValues[i % numUniqueGeoPoints].lat()) + .endArray() + .startArray() + .value(multiValues[(i + 1) % numUniqueGeoPoints].lon()) + .value(multiValues[(i + 1) % numUniqueGeoPoints].lat()) + .endArray() .endArray() .field(NUMBER_FIELD_NAME, i) .field("tag", "tag" + i) @@ -177,11 +191,12 @@ public void setupSuiteScopeCluster() throws Exception { indexRandom(true, builders); ensureSearchable(); - // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same value for NUMBER_FIELD_NAME. This will check that after - // random indexing each document only has 1 value for NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting - // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type - SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME) - .order(SortOrder.ASC)).setSize(5000).get(); + // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same + // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for + // NUMBER_FIELD_NAME and it is the correct value. Following this initial change its seems that this call was getting + // more that 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type. + SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) + .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)).setSize(5000).get(); assertSearchResponse(response); long totalHits = response.getHits().getTotalHits(); XContentBuilder builder = XContentFactory.jsonBuilder(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java index c4a3d3b2ffcef..530046b496e5b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesTestCase.java @@ -17,7 +17,7 @@ * under the License. 
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -26,6 +26,8 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
 import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.Percentile;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java
index 7835bf75e721f..b83acfcba80ec 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgAggregatorTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.avg;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -35,6 +35,9 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.AvgAggregator;
+import org.elasticsearch.search.aggregations.metrics.InternalAvg;

 import java.io.IOException;
 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
index 98541d0ff5871..e18bfd7fcc85f 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.avg.Avg;

 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.index.query.QueryBuilders.termQuery;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java
index df90dc4f7c309..5e1c0a4ebc346 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgTests.java
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.aggregations.metrics;

-import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
-
 public class AvgTests extends AbstractNumericMetricTestCase<AvgAggregationBuilder> {

     @Override
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
index 3544b02e97a51..a2789a9ef1648 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregatorTests.java
@@ -34,9 +34,6 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregator;
-import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
 import org.elasticsearch.search.aggregations.support.ValueType;

 import java.io.IOException;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
index c770bef7df613..cf155b8690d3c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.Collection;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityTests.java
similarity index 90%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityTests.java
index 1b3a18581768e..4f631bde8ac06 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityTests.java
@@ -17,9 +17,10 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
+import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder;

 public class CardinalityTests extends BaseAggregationTestCase<CardinalityAggregationBuilder> {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
index 144305647ebaf..e65d1269520bc 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
@@ -32,9 +32,6 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;

 import java.io.IOException;
 import java.util.function.Consumer;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
index 7de333e8127ca..3daafb8684eb6 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
@@ -30,8 +30,7 @@
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.missing.Missing;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds;
+import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds;
 import org.elasticsearch.search.aggregations.BucketOrder;

 import java.util.Collection;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java
index 3f78cc17aa990..5135ec46a10c3 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsTests.java
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.aggregations.metrics;

-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
-
 public class ExtendedStatsTests extends AbstractNumericMetricTestCase<ExtendedStatsAggregationBuilder> {

     @Override
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
index 5227c62e6b42c..b171e7436eee4 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.geobounds;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.LatLonDocValuesField;
@@ -32,7 +32,7 @@
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
 import org.elasticsearch.test.geo.RandomGeoGenerator;

-import static org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBoundsTests.GEOHASH_TOLERANCE;
+import static org.elasticsearch.search.aggregations.metrics.InternalGeoBoundsTests.GEOHASH_TOLERANCE;
 import static org.hamcrest.Matchers.closeTo;

 public class GeoBoundsAggregatorTests extends AggregatorTestCase {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
index 1a97cb49164a4..483cd9f706861 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java
@@ -26,8 +26,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregator;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.List;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java
index 9f5bd13b5f664..0dd19b738ee6e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;

 public class GeoBoundsTests extends BaseAggregationTestCase<GeoBoundsAggregationBuilder> {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
similarity index 97%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
index 5ba9b4b01e7f2..3865070741258 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.LatLonDocValuesField;
@@ -29,6 +29,8 @@
 import org.elasticsearch.index.mapper.GeoPointFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid;
 import org.elasticsearch.test.geo.RandomGeoGenerator;

 import java.io.IOException;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
index 32b036606d399..f06e5510aed8c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.global.Global;
-import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid;
 import org.elasticsearch.test.ESIntegTestCase;

 import java.util.List;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
index 90067df601387..59f8ec1a5b86a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;

 public class GeoCentroidTests extends BaseAggregationTestCase<GeoCentroidAggregationBuilder> {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
similarity index 92%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
index 3513beee6687c..52bd6a37e6f6f 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentileRanksAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -31,10 +31,10 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.metrics.Percentile;
+import org.elasticsearch.search.aggregations.metrics.PercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
 import org.hamcrest.Matchers;

 import java.io.IOException;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
index cf9940521314c..1321c8bca4711 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
@@ -30,9 +30,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
 import org.elasticsearch.search.aggregations.BucketOrder;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
similarity index 94%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
index 690c561b36ee5..b68b68dd544ea 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/HDRPercentilesAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesAggregatorTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
@@ -34,8 +34,10 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.metrics.HDRPercentilesAggregator;
+import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles;
+import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;

 import java.io.IOException;
 import java.util.function.Consumer;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
index ae745e1f1ad03..67eb4939ae529 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
@@ -31,9 +31,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
 import org.elasticsearch.search.aggregations.BucketOrder;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java
similarity index 94%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java
index e58899807ab1b..514af2a67667d 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/HyperLogLogPlusPlusTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HyperLogLogPlusPlusTests.java
@@ -17,15 +17,16 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;

 import com.carrotsearch.hppc.BitMixer;
 import com.carrotsearch.hppc.IntHashSet;

 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus;
 import org.elasticsearch.test.ESTestCase;

-import static org.elasticsearch.search.aggregations.metrics.cardinality.HyperLogLogPlusPlus.MAX_PRECISION;
-import static org.elasticsearch.search.aggregations.metrics.cardinality.HyperLogLogPlusPlus.MIN_PRECISION;
+import static org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus.MAX_PRECISION;
+import static org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus.MIN_PRECISION;
 import static org.hamcrest.Matchers.closeTo;

 public class HyperLogLogPlusPlusTests extends ESTestCase {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
index 5adfb11f5bb9a..10ae10a9af1c0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalAvgTests.java
@@ -17,12 +17,14 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.avg;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalAvg;
+import org.elasticsearch.search.aggregations.metrics.ParsedAvg;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
similarity index 95%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
index fc1095c857fa4..d20f3620f9036 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/cardinality/InternalCardinalityTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalCardinalityTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.cardinality;
+package org.elasticsearch.search.aggregations.metrics;

 import com.carrotsearch.hppc.BitMixer;

@@ -28,6 +28,9 @@
 import org.elasticsearch.common.util.MockPageCacheRecycler;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.HyperLogLogPlusPlus;
+import org.elasticsearch.search.aggregations.metrics.InternalCardinality;
+import org.elasticsearch.search.aggregations.metrics.ParsedCardinality;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.junit.After;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
index eb6a2e40a01b9..3c5201bfa8aa9 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java
@@ -23,9 +23,7 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
-import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats;
+import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBoundsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBoundsTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java
index 3d96d92aeb915..aa2e527b2e605 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geobounds/InternalGeoBoundsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBoundsTests.java
@@ -17,10 +17,12 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.geobounds;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalGeoBounds;
+import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroidTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroidTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java
index 9dc7896638c4e..73fc160bcf1b5 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroidTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroidTests.java
@@ -16,12 +16,14 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.search.aggregations.metrics.geocentroid;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.geo.GeoEncodingUtils;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalGeoCentroid;
+import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
 import org.elasticsearch.test.geo.RandomGeoGenerator;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java
similarity index 91%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java
index ee0e3602f2039..dfd9403c8cc94 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesRanksTests.java
@@ -17,13 +17,15 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;

 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.InternalPercentilesRanksTestCase;
+import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
similarity index 92%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
index 7f1362af04108..99b8bd5575bdc 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java
@@ -17,14 +17,16 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.hdr;
+package org.elasticsearch.search.aggregations.metrics;

 import org.HdrHistogram.DoubleHistogram;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
-import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
+import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles;
+import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles;
+import org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase;
+import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.Percentile;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
index ad8bc350fbd03..10d649a0c0df3 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java
@@ -22,8 +22,6 @@
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
-import org.elasticsearch.search.aggregations.metrics.max.ParsedMax;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java
index bca0f3cf31a12..dba794f9d0ef4 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMinTests.java
@@ -22,8 +22,6 @@
 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
-import org.elasticsearch.search.aggregations.metrics.min.ParsedMin;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java
similarity index 96%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java
index a63fd42da7d96..eba4d7837234e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesRanksTestCase.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java
similarity index 97%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java
index 1024577a6b6ed..b145349544ece 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalPercentilesTestCase.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java
similarity index 98%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java
index 70ddacf5698b2..89f42355f204a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetricTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.scripted;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.io.stream.Writeable.Reader;
@@ -30,6 +30,8 @@
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.search.aggregations.Aggregation.CommonFields;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalScriptedMetric;
+import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java
index cbb097a72820e..cb4b024f99da0 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsBucketTests.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.InternalStatsBucket;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.ParsedStatsBucket;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java
index 203d584e66ebf..8198d6c2e81a3 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalStatsTests.java
@@ -26,8 +26,6 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
-import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java
index aa9d25af49e8a..4f44be7d50833 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalSumTests.java
@@ -22,8 +22,6 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
-import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java
similarity index 91%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesRanksTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java
index 35c566c2e80cf..66e6891f93412 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesRanksTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java
@@ -17,12 +17,15 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.Writeable.Reader;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesRanksTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks;
+import org.elasticsearch.search.aggregations.metrics.TDigestState;
+import org.elasticsearch.search.aggregations.metrics.InternalPercentilesRanksTestCase;
+import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java
similarity index 91%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java
index 73c9b8a16084e..25ee09ca5cb51 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java
@@ -17,12 +17,15 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.DocValueFormat;
-import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.ParsedPercentiles;
+import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles;
+import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles;
+import org.elasticsearch.search.aggregations.metrics.TDigestState;
+import org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase;
+import org.elasticsearch.search.aggregations.metrics.ParsedPercentiles;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java
similarity index 98%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java
index 0fba35358ecb0..3e97ec94f6b35 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/InternalTopHitsTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.tophits;
+package org.elasticsearch.search.aggregations.metrics;

 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.FieldComparator;
@@ -36,6 +36,8 @@
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
+import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java
similarity index 94%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCountTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java
index 23253777487e2..5ea5cffac1f48 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/InternalValueCountTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java
@@ -17,10 +17,12 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.valuecount;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
+import org.elasticsearch.search.aggregations.metrics.InternalValueCount;
+import org.elasticsearch.search.aggregations.metrics.ParsedValueCount;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalAggregationTestCase;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
index 21466a487b371..b27d33aa5ca37 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
@@ -34,9 +34,6 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator;

 import java.io.IOException;
 import java.util.Arrays;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
index a192b3c4a12c1..5447406f2f217 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.max.Max;
 import org.elasticsearch.search.aggregations.BucketOrder;

 import java.util.Collection;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java
index 6ffd824aa3cc8..dac145b0a556a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxTests.java
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.aggregations.metrics;

-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
-
 public class MaxTests extends AbstractNumericMetricTestCase<MaxAggregationBuilder> {

     @Override
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
index dfee4437fbe26..5b279f1ea49ba 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
@@ -30,9 +30,6 @@
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
-import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.min.MinAggregator;

 public class MinAggregatorTests extends AggregatorTestCase {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
index 7f2522c04bb50..d92d212f4d2e6 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
@@ -29,7 +29,6 @@
 import org.elasticsearch.search.aggregations.bucket.global.Global;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
 import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.metrics.min.Min;
 import org.elasticsearch.search.aggregations.BucketOrder;

 import java.util.Collection;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java
index eed4059ade77f..699ad8117d0c6 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinTests.java
@@ -19,8 +19,6 @@

 package org.elasticsearch.search.aggregations.metrics;

-import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
-
 public class MinTests extends AbstractNumericMetricTestCase<MinAggregationBuilder> {

     @Override
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
index a678f69f19bce..6483dbbc6e39c 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentileRanksTests.java
@@ -20,7 +20,6 @@
 package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;

 public class PercentileRanksTests extends BaseAggregationTestCase<PercentileRanksAggregationBuilder> {
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java
similarity index 95%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java
index 97d5cf1f9eeb8..70445821cef9e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesMethodTests.java
@@ -17,10 +17,11 @@
  * under the License.
  */

-package org.elasticsearch.search.aggregations.metrics.percentiles;
+package org.elasticsearch.search.aggregations.metrics;

 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.search.aggregations.metrics.PercentilesMethod;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
index ea0c9f3969669..edc4b7954a3c5 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/PercentilesTests.java
@@ -24,7 +24,6 @@
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;

 import java.io.IOException;
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
similarity index 98%
rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java
rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
index 65e42556461a5..56b8938b6e54b 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
@@ -17,7 +17,7 @@
  * under the License.
*/ -package org.elasticsearch.search.aggregations.metrics.scripted; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; @@ -35,6 +35,8 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; import org.junit.BeforeClass; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index f62598fa7c317..2643b6c61668e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -37,7 +37,6 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java index 453d830002a5b..a624eddea69b1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; import java.util.Collections; import java.util.HashMap; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java index c5c1420fb2265..52a45f9c017d1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java @@ -31,8 +31,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.stats.InternalStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; import java.io.IOException; import java.util.function.Consumer; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index e277902ace24d..a97982cccac3b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -31,7 +31,6 
@@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Collection; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java index 76a8e9aa98a08..e2db3ac2fb476 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; - public class StatsTests extends AbstractNumericMetricTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java index edaf5ae03f99b..eb57bc9a5115c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java @@ -39,9 +39,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregator; import java.io.IOException; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index b3a5df4dbfc07..6967b7ffc3fa1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.hamcrest.core.IsNull; import java.util.ArrayList; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java index edc6d4edef0e4..204ee27df3c1b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; - public class SumTests extends AbstractNumericMetricTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java similarity index 92% rename from 
server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java index 6545fe9d3ffe1..363ba14198390 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentileRanksAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.Document; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -31,10 +31,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; +import org.elasticsearch.search.aggregations.metrics.Percentile; +import org.elasticsearch.search.aggregations.metrics.PercentileRanks; +import org.elasticsearch.search.aggregations.metrics.PercentileRanksAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; import org.hamcrest.Matchers; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 3846168009dc6..8cbf9883fe534 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -31,10 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java similarity index 95% rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java index 85ab361a8b337..8a4f399cb2525 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/TDigestPercentilesAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesAggregatorTests.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.NumericDocValuesField; @@ -34,8 +34,10 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.PercentilesAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.PercentilesMethod; +import org.elasticsearch.search.aggregations.metrics.TDigestPercentilesAggregator; import java.io.IOException; import java.util.function.Consumer; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 89c7d12c746fa..73ce6c7ece7a6 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -31,10 +31,6 @@ import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; import org.elasticsearch.search.aggregations.BucketOrder; import java.util.Arrays; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 3fe75b77e7f12..c888dbf8d2eea 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.tophits; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.document.Document; @@ -49,6 +49,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.metrics.TopHits; import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index d7559d47d2f86..03fa60c6d8e93 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -46,8 +46,6 @@ import org.elasticsearch.search.aggregations.bucket.nested.Nested; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHits; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.rescore.QueryRescorerBuilder; @@ -120,7 +118,8 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("idx").addMapping("type", TERMS_AGGS_FIELD, "type=keyword")); assertAcked(prepareCreate("field-collapsing").addMapping("type", "group", "type=keyword")); createIndex("empty"); - assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties") + assertAcked(prepareCreate("articles").addMapping("article", + jsonBuilder().startObject().startObject("article").startObject("properties") .startObject(TERMS_AGGS_FIELD) .field("type", "keyword") .endObject() @@ -251,15 +250,20 @@ public void setupSuiteScopeCluster() throws Exception { ); builders.add( client().prepareIndex("articles", "article", "2") - .setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text").startArray("comments") + .setSource(jsonBuilder().startObject().field("title", "title 2").field("body", "some different text") + .startArray("comments") .startObject() .field("user", "b").field("date", 3L).field("message", "some comment") .startArray("reviewers") .startObject().field("name", "user f").endObject() .endArray() .endObject() - .startObject().field("user", "c").field("date", 4L).field("message", "some other comment").endObject() - .endArray().endObject()) + .startObject() + .field("user", "c") + .field("date", 4L) + .field("message", "some other comment") + .endObject() + .endArray().endObject()) ); indexRandom(true, builders); @@ -314,7 +318,8 @@ public void testIssue11119() throws Exception { .prepareSearch("field-collapsing") .setSize(0) .setQuery(matchQuery("text", "x y z")) - .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) + .addAggregation(terms("terms") + .executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) .get(); assertSearchResponse(response); @@ -584,7 +589,8 @@ public void 
testFetchFeatures() { .explain(true) .storedField("text") .docValueField("field1") - .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + .scriptField("script", + new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) .fetchSource("text", null) .version(true) ) @@ -761,7 +767,8 @@ public void testTopHitsInSecondLayerNested() throws Exception { .subAggregation( nested("to-reviewers", "comments.reviewers").subAggregation( // Also need to sort on _doc because there are two reviewers with the same name - topHits("top-reviewers").sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) + topHits("top-reviewers") + .sort("comments.reviewers.name", SortOrder.ASC).sort("_doc", SortOrder.DESC).size(7) ) ) .subAggregation(topHits("top-comments").sort("comments.date", SortOrder.DESC).size(4)) @@ -866,7 +873,9 @@ public void testNestedFetchFeatures() { nested("to-comments", "comments").subAggregation( topHits("top-comments").size(1).highlighter(new HighlightBuilder().field(hlField)).explain(true) .docValueField("comments.user") - .scriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())).fetchSource("comments.message", null) + .scriptField("script", + new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + .fetchSource("comments.message", null) .version(true).sort("comments.date", SortOrder.ASC))).get(); assertHitCount(searchResponse, 2); Nested nested = searchResponse.getAggregations().get("to-comments"); @@ -883,7 +892,8 @@ public void testNestedFetchFeatures() { assertThat(highlightField.getFragments().length, equalTo(1)); assertThat(highlightField.getFragments()[0].string(), equalTo("some comment")); - // Can't explain nested hit with the main query, since both are in a different scopes, also the nested doc may not even have matched with the main query + // Can't explain nested hit with the main query, since both are in a different scopes, also the nested doc may not + // even have matched with the main query. 
// If top_hits would have a query option then we can explain that query Explanation explanation = searchHit.getExplanation(); assertFalse(explanation.isMatch()); @@ -913,7 +923,13 @@ public void testTopHitsInNested() throws Exception { .subAggregation( nested("to-comments", "comments") .subAggregation(topHits("comments") - .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text")))) + .highlighter( + new HighlightBuilder() + .field( + new HighlightBuilder.Field("comments.message") + .highlightQuery(matchQuery("comments.message", "text")) + ) + ) .sort("comments.id", SortOrder.ASC)) ) ) @@ -953,7 +969,8 @@ public void testUseMaxDocInsteadOfSize() throws Exception { .executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) .subAggregation( - topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) + topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1) + .sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) ) ) .get(); @@ -1064,7 +1081,11 @@ public void testNoStoredFields() throws Exception { public void testDontCacheScripts() throws Exception { try { assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long") - .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) + .setSettings( + Settings.builder() + .put("requests.cache.enable", true) + .put("number_of_shards", 1) + .put("number_of_replicas", 1)) .get()); indexRandom(true, client().prepareIndex("cache_test_idx", "type", "1").setSource("s", 1), client().prepareIndex("cache_test_idx", "type", "2").setSource("s", 2)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java index 4d2331b86f2ef..006c0fedba58b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilderTests; import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java similarity index 96% rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java index 294343c245560..f9118e30a6efd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregatorTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.valuecount; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.NumericDocValuesField; @@ -41,6 +41,9 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.metrics.ValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregator; import org.elasticsearch.search.aggregations.support.ValueType; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 8c5a8e059f7a8..357c5a94a7aed 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java index 9a3ed32604488..0013a65ea1855 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ValueCountTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; public class ValueCountTests extends BaseAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java similarity index 98% rename from server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java rename to server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java index 70b1b651723e0..3836f0cc2ae14 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/weighted_avg/WeightedAvgAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregatorTests.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.search.aggregations.metrics.weighted_avg; +package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.NumericDocValuesField; @@ -36,6 +36,9 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.metrics.InternalWeightedAvg; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.WeightedAvgAggregator; import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; import org.joda.time.DateTimeZone; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java index 4858582da8034..8514b1a0c0da9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index 9e85455d96de9..bd92c73f997f1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.range.Range; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java index 9ea4f813dff0f..05de849854f67 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -30,7 +30,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java index f5dc01f19148b..08337ef969f77 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumAggregatorTests.java @@ -41,10 +41,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.Sum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.derivative.DerivativePipelineAggregationBuilder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java index b0f5eece900b1..aaa296fc31738 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; import org.elasticsearch.search.aggregations.support.AggregationPath; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 447d82084de5c..5944777b628f5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -29,8 +29,8 @@ import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import 
org.elasticsearch.search.aggregations.pipeline.derivative.Derivative; import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index aa587f3b3c3fe..40c3bfb500e4d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucket; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java index 494628eb93260..c3075da827118 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java index 51b9973b3154b..82629363f8d8a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue; import org.elasticsearch.search.aggregations.BucketOrder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 
2c1abcd953d7c..8f77c305229d9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -27,8 +27,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Percentile; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucket; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java index ce9394692deda..9f7b33e805b27 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregationHelperTests.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations.pipeline; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java index c87b4320896ea..f5d409951e3f1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucket; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java index f3fea8f6dd77b..a803b9fe3d466 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java @@ -26,7 +26,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.terms.IncludeExclude; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.test.ESIntegTestCase; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java index 5f804c7a8bd3d..dd8938bc8786a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketAggregatorTests.java @@ -38,8 +38,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java index e3ebd9dc77ac6..c1d3ffeb0e553 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucketTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; +import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalAggregationTestCase; @@ -37,7 +37,7 @@ import java.util.Map; import java.util.function.Predicate; -import static org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase.randomPercents; +import static org.elasticsearch.search.aggregations.metrics.InternalPercentilesTestCase.randomPercents; public class InternalPercentilesBucketTests extends InternalAggregationTestCase { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java index 5261c686174a5..03481ab7f6516 
100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/stats/extended/InternalExtendedStatsBucketTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.InternalExtendedStatsTests; -import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats; +import org.elasticsearch.search.aggregations.metrics.InternalExtendedStats; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.Collections; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java index 2e2f2a1b0f19b..df2d7e64f4605 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketsort/BucketSortIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java index db3f2d745e1f6..db333a8ed7a08 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnUnitTests.java @@ -40,7 +40,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import java.io.IOException; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java index 01af64d26deca..d14f93b7a5189 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/moving/avg/MovAvgIT.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; +import org.elasticsearch.search.aggregations.metrics.Avg; import 
org.elasticsearch.search.aggregations.pipeline.BucketHelpers; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregationHelperTests; import org.elasticsearch.search.aggregations.pipeline.SimpleValue; diff --git a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index bb480527d7abb..51bc5cc4e24bc 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/test/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -25,8 +25,6 @@ import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedOrdinalsSamplerAggregator; import org.elasticsearch.search.aggregations.bucket.terms.GlobalOrdinalsStringTermsAggregator; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregator; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregator; import org.elasticsearch.search.profile.ProfileResult; import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.test.ESIntegTestCase; @@ -180,7 +178,7 @@ public void testMultiLevelProfile() { ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo(AvgAggregator.class.getSimpleName())); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = termsAggResult.getTimeBreakdown(); @@ -250,7 +248,7 @@ public void testMultiLevelProfileBreadthFirst() { ProfileResult avgAggResult = termsAggResult.getProfiledChildren().get(0); assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo(AvgAggregator.class.getSimpleName())); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = termsAggResult.getTimeBreakdown(); @@ -303,7 +301,7 @@ public void testDiversifiedAggProfile() { ProfileResult maxAggResult = diversifyAggResult.getProfiledChildren().get(0); assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo(MaxAggregator.class.getSimpleName())); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); Map termsBreakdown = maxAggResult.getTimeBreakdown(); @@ -381,7 +379,7 @@ public void testComplexProfile() { ProfileResult avgAggResult = tagsAggResult.getProfiledChildren().get(0); assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo(AvgAggregator.class.getSimpleName())); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); Map avgBreakdown = tagsAggResult.getTimeBreakdown(); @@ -398,7 +396,7 @@ public void testComplexProfile() { ProfileResult maxAggResult = tagsAggResult.getProfiledChildren().get(1); assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo(MaxAggregator.class.getSimpleName())); + 
assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); Map maxBreakdown = tagsAggResult.getTimeBreakdown(); @@ -432,7 +430,7 @@ public void testComplexProfile() { avgAggResult = stringsAggResult.getProfiledChildren().get(0); assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo(AvgAggregator.class.getSimpleName())); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = stringsAggResult.getTimeBreakdown(); @@ -449,7 +447,7 @@ public void testComplexProfile() { maxAggResult = stringsAggResult.getProfiledChildren().get(1); assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo(MaxAggregator.class.getSimpleName())); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = stringsAggResult.getTimeBreakdown(); @@ -483,7 +481,7 @@ public void testComplexProfile() { avgAggResult = tagsAggResult.getProfiledChildren().get(0); assertThat(avgAggResult, notNullValue()); - assertThat(avgAggResult.getQueryName(), equalTo(AvgAggregator.class.getSimpleName())); + assertThat(avgAggResult.getQueryName(), equalTo("AvgAggregator")); assertThat(avgAggResult.getLuceneDescription(), equalTo("avg")); assertThat(avgAggResult.getTime(), greaterThan(0L)); avgBreakdown = tagsAggResult.getTimeBreakdown(); @@ -500,7 +498,7 @@ public void testComplexProfile() { maxAggResult = tagsAggResult.getProfiledChildren().get(1); assertThat(maxAggResult, notNullValue()); - assertThat(maxAggResult.getQueryName(), equalTo(MaxAggregator.class.getSimpleName())); + assertThat(maxAggResult.getQueryName(), equalTo("MaxAggregator")); assertThat(maxAggResult.getLuceneDescription(), equalTo("max")); assertThat(maxAggResult.getTime(), greaterThan(0L)); maxBreakdown = tagsAggResult.getTimeBreakdown(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java index 56a4bc983cadb..e32734b887b0a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/InternalSingleBucketAggregationTestCase.java @@ -25,8 +25,8 @@ import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.ParsedSingleBucketAggregation; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.min.InternalMin; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.InternalMin; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.InternalAggregationTestCase; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 
15e44853a97ba..1149c7b0941ce 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -85,38 +85,38 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedLongTerms; import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.ParsedAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.cardinality.ParsedCardinality; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geobounds.ParsedGeoBounds; -import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geocentroid.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.ParsedMin; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.ParsedHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.scripted.ParsedScriptedMetric; -import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.ParsedStats; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ParsedExtendedStats; -import org.elasticsearch.search.aggregations.metrics.sum.ParsedSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits; -import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ParsedValueCount; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; +import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index cdf25438cea33..03b58732a378a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -22,7 +22,7 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java index fe987db48ce17..91c95c707d46a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulator.java @@ -8,7 +8,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; import java.io.IOException; import java.util.HashMap; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java index 134ce6c87b3f7..a784922228b0e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java @@ -7,11 +7,11 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import java.util.Arrays; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java index 3030449abd1b6..36bd2fbcb4689 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfigTests.java @@ -28,8 +28,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractSerializingTestCase; diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 7e0615e85f8c9..592fdbe9de6ef 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -20,7 +20,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractSerializingTestCase; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java index b1eb13b5d73c5..7770def0fae9a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/extractor/ExtractorUtilsTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java index bd2df0823ae17..87d2acff9e384 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/stats/StatsAccumulatorTests.java @@ -6,7 +6,7 @@ package org.elasticsearch.xpack.core.ml.stats; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.elasticsearch.search.aggregations.metrics.Stats; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.util.HashMap; import java.util.Map; @@ -157,4 +157,4 @@ public StatsAccumulator createTestInstance() { protected Reader instanceReader() { return StatsAccumulator::new; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index c0792a45b29d3..4a17a2654c631 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -19,8 +19,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java index e481986504d60..864a83afae7e7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessor.java @@ -15,9 +15,9 @@ import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Percentile; +import org.elasticsearch.search.aggregations.metrics.Percentiles; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.joda.time.DateTime; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java index 3b772544821d6..b1daff2b7e783 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractor.java @@ -13,8 +13,8 @@ import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.core.ml.datafeed.extractor.ExtractorUtils; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 
b9d7322b1ad14..09a0a25cc4de0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -64,8 +64,8 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; -import org.elasticsearch.search.aggregations.metrics.stats.Stats; -import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats; +import org.elasticsearch.search.aggregations.metrics.Stats; +import org.elasticsearch.search.aggregations.metrics.ExtendedStats; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilders; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java index d6ade87fa6e7b..204ae42720433 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/overallbuckets/OverallBucketsProvider.java @@ -10,7 +10,7 @@ import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.results.OverallBucket; import org.elasticsearch.xpack.core.ml.job.results.Result; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java index ab3fe083d5ff4..50a016f6e5e0a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedActionTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.action.PreviewDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; @@ -122,4 +122,4 @@ public void testPreviewDatafed_GivenFailure() throws IOException { assertThat(capturedFailure.getMessage(), equalTo("failed")); verify(dataExtractor).cancel(); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java index 180727e88f2fe..35fd9bb98abf3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java 
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedJobValidatorTests.java @@ -10,7 +10,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedJobValidator; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java index 52e38a70abdb5..11ff693bad7ed 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorFactoryTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; @@ -173,4 +173,4 @@ private void givenAggregatableField(String field, String type) { fieldCapsMap.put(type, fieldCaps); when(fieldsCapabilities.getField(field)).thenReturn(fieldCapsMap); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 16b62cc23de19..47d2eb828c6a4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -11,9 +11,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Percentile; +import org.elasticsearch.search.aggregations.metrics.Percentiles; import java.util.ArrayList; import java.util.Collections; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java index ffadcfab43c0d..bf283b5be519d 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationToJsonProcessorTests.java @@ -11,7 +11,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayOutputStream; @@ -457,4 +457,4 @@ private String aggToString(Set fields, Aggregations aggregations) throws keyValuePairsWritten = processor.getKeyValueCount(); return outputStream.toString(StandardCharsets.UTF_8.name()); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java index 18c35155b6f5d..e85b1e3a6dfd2 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/chunked/ChunkedDataExtractorTests.java @@ -18,8 +18,8 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.metrics.max.Max; -import org.elasticsearch.search.aggregations.metrics.min.Min; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; import org.elasticsearch.xpack.ml.datafeed.extractor.DataExtractorFactory; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index 9ab4907b2cd17..ecc12a58d10fc 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -21,7 +21,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 28a19090225b4..7bc035f7ae236 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.max.Max; +import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkDoc; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java index 0668e7c43ad3c..44e67cc619cf7 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupRequestTranslator.java @@ -16,8 +16,8 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index a38adf5d9de3a..0c1ca89f32d77 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -28,11 +28,11 @@ import org.elasticsearch.search.aggregations.bucket.terms.LongTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation.SingleValue; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.min.InternalMin; -import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.InternalMin; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.core.rollup.RollupField; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index b1b052a3659d6..ee29e56a33169 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -20,11 +20,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java index 54cab648a20a2..95161e0d149dc 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupJobIdentifierUtilTests.java @@ -9,10 +9,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.RollupField; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java index a618e8b4e6f63..c72808bba3731 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java +++ 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupRequestTranslationTests.java @@ -19,11 +19,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.range.GeoDistanceAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 73a4d0665c4e1..0a133cc8e0754 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -54,16 +54,16 @@ import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.InternalMax; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalMax; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import 
org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.core.rollup.RollupField; @@ -512,7 +512,7 @@ public void testMismatch() throws IOException { ClassCastException e = expectThrows(ClassCastException.class, () -> RollupResponseTranslator.combineResponses(msearch, reduceContext)); assertThat(e.getMessage(), - containsString("org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds")); + containsString("org.elasticsearch.search.aggregations.metrics.InternalGeoBounds")); assertThat(e.getMessage(), containsString("org.elasticsearch.search.aggregations.InternalMultiBucketAggregation")); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java index 3cc6190db30d5..d7bb34bb1561f 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/SearchActionTests.java @@ -36,10 +36,10 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.aggregations.metrics.avg.Avg; -import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg; -import org.elasticsearch.search.aggregations.metrics.sum.InternalSum; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.Avg; +import org.elasticsearch.search.aggregations.metrics.InternalAvg; +import org.elasticsearch.search.aggregations.metrics.InternalSum; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.suggest.SuggestBuilder; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 098bc83bc7034..f5d335ca6f106 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -33,9 +33,9 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; import 
org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 7cf29632538a9..eff150bf1ef55 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -24,7 +24,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; +import org.elasticsearch.search.aggregations.metrics.Percentiles; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.threadpool.ThreadPoolStats; import org.elasticsearch.xpack.core.watcher.WatcherState; From c12d23221557e8d76bf456af1b23315f6527405b Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Fri, 7 Sep 2018 14:00:24 +0200 Subject: [PATCH 30/91] Pass Directory instead of DirectoryService to Store (#33466) Instead of passing DirectoryService which causes yet another dependency on Store we can just pass in a Directory since we will just call `DirectoryService#newDirectory()` on it anyway. --- .../org/elasticsearch/index/IndexService.java | 5 +- .../org/elasticsearch/index/store/Store.java | 13 +-- .../index/shard/RefreshListenersTests.java | 9 +- .../elasticsearch/index/store/StoreTests.java | 95 ++++--------------- .../recovery/RecoverySourceHandlerTests.java | 18 +--- .../index/engine/EngineTestCase.java | 9 +- .../index/shard/IndexShardTestCase.java | 9 +- .../index/engine/FollowingEngineTests.java | 11 +-- 8 files changed, 39 insertions(+), 130 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/IndexService.java b/server/src/main/java/org/elasticsearch/index/IndexService.java index 6ffbc44676e0b..047b3c5cd762d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexService.java +++ b/server/src/main/java/org/elasticsearch/index/IndexService.java @@ -64,6 +64,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; @@ -377,7 +378,9 @@ public synchronized IndexShard createShard(ShardRouting routing, Consumer + DirectoryService directoryService = indexStore.newDirectoryService(path); + store = new Store(shardId, this.indexSettings, directoryService.newDirectory(), lock, new StoreCloseListener(shardId, () -> eventListener.onStoreClosed(shardId))); indexShard = new IndexShard(routing, this.indexSettings, path, store, indexSortSupplier, indexCache, mapperService, similarityService, engineFactory, diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 470f03afc48aa..b892c5c01fefc 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java 
+++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -64,7 +64,6 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.common.util.concurrent.RefCounted; @@ -153,18 +152,16 @@ protected void closeInternal() { } }; - public Store(ShardId shardId, IndexSettings indexSettings, DirectoryService directoryService, ShardLock shardLock) throws IOException { - this(shardId, indexSettings, directoryService, shardLock, OnClose.EMPTY); + public Store(ShardId shardId, IndexSettings indexSettings, Directory directory, ShardLock shardLock) { + this(shardId, indexSettings, directory, shardLock, OnClose.EMPTY); } - public Store(ShardId shardId, IndexSettings indexSettings, DirectoryService directoryService, ShardLock shardLock, - OnClose onClose) throws IOException { + public Store(ShardId shardId, IndexSettings indexSettings, Directory directory, ShardLock shardLock, + OnClose onClose) { super(shardId, indexSettings); - final Settings settings = indexSettings.getSettings(); - Directory dir = directoryService.newDirectory(); final TimeValue refreshInterval = indexSettings.getValue(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING); logger.debug("store stats are refreshed with refresh_interval [{}]", refreshInterval); - ByteSizeCachingDirectory sizeCachingDir = new ByteSizeCachingDirectory(dir, refreshInterval); + ByteSizeCachingDirectory sizeCachingDir = new ByteSizeCachingDirectory(directory, refreshInterval); this.directory = new StoreDirectory(sizeCachingDir, Loggers.getLogger("index.store.deletes", shardId)); this.shardLock = shardLock; this.onClose = onClose; diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index a43c7c214aeb3..2492ab4cd8a08 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -50,7 +50,6 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; -import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; @@ -106,13 +105,7 @@ public void setupListeners() throws Exception { ShardId shardId = new ShardId(new Index("index", "_na_"), 1); String allocationId = UUIDs.randomBase64UUID(random()); Directory directory = newDirectory(); - DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { - @Override - public Directory newDirectory() throws IOException { - return directory; - } - }; - store = new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + store = new Store(shardId, indexSettings, directory, new DummyShardLock(shardId)); IndexWriterConfig iwc = newIndexWriterConfig(); TranslogConfig translogConfig = new TranslogConfig(shardId, createTempDir("translog"), indexSettings, BigArrays.NON_RECYCLING_INSTANCE); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java 
b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 2cea9bb364684..584ce9b06421d 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -104,12 +104,10 @@ public class StoreTests extends ESTestCase { private static final Version MIN_SUPPORTED_LUCENE_VERSION = org.elasticsearch.Version.CURRENT .minimumIndexCompatibilityVersion().luceneVersion; - public void testRefCount() throws IOException { + public void testRefCount() { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); IndexSettings indexSettings = INDEX_SETTINGS; - - Store store = new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, indexSettings, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); int incs = randomIntBetween(1, 100); for (int i = 0; i < incs; i++) { if (randomBoolean()) { @@ -296,8 +294,7 @@ public void testVerifyingIndexOutputWithBogusInput() throws IOException { public void testNewChecksums() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); // set default codec - all segments need checksums IndexWriter writer = new IndexWriter(store.directory(), newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec())); int docs = 1 + random().nextInt(100); @@ -347,7 +344,7 @@ public void testNewChecksums() throws IOException { assertConsistent(store, metadata); TestUtil.checkIndex(store.directory()); - assertDeleteContent(store, directoryService); + assertDeleteContent(store, store.directory()); IOUtils.close(store); } @@ -455,32 +452,11 @@ private void corruptFile(Directory dir, String fileIn, String fileOut) throws IO } - public void assertDeleteContent(Store store, DirectoryService service) throws IOException { + public void assertDeleteContent(Store store, Directory dir) throws IOException { deleteContent(store.directory()); assertThat(Arrays.toString(store.directory().listAll()), store.directory().listAll().length, equalTo(0)); assertThat(store.stats().sizeInBytes(), equalTo(0L)); - assertThat(service.newDirectory().listAll().length, equalTo(0)); - } - - private static final class LuceneManagedDirectoryService extends DirectoryService { - private final Directory dir; - private final Random random; - - LuceneManagedDirectoryService(Random random) { - this(random, true); - } - - LuceneManagedDirectoryService(Random random, boolean preventDoubleWrite) { - super(new ShardId(INDEX_SETTINGS.getIndex(), 1), INDEX_SETTINGS); - dir = StoreTests.newDirectory(random); - this.random = random; - } - - @Override - public Directory newDirectory() throws IOException { - return dir; - } - + assertThat(dir.listAll().length, equalTo(0)); } public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException { @@ -511,8 +487,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException { iwc.setMergePolicy(NoMergePolicy.INSTANCE); iwc.setUseCompoundFile(random.nextBoolean()); final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService 
directoryService = new LuceneManagedDirectoryService(random); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); IndexWriter writer = new IndexWriter(store.directory(), iwc); final boolean lotsOfSegments = rarely(random); for (Document d : docs) { @@ -526,7 +501,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException { writer.commit(); writer.close(); first = store.getMetadata(null); - assertDeleteContent(store, directoryService); + assertDeleteContent(store, store.directory()); store.close(); } long time = new Date().getTime(); @@ -541,8 +516,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException { iwc.setMergePolicy(NoMergePolicy.INSTANCE); iwc.setUseCompoundFile(random.nextBoolean()); final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random); - store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); IndexWriter writer = new IndexWriter(store.directory(), iwc); final boolean lotsOfSegments = rarely(random); for (Document d : docs) { @@ -639,8 +613,7 @@ public void testRecoveryDiff() throws IOException, InterruptedException { public void testCleanupFromSnapshot() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); // this time random codec.... 
IndexWriterConfig indexWriterConfig = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec()); // we keep all commits and that allows us clean based on multiple snapshots @@ -727,11 +700,10 @@ public void testCleanupFromSnapshot() throws IOException { public void testOnCloseCallback() throws IOException { final ShardId shardId = new ShardId(new Index(randomRealisticUnicodeOfCodepointLengthBetween(1, 10), "_na_"), randomIntBetween(0, 100)); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); final AtomicInteger count = new AtomicInteger(0); final ShardLock lock = new DummyShardLock(shardId); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, lock, theLock -> { + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), lock, theLock -> { assertEquals(shardId, theLock.getShardId()); assertEquals(lock, theLock); count.incrementAndGet(); @@ -748,11 +720,10 @@ public void testOnCloseCallback() throws IOException { public void testStoreStats() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT) .put(Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(0)).build(); - Store store = new Store(shardId, IndexSettingsModule.newIndexSettings("index", settings), directoryService, + Store store = new Store(shardId, IndexSettingsModule.newIndexSettings("index", settings), StoreTests.newDirectory(random()), new DummyShardLock(shardId)); long initialStoreSize = 0; for (String extraFiles : store.directory().listAll()) { @@ -843,8 +814,7 @@ protected Store.MetadataSnapshot createMetaDataSnapshot() { public void testUserDataRead() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())).setCodec(TestUtil.getDefaultCodec()); SnapshotDeletionPolicy deletionPolicy = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); config.setIndexDeletionPolicy(deletionPolicy); @@ -867,7 +837,7 @@ public void testUserDataRead() throws IOException { assertThat(metadata.getCommitUserData().get(Engine.SYNC_COMMIT_ID), equalTo(syncId)); assertThat(metadata.getCommitUserData().get(Translog.TRANSLOG_GENERATION_KEY), equalTo(translogId)); TestUtil.checkIndex(store.directory()); - assertDeleteContent(store, directoryService); + assertDeleteContent(store, store.directory()); IOUtils.close(store); } @@ -893,8 +863,7 @@ public void testStreamStoreFilesMetaData() throws Exception { public void testMarkCorruptedOnTruncatedSegmentsFile() throws IOException { IndexWriterConfig iwc = newIndexWriterConfig(); final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId)); 
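[The same mechanical substitution repeats through the test diffs above and below: every caller that already held a Lucene Directory previously had to wrap it in a single-use anonymous DirectoryService just to satisfy Store's constructor. Condensed into a before/after sketch — the names (directory, shardId, INDEX_SETTINGS, DummyShardLock) are taken from the surrounding hunks; this only illustrates the recurring pattern and is not an additional change in the patch:

    // Before: wrap an existing Directory in a throwaway anonymous DirectoryService.
    DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) {
        @Override
        public Directory newDirectory() throws IOException {
            return directory; // hands back the Directory the caller already has
        }
    };
    Store storeBefore = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId));

    // After: pass the Directory straight through; Store no longer knows about DirectoryService.
    Store storeAfter = new Store(shardId, INDEX_SETTINGS, directory, new DummyShardLock(shardId));
]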
IndexWriter writer = new IndexWriter(store.directory(), iwc); int numDocs = 1 + random().nextInt(10); @@ -945,15 +914,7 @@ public void testCanOpenIndex() throws IOException { writer.commit(); writer.close(); assertTrue(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id))); - - DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { - - @Override - public Directory newDirectory() throws IOException { - return dir; - } - }; - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId)); store.markStoreCorrupted(new CorruptIndexException("foo", "bar")); assertFalse(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id))); store.close(); @@ -962,14 +923,7 @@ public Directory newDirectory() throws IOException { public void testDeserializeCorruptionException() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA - DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { - - @Override - public Directory newDirectory() throws IOException { - return dir; - } - }; - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId)); CorruptIndexException ex = new CorruptIndexException("foo", "bar"); store.markStoreCorrupted(ex); try { @@ -998,14 +952,7 @@ public Directory newDirectory() throws IOException { public void testCanReadOldCorruptionMarker() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); final Directory dir = new RAMDirectory(); // I use ram dir to prevent that virusscanner being a PITA - DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { - - @Override - public Directory newDirectory() throws IOException { - return dir; - } - }; - Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + Store store = new Store(shardId, INDEX_SETTINGS, dir, new DummyShardLock(shardId)); CorruptIndexException exception = new CorruptIndexException("foo", "bar"); String uuid = Store.CORRUPTED + UUIDs.randomBase64UUID(); @@ -1065,8 +1012,7 @@ public Directory newDirectory() throws IOException { public void testEnsureIndexHasHistoryUUID() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - try (Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId))) { + try (Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId))) { store.createEmpty(); @@ -1098,8 +1044,7 @@ public void testEnsureIndexHasHistoryUUID() throws IOException { public void testHistoryUUIDCanBeForced() throws IOException { final ShardId shardId = new ShardId("index", "_na_", 1); - DirectoryService directoryService = new LuceneManagedDirectoryService(random()); - try (Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId))) { + try (Store store = new Store(shardId, INDEX_SETTINGS, StoreTests.newDirectory(random()), new DummyShardLock(shardId))) { store.createEmpty(); diff --git 
a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index d0074791bfa72..0f7a72aacf3f0 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -63,7 +63,6 @@ import org.elasticsearch.index.shard.IndexShardRelocatedException; import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreFileMetaData; import org.elasticsearch.index.translog.Translog; @@ -461,18 +460,11 @@ private Store newStore(Path path) throws IOException { return newStore(path, true); } private Store newStore(Path path, boolean checkIndex) throws IOException { - DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { - - @Override - public Directory newDirectory() throws IOException { - BaseDirectoryWrapper baseDirectoryWrapper = RecoverySourceHandlerTests.newFSDirectory(path); - if (checkIndex == false) { - baseDirectoryWrapper.setCheckIndexOnClose(false); // don't run checkindex we might corrupt the index in these tests - } - return baseDirectoryWrapper; - } - }; - return new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); + BaseDirectoryWrapper baseDirectoryWrapper = RecoverySourceHandlerTests.newFSDirectory(path); + if (checkIndex == false) { + baseDirectoryWrapper.setCheckIndexOnClose(false); // don't run checkindex we might corrupt the index in these tests + } + return new Store(shardId, INDEX_SETTINGS, baseDirectoryWrapper, new DummyShardLock(shardId)); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 0e22d0a7eda2a..283a7b137533d 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -77,7 +77,6 @@ import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; @@ -358,13 +357,7 @@ protected Store createStore(final Directory directory) throws IOException { } protected Store createStore(final IndexSettings indexSettings, final Directory directory) throws IOException { - final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { - @Override - public Directory newDirectory() throws IOException { - return directory; - } - }; - return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + return new Store(shardId, indexSettings, directory, new DummyShardLock(shardId)); } protected Translog createTranslog(LongSupplier primaryTermSupplier) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 32db9bf0a2a04..53576a1d80a70 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -59,7 +59,6 @@ import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; -import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -161,13 +160,7 @@ protected Store createStore(IndexSettings indexSettings, ShardPath shardPath) th } protected Store createStore(ShardId shardId, IndexSettings indexSettings, Directory directory) throws IOException { - final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { - @Override - public Directory newDirectory() throws IOException { - return directory; - } - }; - return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + return new Store(shardId, indexSettings, directory, new DummyShardLock(shardId)); } /** diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java index 6897e3bf3f73e..b3e2d12227b59 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/index/engine/FollowingEngineTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.store.DirectoryService; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogConfig; @@ -261,14 +260,8 @@ public void onFailedEngine(String reason, Exception e) { } private static Store createStore( - final ShardId shardId, final IndexSettings indexSettings, final Directory directory) throws IOException { - final DirectoryService directoryService = new DirectoryService(shardId, indexSettings) { - @Override - public Directory newDirectory() throws IOException { - return directory; - } - }; - return new Store(shardId, indexSettings, directoryService, new DummyShardLock(shardId)); + final ShardId shardId, final IndexSettings indexSettings, final Directory directory) { + return new Store(shardId, indexSettings, directory, new DummyShardLock(shardId)); } private FollowingEngine createEngine(Store store, EngineConfig config) throws IOException { From 9230a48722533b1ac523d3f88927494b9acec0a0 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 7 Sep 2018 07:04:27 -0500 Subject: [PATCH 31/91] HLRC: ML Post Data (#33443) * HLRC: ML Post data --- .../client/MLRequestConverters.java | 35 +++ .../client/MachineLearningClient.java | 48 ++++ .../client/ml/PostDataRequest.java | 229 ++++++++++++++++++ .../client/ml/PostDataResponse.java | 74 ++++++ .../client/MLRequestConvertersTests.java | 32 +++ .../client/MachineLearningIT.java | 30 ++- .../MlClientDocumentationIT.java | 70 ++++++ .../client/ml/PostDataRequestTests.java | 90 +++++++ .../client/ml/PostDataResponseTests.java | 43 ++++ .../high-level/ml/post-data.asciidoc | 86 +++++++ .../high-level/supported-apis.asciidoc | 2 + 11 files changed, 736 
insertions(+), 3 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataResponseTests.java create mode 100644 docs/java-rest/high-level/ml/post-data.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index b8d977d8eeb94..cbf653a713d39 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -19,10 +19,13 @@ package org.elasticsearch.client; +import org.apache.http.HttpEntity; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.client.RequestConverters.EndpointBuilder; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.DeleteJobRequest; @@ -34,13 +37,16 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.OpenJobRequest; +import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import java.io.IOException; import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; +import static org.elasticsearch.client.RequestConverters.createContentType; import static org.elasticsearch.client.RequestConverters.createEntity; final class MLRequestConverters { @@ -202,6 +208,35 @@ static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOExceptio return request; } + static Request postData(PostDataRequest postDataRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(postDataRequest.getJobId()) + .addPathPartAsIs("_data") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + + RequestConverters.Params params = new RequestConverters.Params(request); + if (postDataRequest.getResetStart() != null) { + params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart()); + } + if (postDataRequest.getResetEnd() != null) { + params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd()); + } + BytesReference content = postDataRequest.getContent(); + if (content != null) { + BytesRef source = postDataRequest.getContent().toBytesRef(); + HttpEntity byteEntity = new ByteArrayEntity(source.bytes, + source.offset, + source.length, + createContentType(postDataRequest.getXContentType())); + request.setEntity(byteEntity); + } + return request; + } + static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException { String endpoint = 
new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index bdfc34ad997d6..6e54b9259865f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.ml.PostDataRequest; +import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.CloseJobResponse; @@ -501,6 +503,52 @@ public void getRecordsAsync(GetRecordsRequest request, RequestOptions options, A Collections.emptySet()); } + /** + * Sends data to an anomaly detection job for analysis. + * + * NOTE: The job must have a state of open to receive and process the data. + * + *

+ * For additional info + * see ML POST Data documentation + *

+ * + * @param request PostDataRequest containing the data to post and some additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return response containing operational progress about the job + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::postData, + options, + PostDataResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Sends data to an anomaly detection job for analysis, asynchronously + * + * NOTE: The job must have a state of open to receive and process the data. + * + *

+ * For additional info + * see ML POST Data documentation + *

+ * + * @param request PostDataRequest containing the data to post and some additional options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void postDataAsync(PostDataRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::postData, + options, + PostDataResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Gets the influencers for a Machine Learning Job. *

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java new file mode 100644 index 0000000000000..cc015fc4837e2 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java @@ -0,0 +1,229 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +/** + * POJO for posting data to a Machine Learning job + */ +public class PostDataRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField RESET_START = new ParseField("reset_start"); + public static final ParseField RESET_END = new ParseField("reset_end"); + public static final ParseField CONTENT_TYPE = new ParseField("content_type"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("post_data_request", + (a) -> new PostDataRequest((String)a[0], XContentType.fromMediaTypeOrFormat((String)a[1]), new byte[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareString(ConstructingObjectParser.constructorArg(), CONTENT_TYPE); + PARSER.declareStringOrNull(PostDataRequest::setResetEnd, RESET_END); + PARSER.declareStringOrNull(PostDataRequest::setResetStart, RESET_START); + } + + private final String jobId; + private final XContentType xContentType; + private final BytesReference content; + private String resetStart; + private String resetEnd; + + /** + * Create a new PostDataRequest object + * + * @param jobId non-null jobId of the job to post data to + * @param xContentType content type of the data to post. 
Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported + * @param content bulk serialized content in the format of the passed {@link XContentType} + */ + public PostDataRequest(String jobId, XContentType xContentType, BytesReference content) { + this.jobId = Objects.requireNonNull(jobId, "job_id must not be null"); + this.xContentType = Objects.requireNonNull(xContentType, "content_type must not be null"); + this.content = Objects.requireNonNull(content, "content must not be null"); + } + + /** + * Create a new PostDataRequest object referencing the passed {@code byte[]} content + * + * @param jobId non-null jobId of the job to post data to + * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported + * @param content bulk serialized content in the format of the passed {@link XContentType} + */ + public PostDataRequest(String jobId, XContentType xContentType, byte[] content) { + this(jobId, xContentType, new BytesArray(content)); + } + + /** + * Create a new PostDataRequest object referencing the passed {@link JsonBuilder} object + * + * @param jobId non-null jobId of the job to post data to + * @param builder {@link JsonBuilder} object containing documents to be serialized and sent in {@link XContentType#JSON} format + */ + public PostDataRequest(String jobId, JsonBuilder builder) { + this(jobId, XContentType.JSON, builder.build()); + } + + public String getJobId() { + return jobId; + } + + public String getResetStart() { + return resetStart; + } + + /** + * Specifies the start of the bucket resetting range + * + * @param resetStart String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string + */ + public void setResetStart(String resetStart) { + this.resetStart = resetStart; + } + + public String getResetEnd() { + return resetEnd; + } + + /** + * Specifies the end of the bucket resetting range + * + * @param resetEnd String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string + */ + public void setResetEnd(String resetEnd) { + this.resetEnd = resetEnd; + } + + public BytesReference getContent() { + return content; + } + + public XContentType getXContentType() { + return xContentType; + } + + @Override + public int hashCode() { + //We leave out the content for server side parity + return Objects.hash(jobId, resetStart, resetEnd, xContentType); + } + + @Override + public boolean equals(Object obj) { + if(obj == this) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + //We leave out the content for server side parity + PostDataRequest other = (PostDataRequest) obj; + return Objects.equals(jobId, other.jobId) && + Objects.equals(resetStart, other.resetStart) && + Objects.equals(resetEnd, other.resetEnd) && + Objects.equals(xContentType, other.xContentType); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + builder.field(CONTENT_TYPE.getPreferredName(), xContentType.mediaType()); + if (resetEnd != null) { + builder.field(RESET_END.getPreferredName(), resetEnd); + } + if (resetStart != null) { + builder.field(RESET_START.getPreferredName(), resetStart); + } + builder.endObject(); + return builder; + } + + /** + * Class for incrementally building a bulk 
document request in {@link XContentType#JSON} format + */ + public static class JsonBuilder { + + private final List bytes = new ArrayList<>(); + + /** + * Add a document via a {@code byte[]} array + * + * @param doc {@code byte[]} array of a serialized JSON object + */ + public JsonBuilder addDoc(byte[] doc) { + bytes.add(ByteBuffer.wrap(doc)); + return this; + } + + /** + * Add a document via a serialized JSON String + * + * @param doc a serialized JSON String + */ + public JsonBuilder addDoc(String doc) { + bytes.add(ByteBuffer.wrap(doc.getBytes(StandardCharsets.UTF_8))); + return this; + } + + /** + * Add a document via an object map + * + * @param doc document object to add to bulk request + * @throws IOException on parsing/serialization errors + */ + public JsonBuilder addDoc(Map doc) throws IOException { + try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { + builder.map(doc); + bytes.add(ByteBuffer.wrap(BytesReference.toBytes(BytesReference.bytes(builder)))); + } + return this; + } + + private BytesReference build() { + ByteBuffer[] buffers = bytes.toArray(new ByteBuffer[bytes.size()]); + return BytesReference.fromByteBuffers(buffers); + } + + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java new file mode 100644 index 0000000000000..ce99316e90c76 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.ml.job.process.DataCounts; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Response object when posting data to a Machine Learning Job + */ +public class PostDataResponse extends ActionResponse implements ToXContentObject { + + private DataCounts dataCounts; + + public static PostDataResponse fromXContent(XContentParser parser) throws IOException { + return new PostDataResponse(DataCounts.PARSER.parse(parser, null)); + } + + public PostDataResponse(DataCounts counts) { + this.dataCounts = counts; + } + + public DataCounts getDataCounts() { + return dataCounts; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + return dataCounts.toXContent(builder, params); + } + + @Override + public int hashCode() { + return Objects.hashCode(dataCounts); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + PostDataResponse other = (PostDataResponse) obj; + return Objects.equals(dataCounts, other.dataCounts); + } + +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index f1b035566aa4d..e0f20e2f23c83 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.client.ml.GetOverallBucketsRequest; import org.elasticsearch.client.ml.GetRecordsRequest; import org.elasticsearch.client.ml.OpenJobRequest; +import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.job.config.AnalysisConfig; @@ -43,12 +44,15 @@ import org.elasticsearch.client.ml.job.util.PageParams; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; @@ -238,6 +242,34 @@ public void testGetRecords() throws IOException { } } + public void testPostData() throws Exception { + String jobId = randomAlphaOfLength(10); + PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); + Map obj = new HashMap<>(); + obj.put("foo", "bar"); + jsonBuilder.addDoc(obj); + + PostDataRequest postDataRequest = new PostDataRequest(jobId, jsonBuilder); + Request request = MLRequestConverters.postData(postDataRequest); + + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_data", request.getEndpoint()); + assertEquals("{\"foo\":\"bar\"}", requestEntityToString(request)); + 
assertEquals(postDataRequest.getXContentType().mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + assertFalse(request.getParameters().containsKey(PostDataRequest.RESET_END.getPreferredName())); + assertFalse(request.getParameters().containsKey(PostDataRequest.RESET_START.getPreferredName())); + + PostDataRequest postDataRequest2 = new PostDataRequest(jobId, XContentType.SMILE, new byte[0]); + postDataRequest2.setResetStart("2018-08-08T00:00:00Z"); + postDataRequest2.setResetEnd("2018-09-08T00:00:00Z"); + + request = MLRequestConverters.postData(postDataRequest2); + + assertEquals(postDataRequest2.getXContentType().mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + assertEquals("2018-09-08T00:00:00Z", request.getParameters().get(PostDataRequest.RESET_END.getPreferredName())); + assertEquals("2018-08-08T00:00:00Z", request.getParameters().get(PostDataRequest.RESET_START.getPreferredName())); + } + public void testGetInfluencers() throws IOException { String jobId = randomAlphaOfLength(10); GetInfluencersRequest getInfluencersRequest = new GetInfluencersRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index bf25d9d1c0fb3..93019ba0d43e0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -20,6 +20,8 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.ml.PostDataRequest; +import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.UpdateJobRequest; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.common.unit.TimeValue; @@ -41,13 +43,14 @@ import org.elasticsearch.client.ml.job.config.DataDescription; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; import org.junit.After; import java.io.IOException; import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -220,6 +223,27 @@ public void testGetJobStats() throws Exception { assertThat(exception.status().getStatus(), equalTo(404)); } + public void testPostData() throws Exception { + String jobId = randomValidJobId(); + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + + PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); + for(int i = 0; i < 10; i++) { + Map hashMap = new HashMap<>(); + hashMap.put("total", randomInt(1000)); + hashMap.put("timestamp", (i+1)*1000); + builder.addDoc(hashMap); + } + PostDataRequest postDataRequest = new PostDataRequest(jobId, builder); + + PostDataResponse response = execute(postDataRequest, machineLearningClient::postData, machineLearningClient::postDataAsync); + assertEquals(10, response.getDataCounts().getInputRecordCount()); + assertEquals(0, 
response.getDataCounts().getOutOfOrderTimeStampCount()); + } + public void testUpdateJob() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); @@ -256,8 +280,8 @@ public static Job buildJob(String jobId) { builder.setAnalysisConfig(configBuilder); DataDescription.Builder dataDescription = new DataDescription.Builder(); - dataDescription.setTimeFormat(randomFrom(DataDescription.EPOCH_MS, DataDescription.EPOCH)); - dataDescription.setTimeField(randomAlphaOfLength(10)); + dataDescription.setTimeFormat(DataDescription.EPOCH_MS); + dataDescription.setTimeField("timestamp"); builder.setDataDescription(dataDescription); return builder.build(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index ac7835735fcf1..bc452ad8503f7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -49,6 +49,8 @@ import org.elasticsearch.client.ml.GetRecordsResponse; import org.elasticsearch.client.ml.OpenJobRequest; import org.elasticsearch.client.ml.OpenJobResponse; +import org.elasticsearch.client.ml.PostDataRequest; +import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.PutJobRequest; import org.elasticsearch.client.ml.PutJobResponse; import org.elasticsearch.client.ml.UpdateJobRequest; @@ -58,6 +60,7 @@ import org.elasticsearch.client.ml.job.config.DetectionRule; import org.elasticsearch.client.ml.job.config.Detector; import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.process.DataCounts; import org.elasticsearch.client.ml.job.config.JobUpdate; import org.elasticsearch.client.ml.job.config.ModelPlotConfig; import org.elasticsearch.client.ml.job.config.Operator; @@ -882,6 +885,73 @@ public void onFailure(Exception e) { } } + public void testPostData() throws Exception { + RestHighLevelClient client = highLevelClient(); + + Job job = MachineLearningIT.buildJob("test-post-data"); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); + + { + //tag::x-pack-ml-post-data-request + PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); //<1> + Map mapData = new HashMap<>(); + mapData.put("total", 109); + jsonBuilder.addDoc(mapData); //<2> + jsonBuilder.addDoc("{\"total\":1000}"); //<3> + PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<4> + //end::x-pack-ml-post-data-request + + + //tag::x-pack-ml-post-data-request-options + postDataRequest.setResetStart("2018-08-31T16:35:07+00:00"); //<1> + postDataRequest.setResetEnd("2018-08-31T16:35:17+00:00"); //<2> + //end::x-pack-ml-post-data-request-options + postDataRequest.setResetEnd(null); + postDataRequest.setResetStart(null); + + //tag::x-pack-ml-post-data-execute + PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT); + //end::x-pack-ml-post-data-execute + + //tag::x-pack-ml-post-data-response + DataCounts dataCounts = postDataResponse.getDataCounts(); //<1> + //end::x-pack-ml-post-data-response + assertEquals(2, dataCounts.getInputRecordCount()); + + } + { + 
//tag::x-pack-ml-post-data-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(PostDataResponse postDataResponse) { + //<1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::x-pack-ml-post-data-listener + PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder(); + Map mapData = new HashMap<>(); + mapData.put("total", 109); + jsonBuilder.addDoc(mapData); + PostDataRequest postDataRequest = new PostDataRequest("test-post-data", jsonBuilder); //<1> + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-post-data-execute-async + client.machineLearning().postDataAsync(postDataRequest, RequestOptions.DEFAULT, listener); //<1> + // end::x-pack-ml-post-data-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testGetInfluencers() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataRequestTests.java new file mode 100644 index 0000000000000..363d37c3ca4a0 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataRequestTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; + + +public class PostDataRequestTests extends AbstractXContentTestCase { + + @Override + protected PostDataRequest createTestInstance() { + String jobId = randomAlphaOfLength(10); + XContentType contentType = randomFrom(XContentType.JSON, XContentType.SMILE); + + PostDataRequest request = new PostDataRequest(jobId, contentType, new byte[0]); + if (randomBoolean()) { + request.setResetEnd(randomAlphaOfLength(10)); + } + if (randomBoolean()) { + request.setResetStart(randomAlphaOfLength(10)); + } + + return request; + } + + @Override + protected PostDataRequest doParseInstance(XContentParser parser) { + return PostDataRequest.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testJsonBuilder() throws IOException { + + String jobId = randomAlphaOfLength(10); + PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); + + Map obj1 = new HashMap<>(); + obj1.put("entry1", "value1"); + obj1.put("entry2", "value2"); + builder.addDoc(obj1); + + builder.addDoc("{\"entry3\":\"value3\"}"); + builder.addDoc("{\"entry4\":\"value4\"}".getBytes(StandardCharsets.UTF_8)); + + PostDataRequest request = new PostDataRequest(jobId, builder); + + assertEquals("{\"entry1\":\"value1\",\"entry2\":\"value2\"}{\"entry3\":\"value3\"}{\"entry4\":\"value4\"}", + request.getContent().utf8ToString()); + assertEquals(XContentType.JSON, request.getXContentType()); + assertEquals(jobId, request.getJobId()); + } + + public void testFromByteArray() { + String jobId = randomAlphaOfLength(10); + PostDataRequest request = new PostDataRequest(jobId, + XContentType.JSON, + "{\"others\":{\"foo\":100}}".getBytes(StandardCharsets.UTF_8)); + + assertEquals("{\"others\":{\"foo\":100}}", request.getContent().utf8ToString()); + assertEquals(XContentType.JSON, request.getXContentType()); + assertEquals(jobId, request.getJobId()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataResponseTests.java new file mode 100644 index 0000000000000..fc74040cc407c --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/PostDataResponseTests.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.job.process.DataCountsTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class PostDataResponseTests extends AbstractXContentTestCase { + + @Override + protected PostDataResponse createTestInstance() { + return new PostDataResponse(DataCountsTests.createTestInstance(randomAlphaOfLength(10))); + } + + @Override + protected PostDataResponse doParseInstance(XContentParser parser) throws IOException { + return PostDataResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/docs/java-rest/high-level/ml/post-data.asciidoc b/docs/java-rest/high-level/ml/post-data.asciidoc new file mode 100644 index 0000000000000..2c8ca8f18a384 --- /dev/null +++ b/docs/java-rest/high-level/ml/post-data.asciidoc @@ -0,0 +1,86 @@ +[[java-rest-high-x-pack-ml-post-data]] +=== Post Data API + +The Post Data API provides the ability to post data to an open + {ml} job in the cluster. +It accepts a `PostDataRequest` object and responds +with a `PostDataResponse` object. + +[[java-rest-high-x-pack-ml-post-data-request]] +==== Post Data Request + +A `PostDataRequest` object gets created with an existing non-null `jobId` +and the `XContentType` being sent. Individual docs can be added +incrementally via the `PostDataRequest.JsonBuilder#addDoc` method. +These are then serialized and sent in bulk when passed to the `PostDataRequest`. + +Alternatively, the serialized bulk content can be set manually, along with its `XContentType` +through one of the other `PostDataRequest` constructors. + +Only `XContentType.JSON` and `XContentType.SMILE` are supported. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request] +-------------------------------------------------- +<1> Create a new `PostDataRequest.JsonBuilder` object for incrementally adding documents +<2> Add a new document as a `Map` object +<3> Add a new document as a serialized JSON formatted String. +<4> Constructing a new request referencing an opened `jobId`, and a JsonBuilder + +==== Optional Arguments + +The following arguments are optional. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-request-options] +-------------------------------------------------- +<1> Set the start of the bucket resetting time +<2> Set the end of the bucket resetting time + +[[java-rest-high-x-pack-ml-post-data-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-ml-post-data-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-execute-async] +-------------------------------------------------- +<1> The `PostDataRequest` to execute and the `ActionListener` to use when +the execution completes + +The method does not block and returns immediately. The passed `ActionListener` is used +to notify the caller of completion. A typical `ActionListener` for `PostDataResponse` may +look like + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-x-pack-ml-post-data-response]] +==== Post Data Response + +A `PostDataResponse` contains current data processing statistics. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-post-data-response] +-------------------------------------------------- +<1> `getDataCounts()` a `DataCounts` object containing the current +data processing counts. 
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index c482c8bccff23..e1335b0effc5d 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -221,6 +221,7 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> * <> include::ml/put-job.asciidoc[] @@ -234,6 +235,7 @@ include::ml/get-job-stats.asciidoc[] include::ml/get-buckets.asciidoc[] include::ml/get-overall-buckets.asciidoc[] include::ml/get-records.asciidoc[] +include::ml/post-data.asciidoc[] include::ml/get-influencers.asciidoc[] == Migration APIs From 79e79697349234e3e07804264bc5f23772e17ac4 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Fri, 7 Sep 2018 13:10:27 +0100 Subject: [PATCH 32/91] Removes redundant test method in SQL tests (#33498) --- .../execution/search/extractor/FieldHitExtractorTests.java | 5 ----- 1 file changed, 5 deletions(-) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java index a9e1349e8316b..9aa0c9f7b36c2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/FieldHitExtractorTests.java @@ -52,11 +52,6 @@ protected FieldHitExtractor mutateInstance(FieldHitExtractor instance) throws IO return new FieldHitExtractor(instance.fieldName() + "mutated", null, true, instance.hitName()); } - @AwaitsFix(bugUrl = "implement after we're sure of the InnerHitExtractor's implementation") - public void testGetNested() throws IOException { - fail("implement after we're sure of the InnerHitExtractor's implementation"); - } - public void testGetDottedValueWithDocValues() { String grandparent = randomAlphaOfLength(5); String parent = randomAlphaOfLength(5); From 90c99ea96e2c65dee37419e4f574eb9c37363ca9 Mon Sep 17 00:00:00 2001 From: Daniel Mitterdorfer Date: Fri, 7 Sep 2018 15:52:47 +0200 Subject: [PATCH 33/91] Make Watcher validation message copy/pasteable Watcher validates `action.auto_create_index` upon startup. If a user specifies a pattern that does not contain watcher indices, it raises an error message to include a list of three indices. However, the indices are separated by a comma and a space which is not considered in parsing. With this commit we change the error message string so it does not contain the additional space thus making it more straightforward to copy it to the configuration file. 
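To make the parsing pitfall concrete, here is a hypothetical illustration — it is not the actual `action.auto_create_index` parser, and assumes only that the setting value is split on bare commas without trimming surrounding whitespace:

    // Hypothetical sketch: a comma split that does not trim whitespace turns the
    // old, space-separated suggestion into patterns that can never match real
    // index names such as ".triggered_watches".
    String oldSuggestion = ".watches, .triggered_watches, .watcher-history-*";
    String newSuggestion = ".watches,.triggered_watches,.watcher-history-*";
    for (String pattern : oldSuggestion.split(",")) {
        System.out.println("[" + pattern + "]"); // second element prints "[ .triggered_watches]"
    }
    // The new message can be pasted into the configuration verbatim.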
Closes #33369 Relates #33497 --- .../java/org/elasticsearch/xpack/watcher/Watcher.java | 2 +- .../elasticsearch/xpack/watcher/WatcherPluginTests.java | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index 975ceacbffaf0..33b79c38ccaba 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -535,7 +535,7 @@ static void validAutoCreateIndex(Settings settings, Logger logger) { String errorMessage = LoggerMessageFormat.format("the [action.auto_create_index] setting value [{}] is too" + " restrictive. disable [action.auto_create_index] or set it to " + - "[{}, {}, {}*]", (Object) value, Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME, HistoryStoreField.INDEX_PREFIX); + "[{},{},{}*]", (Object) value, Watch.INDEX, TriggeredWatchStoreField.INDEX_NAME, HistoryStoreField.INDEX_PREFIX); if (Booleans.isFalse(value)) { throw new IllegalArgumentException(errorMessage); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java index e345e890db178..b13b035304d7c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java @@ -37,7 +37,7 @@ public void testValidAutoCreateIndex() { IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> Watcher.validAutoCreateIndex(Settings.builder().put("action.auto_create_index", false).build(), logger)); - assertThat(exception.getMessage(), containsString("[.watches, .triggered_watches, .watcher-history-*]")); + assertThat(exception.getMessage(), containsString("[.watches,.triggered_watches,.watcher-history-*]")); Watcher.validAutoCreateIndex(Settings.builder().put("action.auto_create_index", ".watches,.triggered_watches,.watcher-history*").build(), logger); @@ -46,16 +46,16 @@ public void testValidAutoCreateIndex() { exception = expectThrows(IllegalArgumentException.class, () -> Watcher.validAutoCreateIndex(Settings.builder().put("action.auto_create_index", ".watches").build(), logger)); - assertThat(exception.getMessage(), containsString("[.watches, .triggered_watches, .watcher-history-*]")); + assertThat(exception.getMessage(), containsString("[.watches,.triggered_watches,.watcher-history-*]")); exception = expectThrows(IllegalArgumentException.class, () -> Watcher.validAutoCreateIndex(Settings.builder().put("action.auto_create_index", ".triggered_watch").build(), logger)); - assertThat(exception.getMessage(), containsString("[.watches, .triggered_watches, .watcher-history-*]")); + assertThat(exception.getMessage(), containsString("[.watches,.triggered_watches,.watcher-history-*]")); exception = expectThrows(IllegalArgumentException.class, () -> Watcher.validAutoCreateIndex(Settings.builder().put("action.auto_create_index", ".watcher-history-*").build(), logger)); - assertThat(exception.getMessage(), containsString("[.watches, .triggered_watches, .watcher-history-*]")); + assertThat(exception.getMessage(), containsString("[.watches,.triggered_watches,.watcher-history-*]")); } public void testWatcherDisabledTests() throws Exception { From 
c32e71c61f8f8d71e603b9186dce31fce51fe020 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 7 Sep 2018 09:25:47 -0500 Subject: [PATCH 34/91] HLRC: split snapshot request converters (#33439) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the SnapshotClient request converters. --- .../client/RequestConverters.java | 129 -------- .../elasticsearch/client/SnapshotClient.java | 40 +-- .../client/SnapshotRequestConverters.java | 162 ++++++++++ .../client/RequestConvertersTests.java | 236 --------------- .../SnapshotRequestConvertersTests.java | 277 ++++++++++++++++++ 5 files changed, 459 insertions(+), 385 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index c86cf397621c8..753c74aabd052 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -30,15 +30,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; @@ -893,126 +884,6 @@ static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) thr return request; } - static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) { - String[] repositories = getRepositoriesRequest.repositories() == null ? 
Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories(); - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories).build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout()); - parameters.withLocal(getRepositoriesRequest.local()); - return request; - } - - static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout()); - parameters.withTimeout(putRepositoryRequest.timeout()); - parameters.withVerify(putRepositoryRequest.verify()); - - request.setEntity(createEntity(putRepositoryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name()).build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout()); - parameters.withTimeout(deleteRepositoryRequest.timeout()); - return request; - } - - static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") - .addPathPart(verifyRepositoryRequest.name()) - .addPathPartAsIs("_verify") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout()); - parameters.withTimeout(verifyRepositoryRequest.timeout()); - return request; - } - - static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPart("_snapshot") - .addPathPart(createSnapshotRequest.repository()) - .addPathPart(createSnapshotRequest.snapshot()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - Params params = new Params(request); - params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); - params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); - request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) { - EndpointBuilder endpointBuilder = new EndpointBuilder().addPathPartAsIs("_snapshot") - .addPathPart(getSnapshotsRequest.repository()); - String endpoint; - if (getSnapshotsRequest.snapshots().length == 0) { - endpoint = endpointBuilder.addPathPart("_all").build(); - } else { - endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build(); - } - - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout()); - parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable())); - parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose())); - - return request; - } - 
- static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") - .addPathPart(snapshotsStatusRequest.repository()) - .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots()) - .addPathPartAsIs("_status") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout()); - parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable()); - return request; - } - - static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") - .addPathPart(restoreSnapshotRequest.repository()) - .addPathPart(restoreSnapshotRequest.snapshot()) - .addPathPartAsIs("_restore") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - Params parameters = new Params(request); - parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout()); - parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion()); - request.setEntity(createEntity(restoreSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") - .addPathPart(deleteSnapshotRequest.repository()) - .addPathPart(deleteSnapshotRequest.snapshot()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); - return request; - } - static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPartAsIs("_template").addPathPart(putIndexTemplateRequest.name()).build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index 7df0df4836d68..f3a49f064596e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -65,7 +65,7 @@ public final class SnapshotClient { */ public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, + return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, emptySet()); } @@ -80,7 +80,7 @@ public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositor */ public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, SnapshotRequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, listener, emptySet()); } @@ -94,7 +94,7 @@ public void 
getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, Re * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options, + return restHighLevelClient.performRequestAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -108,7 +108,7 @@ public AcknowledgedResponse createRepository(PutRepositoryRequest putRepositoryR */ public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, RequestConverters::createRepository, options, + restHighLevelClient.performRequestAsyncAndParseEntity(putRepositoryRequest, SnapshotRequestConverters::createRepository, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -123,8 +123,8 @@ public void createRepositoryAsync(PutRepositoryRequest putRepositoryRequest, Req */ public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options, - AcknowledgedResponse::fromXContent, emptySet()); + return restHighLevelClient.performRequestAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository, + options, AcknowledgedResponse::fromXContent, emptySet()); } /** @@ -137,7 +137,7 @@ public AcknowledgedResponse deleteRepository(DeleteRepositoryRequest deleteRepos */ public void deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, RequestConverters::deleteRepository, options, + restHighLevelClient.performRequestAsyncAndParseEntity(deleteRepositoryRequest, SnapshotRequestConverters::deleteRepository, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -152,8 +152,8 @@ public void deleteRepositoryAsync(DeleteRepositoryRequest deleteRepositoryReques */ public VerifyRepositoryResponse verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options, - VerifyRepositoryResponse::fromXContent, emptySet()); + return restHighLevelClient.performRequestAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository, + options, VerifyRepositoryResponse::fromXContent, emptySet()); } /** @@ -166,7 +166,7 @@ public VerifyRepositoryResponse verifyRepository(VerifyRepositoryRequest verifyR */ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, RequestConverters::verifyRepository, options, + restHighLevelClient.performRequestAsyncAndParseEntity(verifyRepositoryRequest, SnapshotRequestConverters::verifyRepository, options, VerifyRepositoryResponse::fromXContent, listener, emptySet()); } @@ -178,7 +178,7 @@ 
public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques */ public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options, CreateSnapshotResponse::fromXContent, emptySet()); } @@ -190,7 +190,7 @@ public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest */ public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, SnapshotRequestConverters::createSnapshot, options, CreateSnapshotResponse::fromXContent, listener, emptySet()); } @@ -205,7 +205,7 @@ public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOpti * @throws IOException in case there is a problem sending the request or parsing back the response */ public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options, + return restHighLevelClient.performRequestAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options, GetSnapshotsResponse::fromXContent, emptySet()); } @@ -219,7 +219,7 @@ public GetSnapshotsResponse get(GetSnapshotsRequest getSnapshotsRequest, Request * @param listener the listener to be notified upon request completion */ public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, RequestConverters::getSnapshots, options, + restHighLevelClient.performRequestAsyncAndParseEntity(getSnapshotsRequest, SnapshotRequestConverters::getSnapshots, options, GetSnapshotsResponse::fromXContent, listener, emptySet()); } @@ -234,7 +234,7 @@ public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions opt */ public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options, + return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options, SnapshotsStatusResponse::fromXContent, emptySet()); } @@ -248,7 +248,7 @@ public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequ */ public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options, + restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, SnapshotRequestConverters::snapshotsStatus, options, SnapshotsStatusResponse::fromXContent, listener, emptySet()); } @@ -263,7 +263,7 @@ public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOp * @throws IOException in case there is a problem sending the request or 
parsing back the response */ public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options, + return restHighLevelClient.performRequestAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options, RestoreSnapshotResponse::fromXContent, emptySet()); } @@ -278,7 +278,7 @@ public RestoreSnapshotResponse restore(RestoreSnapshotRequest restoreSnapshotReq */ public void restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, RequestConverters::restoreSnapshot, options, + restHighLevelClient.performRequestAsyncAndParseEntity(restoreSnapshotRequest, SnapshotRequestConverters::restoreSnapshot, options, RestoreSnapshotResponse::fromXContent, listener, emptySet()); } @@ -293,7 +293,7 @@ public void restoreAsync(RestoreSnapshotRequest restoreSnapshotRequest, RequestO * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + return restHighLevelClient.performRequestAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -308,7 +308,7 @@ public AcknowledgedResponse delete(DeleteSnapshotRequest deleteSnapshotRequest, */ public void deleteAsync(DeleteSnapshotRequest deleteSnapshotRequest, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, RequestConverters::deleteSnapshot, options, + restHighLevelClient.performRequestAsyncAndParseEntity(deleteSnapshotRequest, SnapshotRequestConverters::deleteSnapshot, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java new file mode 100644 index 0000000000000..7ddd089258539 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotRequestConverters.java @@ -0,0 +1,162 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; +import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; +import org.elasticsearch.common.Strings; + +import java.io.IOException; + +public class SnapshotRequestConverters { + + static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) { + String[] repositories = getRepositoriesRequest.repositories() == null ? Strings.EMPTY_ARRAY : getRepositoriesRequest.repositories(); + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addCommaSeparatedPathParts(repositories) + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout()); + parameters.withLocal(getRepositoriesRequest.local()); + return request; + } + + static Request createRepository(PutRepositoryRequest putRepositoryRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot").addPathPart(putRepositoryRequest.name()).build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout()); + parameters.withTimeout(putRepositoryRequest.timeout()); + parameters.withVerify(putRepositoryRequest.verify()); + + request.setEntity(RequestConverters.createEntity(putRepositoryRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot").addPathPart(deleteRepositoryRequest.name()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout()); + parameters.withTimeout(deleteRepositoryRequest.timeout()); + return request; + } + + static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(verifyRepositoryRequest.name()) + .addPathPartAsIs("_verify") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout()); + 
parameters.withTimeout(verifyRepositoryRequest.timeout()); + return request; + } + + static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPart("_snapshot") + .addPathPart(createSnapshotRequest.repository()) + .addPathPart(createSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); + params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); + request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) { + RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(getSnapshotsRequest.repository()); + String endpoint; + if (getSnapshotsRequest.snapshots().length == 0) { + endpoint = endpointBuilder.addPathPart("_all").build(); + } else { + endpoint = endpointBuilder.addCommaSeparatedPathParts(getSnapshotsRequest.snapshots()).build(); + } + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout()); + parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable())); + parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose())); + + return request; + } + + static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(snapshotsStatusRequest.repository()) + .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots()) + .addPathPartAsIs("_status") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout()); + parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable()); + return request; + } + + static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(restoreSnapshotRequest.repository()) + .addPathPart(restoreSnapshotRequest.snapshot()) + .addPathPartAsIs("_restore") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout()); + parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion()); + request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { + String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(deleteSnapshotRequest.repository()) + .addPathPart(deleteSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params parameters 
= new RequestConverters.Params(request); + parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); + return request; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 12a285fec8887..1f19c9c31fe80 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -29,15 +29,6 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; -import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; -import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; -import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; -import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; -import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; -import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; @@ -97,11 +88,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.ToXContent; @@ -127,7 +116,6 @@ import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; -import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -150,7 +138,6 @@ import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1996,229 +1983,6 @@ public void testIndexPutSettings() throws IOException { assertEquals(expectedParams, request.getParameters()); } - public void testGetRepositories() { - Map expectedParams = new HashMap<>(); - StringBuilder endpoint = new StringBuilder("/_snapshot"); - - GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest(); - setRandomMasterTimeout(getRepositoriesRequest, expectedParams); - setRandomLocal(getRepositoriesRequest, expectedParams); - - if (randomBoolean()) { - String[] entries = new String[] { "a", "b", "c" }; - 
getRepositoriesRequest.repositories(entries); - endpoint.append("/" + String.join(",", entries)); - } - - Request request = RequestConverters.getRepositories(getRepositoriesRequest); - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - } - - public void testCreateRepository() throws IOException { - String repository = randomIndicesNames(1, 1)[0]; - String endpoint = "/_snapshot/" + repository; - Path repositoryLocation = PathUtils.get("."); - PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository); - putRepositoryRequest.type(FsRepository.TYPE); - putRepositoryRequest.verify(randomBoolean()); - - putRepositoryRequest.settings( - Settings.builder() - .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation) - .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean()) - .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES) - .build()); - - Request request = RequestConverters.createRepository(putRepositoryRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); - assertToXContentBody(putRepositoryRequest, request.getEntity()); - } - - public void testDeleteRepository() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - - StringBuilder endpoint = new StringBuilder("/_snapshot/" + repository); - - DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest(); - deleteRepositoryRequest.name(repository); - setRandomMasterTimeout(deleteRepositoryRequest, expectedParams); - setRandomTimeout(deleteRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - - Request request = RequestConverters.deleteRepository(deleteRepositoryRequest); - assertThat(endpoint.toString(), equalTo(request.getEndpoint())); - assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertNull(request.getEntity()); - } - - public void testVerifyRepository() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String endpoint = "/_snapshot/" + repository + "/_verify"; - - VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(repository); - setRandomMasterTimeout(verifyRepositoryRequest, expectedParams); - setRandomTimeout(verifyRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - - Request request = RequestConverters.verifyRepository(verifyRepositoryRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - } - - public void testCreateSnapshot() throws IOException { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0]; - String endpoint = "/_snapshot/" + repository + "/" + snapshot; - - CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); - setRandomMasterTimeout(createSnapshotRequest, expectedParams); - Boolean waitForCompletion = randomBoolean(); - createSnapshotRequest.waitForCompletion(waitForCompletion); - - if (waitForCompletion) { - 
expectedParams.put("wait_for_completion", waitForCompletion.toString()); - } - - Request request = RequestConverters.createSnapshot(createSnapshotRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertToXContentBody(createSnapshotRequest, request.getEntity()); - } - - public void testGetSnapshots() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); - String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); - - String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2); - - GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(); - getSnapshotsRequest.repository(repository); - getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0])); - setRandomMasterTimeout(getSnapshotsRequest, expectedParams); - - if (randomBoolean()) { - boolean ignoreUnavailable = randomBoolean(); - getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); - expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); - } else { - expectedParams.put("ignore_unavailable", Boolean.FALSE.toString()); - } - - if (randomBoolean()) { - boolean verbose = randomBoolean(); - getSnapshotsRequest.verbose(verbose); - expectedParams.put("verbose", Boolean.toString(verbose)); - } else { - expectedParams.put("verbose", Boolean.TRUE.toString()); - } - - Request request = RequestConverters.getSnapshots(getSnapshotsRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertNull(request.getEntity()); - } - - public void testGetAllSnapshots() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - - String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository); - - GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository); - setRandomMasterTimeout(getSnapshotsRequest, expectedParams); - - boolean ignoreUnavailable = randomBoolean(); - getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); - expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); - - boolean verbose = randomBoolean(); - getSnapshotsRequest.verbose(verbose); - expectedParams.put("verbose", Boolean.toString(verbose)); - - Request request = RequestConverters.getSnapshots(getSnapshotsRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertNull(request.getEntity()); - } - - public void testSnapshotsStatus() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String[] snapshots = randomIndicesNames(1, 5); - StringBuilder snapshotNames = new StringBuilder(snapshots[0]); - for (int idx = 1; idx < snapshots.length; idx++) { - snapshotNames.append(",").append(snapshots[idx]); - } - boolean ignoreUnavailable = randomBoolean(); - String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status"; - - SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots); - 
setRandomMasterTimeout(snapshotsStatusRequest, expectedParams); - snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable); - expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); - - Request request = RequestConverters.snapshotsStatus(snapshotsStatusRequest); - assertThat(request.getEndpoint(), equalTo(endpoint)); - assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); - assertThat(request.getParameters(), equalTo(expectedParams)); - assertThat(request.getEntity(), is(nullValue())); - } - - public void testRestoreSnapshot() throws IOException { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); - String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot); - - RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot); - setRandomMasterTimeout(restoreSnapshotRequest, expectedParams); - if (randomBoolean()) { - restoreSnapshotRequest.waitForCompletion(true); - expectedParams.put("wait_for_completion", "true"); - } - if (randomBoolean()) { - String timeout = randomTimeValue(); - restoreSnapshotRequest.masterNodeTimeout(timeout); - expectedParams.put("master_timeout", timeout); - } - - Request request = RequestConverters.restoreSnapshot(restoreSnapshotRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertToXContentBody(restoreSnapshotRequest, request.getEntity()); - } - - public void testDeleteSnapshot() { - Map expectedParams = new HashMap<>(); - String repository = randomIndicesNames(1, 1)[0]; - String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); - - String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot); - - DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest(); - deleteSnapshotRequest.repository(repository); - deleteSnapshotRequest.snapshot(snapshot); - setRandomMasterTimeout(deleteSnapshotRequest, expectedParams); - - Request request = RequestConverters.deleteSnapshot(deleteSnapshotRequest); - assertThat(endpoint, equalTo(request.getEndpoint())); - assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); - assertThat(expectedParams, equalTo(request.getParameters())); - assertNull(request.getEntity()); - } - public void testPutTemplateRequest() throws Exception { Map names = new HashMap<>(); names.put("log", "log"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotRequestConvertersTests.java new file mode 100644 index 0000000000000..efd321aa7ee34 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotRequestConvertersTests.java @@ -0,0 +1,277 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; +import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; +import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class SnapshotRequestConvertersTests extends ESTestCase { + + public void testGetRepositories() { + Map expectedParams = new HashMap<>(); + StringBuilder endpoint = new StringBuilder("/_snapshot"); + + GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest(); + RequestConvertersTests.setRandomMasterTimeout(getRepositoriesRequest, expectedParams); + RequestConvertersTests.setRandomLocal(getRepositoriesRequest, expectedParams); + + if (randomBoolean()) { + String[] entries = new String[] { "a", "b", "c" }; + getRepositoriesRequest.repositories(entries); + endpoint.append("/" + String.join(",", entries)); + } + + Request request = SnapshotRequestConverters.getRepositories(getRepositoriesRequest); + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + } + + public void testCreateRepository() throws IOException { + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String endpoint = "/_snapshot/" + repository; + Path repositoryLocation = PathUtils.get("."); + PutRepositoryRequest putRepositoryRequest = new PutRepositoryRequest(repository); + putRepositoryRequest.type(FsRepository.TYPE); + putRepositoryRequest.verify(randomBoolean()); + + putRepositoryRequest.settings( + Settings.builder() + .put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation) + 
.put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean()) + .put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES) + .build()); + + Request request = SnapshotRequestConverters.createRepository(putRepositoryRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); + RequestConvertersTests.assertToXContentBody(putRepositoryRequest, request.getEntity()); + } + + public void testDeleteRepository() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + + StringBuilder endpoint = new StringBuilder("/_snapshot/" + repository); + + DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest(); + deleteRepositoryRequest.name(repository); + RequestConvertersTests.setRandomMasterTimeout(deleteRepositoryRequest, expectedParams); + RequestConvertersTests.setRandomTimeout(deleteRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + + Request request = SnapshotRequestConverters.deleteRepository(deleteRepositoryRequest); + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + + public void testVerifyRepository() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String endpoint = "/_snapshot/" + repository + "/_verify"; + + VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(repository); + RequestConvertersTests.setRandomMasterTimeout(verifyRepositoryRequest, expectedParams); + RequestConvertersTests.setRandomTimeout(verifyRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + + Request request = SnapshotRequestConverters.verifyRepository(verifyRepositoryRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + } + + public void testCreateSnapshot() throws IOException { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0]; + String endpoint = "/_snapshot/" + repository + "/" + snapshot; + + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); + RequestConvertersTests.setRandomMasterTimeout(createSnapshotRequest, expectedParams); + Boolean waitForCompletion = randomBoolean(); + createSnapshotRequest.waitForCompletion(waitForCompletion); + + if (waitForCompletion) { + expectedParams.put("wait_for_completion", waitForCompletion.toString()); + } + + Request request = SnapshotRequestConverters.createSnapshot(createSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + RequestConvertersTests.assertToXContentBody(createSnapshotRequest, request.getEntity()); + } + + public void testGetSnapshots() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String snapshot1 = "snapshot1-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); 
+ String snapshot2 = "snapshot2-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s,%s", repository, snapshot1, snapshot2); + + GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(); + getSnapshotsRequest.repository(repository); + getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0])); + RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + + if (randomBoolean()) { + boolean ignoreUnavailable = randomBoolean(); + getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + } else { + expectedParams.put("ignore_unavailable", Boolean.FALSE.toString()); + } + + if (randomBoolean()) { + boolean verbose = randomBoolean(); + getSnapshotsRequest.verbose(verbose); + expectedParams.put("verbose", Boolean.toString(verbose)); + } else { + expectedParams.put("verbose", Boolean.TRUE.toString()); + } + + Request request = SnapshotRequestConverters.getSnapshots(getSnapshotsRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + + public void testGetAllSnapshots() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository); + + GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository); + RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + + boolean ignoreUnavailable = randomBoolean(); + getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + + boolean verbose = randomBoolean(); + getSnapshotsRequest.verbose(verbose); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request request = SnapshotRequestConverters.getSnapshots(getSnapshotsRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } + + public void testSnapshotsStatus() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String[] snapshots = RequestConvertersTests.randomIndicesNames(1, 5); + StringBuilder snapshotNames = new StringBuilder(snapshots[0]); + for (int idx = 1; idx < snapshots.length; idx++) { + snapshotNames.append(",").append(snapshots[idx]); + } + boolean ignoreUnavailable = randomBoolean(); + String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status"; + + SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots); + RequestConvertersTests.setRandomMasterTimeout(snapshotsStatusRequest, expectedParams); + snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + + Request request = SnapshotRequestConverters.snapshotsStatus(snapshotsStatusRequest); + assertThat(request.getEndpoint(), equalTo(endpoint)); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + 
assertThat(request.getEntity(), is(nullValue())); + } + + public void testRestoreSnapshot() throws IOException { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot); + + RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot); + RequestConvertersTests.setRandomMasterTimeout(restoreSnapshotRequest, expectedParams); + if (randomBoolean()) { + restoreSnapshotRequest.waitForCompletion(true); + expectedParams.put("wait_for_completion", "true"); + } + if (randomBoolean()) { + String timeout = randomTimeValue(); + restoreSnapshotRequest.masterNodeTimeout(timeout); + expectedParams.put("master_timeout", timeout); + } + + Request request = SnapshotRequestConverters.restoreSnapshot(restoreSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPost.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + RequestConvertersTests.assertToXContentBody(restoreSnapshotRequest, request.getEntity()); + } + + public void testDeleteSnapshot() { + Map expectedParams = new HashMap<>(); + String repository = RequestConvertersTests.randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + randomAlphaOfLengthBetween(2, 5).toLowerCase(Locale.ROOT); + + String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s", repository, snapshot); + + DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest(); + deleteSnapshotRequest.repository(repository); + deleteSnapshotRequest.snapshot(snapshot); + RequestConvertersTests.setRandomMasterTimeout(deleteSnapshotRequest, expectedParams); + + Request request = SnapshotRequestConverters.deleteSnapshot(deleteSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpDelete.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertNull(request.getEntity()); + } +} From 42469a993058bb140ce4a9a1dd9e7c9e66de539d Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 7 Sep 2018 09:46:27 -0500 Subject: [PATCH 35/91] HLRC: split migration request converters (#33436) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the MigrationClient request converters. 
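A caller-side sketch (not part of this patch) of why the refactor is safe: the public MigrationClient entry point is unchanged and only the converter moved into its own class. The host, the index name, and the getActions() accessor on the response are illustrative assumptions rather than code from this commit.

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest;
import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoResponse;

public class MigrationAssistanceExample {
    public static void main(String[] args) throws Exception {
        // Illustrative host; the converter split changes no caller-facing API.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // "my-index" is a placeholder; the varargs constructor also accepts
            // zero indices, in which case the whole cluster is checked.
            IndexUpgradeInfoRequest request = new IndexUpgradeInfoRequest("my-index");
            // Internally this now routes through
            // MigrationRequestConverters::getMigrationAssistance rather than
            // the monolithic RequestConverters.
            IndexUpgradeInfoResponse response =
                    client.migration().getAssistance(request, RequestOptions.DEFAULT);
            System.out.println(response.getActions());
        }
    }
}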
--- .../elasticsearch/client/MigrationClient.java | 2 +- .../client/MigrationRequestConverters.java | 37 ++++++++++++++ .../client/RequestConverters.java | 12 ----- .../MigrationRequestConvertersTests.java | 48 +++++++++++++++++++ .../client/RequestConvertersTests.java | 22 +-------- 5 files changed, 88 insertions(+), 33 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java index 7da3832994768..8717943d79718 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationClient.java @@ -49,7 +49,7 @@ public final class MigrationClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public IndexUpgradeInfoResponse getAssistance(IndexUpgradeInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::getMigrationAssistance, options, + return restHighLevelClient.performRequestAndParseEntity(request, MigrationRequestConverters::getMigrationAssistance, options, IndexUpgradeInfoResponse::fromXContent, Collections.emptySet()); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java new file mode 100644 index 0000000000000..2f5309350df42 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MigrationRequestConverters.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; + +public class MigrationRequestConverters { + + static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) { + RequestConverters.EndpointBuilder endpointBuilder = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack/migration/assistance") + .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices()); + String endpoint = endpointBuilder.build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions()); + return request; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 753c74aabd052..5401d32b6b735 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -100,7 +100,6 @@ import org.elasticsearch.index.reindex.UpdateByQueryRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackUsageRequest; -import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -1028,17 +1027,6 @@ static Request xpackUsage(XPackUsageRequest usageRequest) { return request; } - static Request getMigrationAssistance(IndexUpgradeInfoRequest indexUpgradeInfoRequest) { - EndpointBuilder endpointBuilder = new EndpointBuilder() - .addPathPartAsIs("_xpack/migration/assistance") - .addCommaSeparatedPathParts(indexUpgradeInfoRequest.indices()); - String endpoint = endpointBuilder.build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - Params parameters = new Params(request); - parameters.withIndicesOptions(indexUpgradeInfoRequest.indicesOptions()); - return request; - } - static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationRequestConvertersTests.java new file mode 100644 index 0000000000000..97a2cc16a7ef9 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MigrationRequestConvertersTests.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.Map; + +public class MigrationRequestConvertersTests extends ESTestCase { + + public static void testGetMigrationAssistance() { + IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest(); + String expectedEndpoint = "/_xpack/migration/assistance"; + if (randomBoolean()) { + String[] indices = RequestConvertersTests.randomIndicesNames(1, 5); + upgradeInfoRequest.indices(indices); + expectedEndpoint += "/" + String.join(",", indices); + } + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, + expectedParams); + Request request = MigrationRequestConverters.getMigrationAssistance(upgradeInfoRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(expectedEndpoint, request.getEndpoint()); + assertNull(request.getEntity()); + assertEquals(expectedParams, request.getParameters()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 1f19c9c31fe80..d346934ff03eb 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -113,7 +113,6 @@ import org.elasticsearch.index.reindex.RemoteInfo; import org.elasticsearch.index.reindex.UpdateByQueryRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; -import org.elasticsearch.protocol.xpack.migration.IndexUpgradeInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -2291,23 +2290,6 @@ public void testXPackInfo() { assertEquals(expectedParams, request.getParameters()); } - public void testGetMigrationAssistance() { - IndexUpgradeInfoRequest upgradeInfoRequest = new IndexUpgradeInfoRequest(); - String expectedEndpoint = "/_xpack/migration/assistance"; - if (randomBoolean()) { - String[] indices = randomIndicesNames(1, 5); - upgradeInfoRequest.indices(indices); - expectedEndpoint += "/" + String.join(",", indices); - } - Map expectedParams = new HashMap<>(); - setRandomIndicesOptions(upgradeInfoRequest::indicesOptions, upgradeInfoRequest::indicesOptions, expectedParams); - Request request = RequestConverters.getMigrationAssistance(upgradeInfoRequest); - assertEquals(HttpGet.METHOD_NAME, request.getMethod()); - assertEquals(expectedEndpoint, request.getEndpoint()); - assertNull(request.getEntity()); - assertEquals(expectedParams, request.getParameters()); - } - public void testXPackPutWatch() throws Exception { PutWatchRequest putWatchRequest = new PutWatchRequest(); String 
watchId = randomAlphaOfLength(10); @@ -2410,8 +2392,8 @@ private static void setRandomSearchParams(SearchRequest searchRequest, } } - private static void setRandomIndicesOptions(Consumer setter, Supplier getter, - Map expectedParams) { + static void setRandomIndicesOptions(Consumer setter, Supplier getter, + Map expectedParams) { if (randomBoolean()) { setter.accept(IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())); From 4d233107f8c8af910cd3dfabf10f7eec7a35aa58 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Fri, 7 Sep 2018 11:16:58 -0500 Subject: [PATCH 36/91] HLRC: ML Forecast Job (#33506) * HLRC: ML Forecast job --- .../client/MLRequestConverters.java | 14 ++ .../client/MachineLearningClient.java | 46 ++++++ .../client/ml/ForecastJobRequest.java | 140 ++++++++++++++++++ .../client/ml/ForecastJobResponse.java | 102 +++++++++++++ .../client/MLRequestConvertersTests.java | 16 ++ .../client/MachineLearningIT.java | 27 ++++ .../MlClientDocumentationIT.java | 69 +++++++++ .../client/ml/ForecastJobRequestTests.java | 51 +++++++ .../client/ml/ForecastJobResponseTests.java | 42 ++++++ .../high-level/ml/forecast-job.asciidoc | 76 ++++++++++ .../high-level/supported-apis.asciidoc | 2 + 11 files changed, 585 insertions(+) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobResponseTests.java create mode 100644 docs/java-rest/high-level/ml/forecast-job.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index cbf653a713d39..ecbe7f2d3a5d3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -30,6 +30,7 @@ import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; +import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetJobRequest; @@ -153,6 +154,19 @@ static Request flushJob(FlushJobRequest flushJobRequest) throws IOException { return request; } + static Request forecastJob(ForecastJobRequest forecastJobRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(forecastJobRequest.getJobId()) + .addPathPartAsIs("_forecast") + .build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java 
index 6e54b9259865f..85c5771f3450b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -19,6 +19,8 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.ml.ForecastJobRequest; +import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.UpdateJobRequest; @@ -360,6 +362,28 @@ public void flushJobAsync(FlushJobRequest request, RequestOptions options, Actio Collections.emptySet()); } + /** + * Creates a forecast of an existing, opened Machine Learning Job + * + * This predicts the future behavior of a time series by using its historical behavior. + * + *

+ * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html">Forecast ML Job Documentation</a> + *
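+ * A minimal hypothetical call (the job id "my-job" is invented for illustration):
+ * {@code ForecastJobResponse response = client.machineLearning().forecastJob(new ForecastJobRequest("my-job"), RequestOptions.DEFAULT);}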

+ * @param request ForecastJobRequest with forecasting options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return response containing forecast acknowledgement and new forecast's ID + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::forecastJob, + options, + ForecastJobResponse::fromXContent, + Collections.emptySet()); + } + /** * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} * @@ -376,6 +400,28 @@ public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options Collections.emptySet()); } + /** + * Creates a forecast of an existing, opened Machine Learning Job asynchronously + * + * This predicts the future behavior of a time series by using its historical behavior. + * + *

+ * For additional info + * see <a href="http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html">Forecast ML Job Documentation</a> + *
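+ * A minimal hypothetical call (request and listener built as elsewhere in this patch):
+ * {@code client.machineLearning().forecastJobAsync(request, RequestOptions.DEFAULT, listener);}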

+ * @param request ForecastJobRequest with forecasting options + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::forecastJob, + options, + ForecastJobResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously * diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java new file mode 100644 index 0000000000000..67d290c37f08b --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java @@ -0,0 +1,140 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * Pojo for forecasting an existing and open Machine Learning Job + */ +public class ForecastJobRequest extends ActionRequest implements ToXContentObject { + + public static final ParseField DURATION = new ParseField("duration"); + public static final ParseField EXPIRES_IN = new ParseField("expires_in"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("forecast_job_request", (a) -> new ForecastJobRequest((String)a[0])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareString( + (request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION); + PARSER.declareString( + (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), EXPIRES_IN); + } + + private final String jobId; + private TimeValue duration; + private TimeValue expiresIn; + + /** + * A new forecast request + * + * @param jobId the non-null, existing, and opened jobId to forecast + */ + public ForecastJobRequest(String jobId) { + this.jobId = jobId; + } + + public String getJobId() { + return jobId; + } + + public TimeValue getDuration() { + return duration; + } + + /** + * Set the forecast duration + * + * A period of time that indicates how far into the future to forecast. + * The default value is 1 day. The forecast starts at the last record that was processed. + * + * @param duration TimeValue for the duration of the forecast + */ + public void setDuration(TimeValue duration) { + this.duration = duration; + } + + public TimeValue getExpiresIn() { + return expiresIn; + } + + /** + * Set the forecast expiration + * + * The period of time that forecast results are retained. + * After a forecast expires, the results are deleted. The default value is 14 days. + * If set to a value of 0, the forecast is never automatically deleted. 
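+ * For example, {@code forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48))} would keep
+ * the forecast results for two days before deletion (hypothetical value, for illustration only).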
+ * + * @param expiresIn TimeValue for the forecast expiration + */ + public void setExpiresIn(TimeValue expiresIn) { + this.expiresIn = expiresIn; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, duration, expiresIn); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ForecastJobRequest other = (ForecastJobRequest) obj; + return Objects.equals(jobId, other.jobId) + && Objects.equals(duration, other.duration) + && Objects.equals(expiresIn, other.expiresIn); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (duration != null) { + builder.field(DURATION.getPreferredName(), duration.getStringRep()); + } + if (expiresIn != null) { + builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep()); + } + builder.endObject(); + return builder; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java new file mode 100644 index 0000000000000..b45275c5e59ad --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Objects; + +/** + * Forecast response object + */ +public class ForecastJobResponse extends ActionResponse implements ToXContentObject { + + public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); + public static final ParseField FORECAST_ID = new ParseField("forecast_id"); + + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("forecast_job_response", + true, + (a) -> new ForecastJobResponse((Boolean)a[0], (String)a[1])); + + static { + PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); + PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID); + } + + public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + private final boolean acknowledged; + private final String forecastId; + + public ForecastJobResponse(boolean acknowledged, String forecastId) { + this.acknowledged = acknowledged; + this.forecastId = forecastId; + } + + /** + * Forecast creating acknowledgement + * @return {@code true} indicates success, {@code false} otherwise + */ + public boolean isAcknowledged() { + return acknowledged; + } + + /** + * The created forecast ID + */ + public String getForecastId() { + return forecastId; + } + + @Override + public int hashCode() { + return Objects.hash(acknowledged, forecastId); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ForecastJobResponse other = (ForecastJobResponse) obj; + return Objects.equals(acknowledged, other.acknowledged) + && Objects.equals(forecastId, other.forecastId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); + builder.field(FORECAST_ID.getPreferredName(), forecastId); + builder.endObject(); + return builder; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index e0f20e2f23c83..26e6251af48d0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.client.ml.CloseJobRequest; import org.elasticsearch.client.ml.DeleteJobRequest; import org.elasticsearch.client.ml.FlushJobRequest; +import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetJobRequest; @@ -173,6 +174,21 @@ public void testFlushJob() throws Exception { requestEntityToString(request)); } + public void testForecastJob() throws Exception { + String jobId = randomAlphaOfLength(10); + ForecastJobRequest forecastJobRequest = new 
ForecastJobRequest(jobId); + + forecastJobRequest.setDuration(TimeValue.timeValueHours(10)); + forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(12)); + Request request = MLRequestConverters.forecastJob(forecastJobRequest); + assertEquals(HttpPost.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + ForecastJobRequest parsedRequest = ForecastJobRequest.PARSER.apply(parser, null); + assertThat(parsedRequest, equalTo(forecastJobRequest)); + } + } + public void testUpdateJob() throws Exception { String jobId = randomAlphaOfLength(10); JobUpdate updates = JobUpdateTests.createRandom(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 93019ba0d43e0..fb715683b2709 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -20,6 +20,8 @@ import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.client.ml.ForecastJobRequest; +import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.PostDataRequest; import org.elasticsearch.client.ml.PostDataResponse; import org.elasticsearch.client.ml.UpdateJobRequest; @@ -223,6 +225,31 @@ public void testGetJobStats() throws Exception { assertThat(exception.status().getStatus(), equalTo(404)); } + public void testForecastJob() throws Exception { + String jobId = "ml-forecast-job-test"; + Job job = buildJob(jobId); + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + machineLearningClient.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + machineLearningClient.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + + PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); + for(int i = 0; i < 30; i++) { + Map hashMap = new HashMap<>(); + hashMap.put("total", randomInt(1000)); + hashMap.put("timestamp", (i+1)*1000); + builder.addDoc(hashMap); + } + PostDataRequest postDataRequest = new PostDataRequest(jobId, builder); + machineLearningClient.postData(postDataRequest, RequestOptions.DEFAULT); + machineLearningClient.flushJob(new FlushJobRequest(jobId), RequestOptions.DEFAULT); + + ForecastJobRequest request = new ForecastJobRequest(jobId); + ForecastJobResponse response = execute(request, machineLearningClient::forecastJob, machineLearningClient::forecastJobAsync); + + assertTrue(response.isAcknowledged()); + assertNotNull(response.getForecastId()); + } + public void testPostData() throws Exception { String jobId = randomValidJobId(); Job job = buildJob(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index bc452ad8503f7..9abef54d0d24f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -35,6 +35,8 @@ import 
org.elasticsearch.client.ml.DeleteJobResponse; import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.FlushJobResponse; +import org.elasticsearch.client.ml.ForecastJobRequest; +import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; @@ -694,6 +696,73 @@ public void onFailure(Exception e) { } } + public void testForecastJob() throws Exception { + RestHighLevelClient client = highLevelClient(); + + Job job = MachineLearningIT.buildJob("forecasting-my-first-machine-learning-job"); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + client.machineLearning().openJob(new OpenJobRequest(job.getId()), RequestOptions.DEFAULT); + + PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder(); + for(int i = 0; i < 30; i++) { + Map hashMap = new HashMap<>(); + hashMap.put("total", randomInt(1000)); + hashMap.put("timestamp", (i+1)*1000); + builder.addDoc(hashMap); + } + PostDataRequest postDataRequest = new PostDataRequest(job.getId(), builder); + client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT); + client.machineLearning().flushJob(new FlushJobRequest(job.getId()), RequestOptions.DEFAULT); + + { + //tag::x-pack-ml-forecast-job-request + ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); //<1> + //end::x-pack-ml-forecast-job-request + + //tag::x-pack-ml-forecast-job-request-options + forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(48)); //<1> + forecastJobRequest.setDuration(TimeValue.timeValueHours(24)); //<2> + //end::x-pack-ml-forecast-job-request-options + + //tag::x-pack-ml-forecast-job-execute + ForecastJobResponse forecastJobResponse = client.machineLearning().forecastJob(forecastJobRequest, RequestOptions.DEFAULT); + //end::x-pack-ml-forecast-job-execute + + //tag::x-pack-ml-forecast-job-response + boolean isAcknowledged = forecastJobResponse.isAcknowledged(); //<1> + String forecastId = forecastJobResponse.getForecastId(); //<2> + //end::x-pack-ml-forecast-job-response + assertTrue(isAcknowledged); + assertNotNull(forecastId); + } + { + //tag::x-pack-ml-forecast-job-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ForecastJobResponse forecastJobResponse) { + //<1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::x-pack-ml-forecast-job-listener + ForecastJobRequest forecastJobRequest = new ForecastJobRequest("forecasting-my-first-machine-learning-job"); + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-forecast-job-execute-async + client.machineLearning().forecastJobAsync(forecastJobRequest, RequestOptions.DEFAULT, listener); //<1> + // end::x-pack-ml-forecast-job-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testGetOverallBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobRequestTests.java new file mode 100644 index 0000000000000..c6a33dad609ca --- /dev/null +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobRequestTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class ForecastJobRequestTests extends AbstractXContentTestCase { + + @Override + protected ForecastJobRequest createTestInstance() { + ForecastJobRequest request = new ForecastJobRequest(randomAlphaOfLengthBetween(1, 20)); + + if (randomBoolean()) { + request.setExpiresIn(TimeValue.timeValueHours(randomInt(10))); + } + if (randomBoolean()) { + request.setDuration(TimeValue.timeValueHours(randomIntBetween(24, 72))); + } + return request; + } + + @Override + protected ForecastJobRequest doParseInstance(XContentParser parser) throws IOException { + return ForecastJobRequest.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobResponseTests.java new file mode 100644 index 0000000000000..c7833a79cba32 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/ForecastJobResponseTests.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class ForecastJobResponseTests extends AbstractXContentTestCase { + + @Override + protected ForecastJobResponse createTestInstance() { + return new ForecastJobResponse(randomBoolean(),randomAlphaOfLength(10)); + } + + @Override + protected ForecastJobResponse doParseInstance(XContentParser parser) throws IOException { + return ForecastJobResponse.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/docs/java-rest/high-level/ml/forecast-job.asciidoc b/docs/java-rest/high-level/ml/forecast-job.asciidoc new file mode 100644 index 0000000000000..88bd5fdb532dd --- /dev/null +++ b/docs/java-rest/high-level/ml/forecast-job.asciidoc @@ -0,0 +1,76 @@ +[[java-rest-high-x-pack-ml-forecast-job]] +=== Forecast Job API + +The Forecast Job API provides the ability to forecast a {ml} job's behavior based +on historical data. +It accepts a `ForecastJobRequest` object and responds +with a `ForecastJobResponse` object. + +[[java-rest-high-x-pack-ml-forecast-job-request]] +==== Forecast Job Request + +A `ForecastJobRequest` object gets created with an existing non-null `jobId`. +All other fields are optional for the request. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request] +-------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` + +==== Optional Arguments + +The following arguments are optional. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-request-options] +-------------------------------------------------- +<1> Set when the forecast for the job should expire +<2> Set how far into the future should the forecast predict + +[[java-rest-high-x-pack-ml-forecast-job-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-ml-forecast-job-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-execute-async] +-------------------------------------------------- +<1> The `ForecastJobRequest` to execute and the `ActionListener` to use when +the execution completes + +The method does not block and returns immediately. The passed `ActionListener` is used +to notify the caller of completion. 
A typical `ActionListener` for `ForecastJobResponse` may +look like + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-x-pack-ml-forecast-job-response]] +==== Forecast Job Response + +A `ForecastJobResponse` contains an acknowledgement and the forecast ID + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-forecast-job-response] +-------------------------------------------------- +<1> `isAcknowledged()` indicates if the forecast was successful +<2> `getForecastId()` provides the ID of the forecast that was created \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index e1335b0effc5d..eb03d8ee4c6a6 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -218,6 +218,7 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> * <> * <> * <> @@ -232,6 +233,7 @@ include::ml/close-job.asciidoc[] include::ml/update-job.asciidoc[] include::ml/flush-job.asciidoc[] include::ml/get-job-stats.asciidoc[] +include::ml/forecast-job.asciidoc[] include::ml/get-buckets.asciidoc[] include::ml/get-overall-buckets.asciidoc[] include::ml/get-records.asciidoc[] From e42cc5cd8ce50fdbfb890f91ffc15bf147c112cc Mon Sep 17 00:00:00 2001 From: David Roberts Date: Fri, 7 Sep 2018 17:41:57 +0100 Subject: [PATCH 37/91] [ML] Add a file structure determination endpoint (#33471) This endpoint accepts an arbitrary file in the request body and attempts to determine the structure. If successful it also proposes mappings that could be used when indexing the file's contents, and calculates simple statistics for each of the fields that are useful in the data preparation step prior to configuring machine learning jobs. 
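To make the behavior concrete, here is a rough sketch of how a caller might exercise the new endpoint from Java once this lands. It is a hedged illustration, not part of the change: it uses the low-level REST client, the endpoint path follows the new RestFindFileStructureAction and REST spec added below, lines_to_sample mirrors the request's LINES_TO_SAMPLE field and is assumed to be exposed as a query parameter, and the host, port, and two-line NDJSON sample are invented values.

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class FindFileStructureSketch {
    public static void main(String[] args) throws Exception {
        // Assumed local test node; adjust host and port as needed.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("POST", "/_xpack/ml/find_file_structure");
            // Optional: the server falls back to DEFAULT_IDEAL_SAMPLE_LINE_COUNT (1000) when omitted.
            request.addParameter("lines_to_sample", "500");
            // The request body is the raw sample file; NDJSON is one structure the finder can detect.
            request.setJsonEntity("{\"message\":\"first log line\"}\n{\"message\":\"second log line\"}\n");
            Response response = client.performRequest(request);
            // The response body is the determined FileStructure rendered as JSON.
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}

Judging from the new FileStructure.EXPLAIN constant, the step-by-step explanation list is only rendered when an explain flag is set, so the default response stays compact.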
--- .../xpack/core/XPackClientPlugin.java | 2 + .../ml/action/FindFileStructureAction.java | 183 ++++++++++++++++++ .../ml/filestructurefinder/FieldStats.java | 26 ++- .../ml/filestructurefinder/FileStructure.java | 71 ++++++- .../FindFileStructureActionRequestTests.java | 59 ++++++ .../FindFileStructureActionResponseTests.java | 22 +++ .../filestructurefinder/FieldStatsTests.java | 16 +- .../FileStructureTests.java | 26 ++- .../smoketest/MlWithSecurityUserRoleIT.java | 5 +- .../xpack/ml/MachineLearning.java | 9 +- .../TransportFindFileStructureAction.java | 57 ++++++ .../FileStructureFinderManager.java | 11 +- .../GrokPatternCreator.java | 12 +- .../ml/rest/RestFindFileStructureAction.java | 55 ++++++ .../api/xpack.ml.find_file_structure.json | 25 +++ .../test/ml/find_file_structure.yml | 44 +++++ 16 files changed, 596 insertions(+), 27 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionResponseTests.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 03820b1f40b22..190a9a2215ee6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -56,6 +56,7 @@ import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; +import org.elasticsearch.xpack.core.ml.action.FindFileStructureAction; import org.elasticsearch.xpack.core.ml.action.FlushJobAction; import org.elasticsearch.xpack.core.ml.action.ForecastJobAction; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; @@ -265,6 +266,7 @@ public List> getClientActions() { GetCalendarEventsAction.INSTANCE, PostCalendarEventsAction.INSTANCE, PersistJobAction.INSTANCE, + FindFileStructureAction.INSTANCE, // security ClearRealmCacheAction.INSTANCE, ClearRolesCacheAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java new file mode 100644 index 0000000000000..9fda416b33bbe --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureAction.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; + +import java.io.IOException; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class FindFileStructureAction extends Action { + + public static final FindFileStructureAction INSTANCE = new FindFileStructureAction(); + public static final String NAME = "cluster:monitor/xpack/ml/findfilestructure"; + + private FindFileStructureAction() { + super(NAME); + } + + @Override + public Response newResponse() { + return new Response(); + } + + static class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client, FindFileStructureAction action) { + super(client, action, new Request()); + } + } + + public static class Response extends ActionResponse implements StatusToXContentObject, Writeable { + + private FileStructure fileStructure; + + public Response(FileStructure fileStructure) { + this.fileStructure = fileStructure; + } + + Response() { + } + + public FileStructure getFileStructure() { + return fileStructure; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + fileStructure = new FileStructure(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + fileStructure.writeTo(out); + } + + @Override + public RestStatus status() { + return RestStatus.OK; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + fileStructure.toXContent(builder, params); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(fileStructure); + } + + @Override + public boolean equals(Object other) { + + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + FindFileStructureAction.Response that = (FindFileStructureAction.Response) other; + return Objects.equals(fileStructure, that.fileStructure); + } + } + + public static class Request extends ActionRequest { + + public static final ParseField LINES_TO_SAMPLE = new ParseField("lines_to_sample"); + + private Integer linesToSample; + private BytesReference sample; + + public Request() { + } + + public Integer getLinesToSample() { + return linesToSample; + } + + public void setLinesToSample(Integer linesToSample) { + this.linesToSample = linesToSample; + } + + public BytesReference getSample() { + return sample; + } + + public void setSample(BytesReference sample) { + this.sample = sample; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (linesToSample != null && 
linesToSample <= 0) { + validationException = + addValidationError(LINES_TO_SAMPLE.getPreferredName() + " must be positive if specified", validationException); + } + if (sample == null || sample.length() == 0) { + validationException = addValidationError("sample must be specified", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + linesToSample = in.readOptionalVInt(); + sample = in.readBytesReference(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalVInt(linesToSample); + out.writeBytesReference(sample); + } + + @Override + public int hashCode() { + return Objects.hash(linesToSample, sample); + } + + @Override + public boolean equals(Object other) { + + if (this == other) { + return true; + } + + if (other == null || getClass() != other.getClass()) { + return false; + } + + Request that = (Request) other; + return Objects.equals(this.linesToSample, that.linesToSample) && + Objects.equals(this.sample, that.sample); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java index a09aa522f7f87..8f624d000cc38 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStats.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -16,7 +19,7 @@ import java.util.Map; import java.util.Objects; -public class FieldStats implements ToXContentObject { +public class FieldStats implements ToXContentObject, Writeable { static final ParseField COUNT = new ParseField("count"); static final ParseField CARDINALITY = new ParseField("cardinality"); @@ -64,6 +67,27 @@ public FieldStats(long count, int cardinality, Double minValue, Double maxValue, this.topHits = (topHits == null) ? 
Collections.emptyList() : Collections.unmodifiableList(topHits); } + public FieldStats(StreamInput in) throws IOException { + count = in.readVLong(); + cardinality = in.readVInt(); + minValue = in.readOptionalDouble(); + maxValue = in.readOptionalDouble(); + meanValue = in.readOptionalDouble(); + medianValue = in.readOptionalDouble(); + topHits = in.readList(StreamInput::readMap); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVLong(count); + out.writeVInt(cardinality); + out.writeOptionalDouble(minValue); + out.writeOptionalDouble(maxValue); + out.writeOptionalDouble(meanValue); + out.writeOptionalDouble(medianValue); + out.writeCollection(topHits, StreamOutput::writeMap); + } + public long getCount() { return count; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java index 6993737e8547d..5484f9f9902f4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java @@ -6,6 +6,9 @@ package org.elasticsearch.xpack.core.ml.filestructurefinder; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -24,7 +27,7 @@ /** * Stores the file format determined by Machine Learning. */ -public class FileStructure implements ToXContentObject { +public class FileStructure implements ToXContentObject, Writeable { public enum Format { @@ -79,6 +82,8 @@ public String toString() { } } + public static final String EXPLAIN = "explain"; + static final ParseField NUM_LINES_ANALYZED = new ParseField("num_lines_analyzed"); static final ParseField NUM_MESSAGES_ANALYZED = new ParseField("num_messages_analyzed"); static final ParseField SAMPLE_START = new ParseField("sample_start"); @@ -176,6 +181,66 @@ public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampl this.explanation = Collections.unmodifiableList(new ArrayList<>(explanation)); } + public FileStructure(StreamInput in) throws IOException { + numLinesAnalyzed = in.readVInt(); + numMessagesAnalyzed = in.readVInt(); + sampleStart = in.readString(); + charset = in.readString(); + hasByteOrderMarker = in.readOptionalBoolean(); + format = in.readEnum(Format.class); + multilineStartPattern = in.readOptionalString(); + excludeLinesPattern = in.readOptionalString(); + inputFields = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; + hasHeaderRow = in.readOptionalBoolean(); + delimiter = in.readBoolean() ? (char) in.readVInt() : null; + shouldTrimFields = in.readOptionalBoolean(); + grokPattern = in.readOptionalString(); + timestampFormats = in.readBoolean() ? 
Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; + timestampField = in.readOptionalString(); + needClientTimezone = in.readBoolean(); + mappings = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap())); + fieldStats = Collections.unmodifiableSortedMap(new TreeMap<>(in.readMap(StreamInput::readString, FieldStats::new))); + explanation = Collections.unmodifiableList(in.readList(StreamInput::readString)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(numLinesAnalyzed); + out.writeVInt(numMessagesAnalyzed); + out.writeString(sampleStart); + out.writeString(charset); + out.writeOptionalBoolean(hasByteOrderMarker); + out.writeEnum(format); + out.writeOptionalString(multilineStartPattern); + out.writeOptionalString(excludeLinesPattern); + if (inputFields == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeCollection(inputFields, StreamOutput::writeString); + } + out.writeOptionalBoolean(hasHeaderRow); + if (delimiter == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeVInt(delimiter); + } + out.writeOptionalBoolean(shouldTrimFields); + out.writeOptionalString(grokPattern); + if (timestampFormats == null) { + out.writeBoolean(false); + } else { + out.writeBoolean(true); + out.writeCollection(timestampFormats, StreamOutput::writeString); + } + out.writeOptionalString(timestampField); + out.writeBoolean(needClientTimezone); + out.writeMap(mappings); + out.writeMap(fieldStats, StreamOutput::writeString, (out1, value) -> value.writeTo(out1)); + out.writeCollection(explanation, StreamOutput::writeString); + } + public int getNumLinesAnalyzed() { return numLinesAnalyzed; } @@ -300,7 +365,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } builder.endObject(); } - builder.field(EXPLANATION.getPreferredName(), explanation); + if (params.paramAsBoolean(EXPLAIN, false)) { + builder.field(EXPLANATION.getPreferredName(), explanation); + } builder.endObject(); return builder; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionRequestTests.java new file mode 100644 index 0000000000000..05ba0e7f306f4 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionRequestTests.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.AbstractStreamableTestCase; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.startsWith; + +public class FindFileStructureActionRequestTests extends AbstractStreamableTestCase { + + @Override + protected FindFileStructureAction.Request createTestInstance() { + + FindFileStructureAction.Request request = new FindFileStructureAction.Request(); + + if (randomBoolean()) { + request.setLinesToSample(randomIntBetween(10, 2000)); + } + request.setSample(new BytesArray(randomByteArrayOfLength(randomIntBetween(1000, 20000)))); + + return request; + } + + @Override + protected FindFileStructureAction.Request createBlankInstance() { + return new FindFileStructureAction.Request(); + } + + public void testValidateLinesToSample() { + + FindFileStructureAction.Request request = new FindFileStructureAction.Request(); + request.setLinesToSample(randomFrom(-1, 0)); + request.setSample(new BytesArray("foo\n")); + + ActionRequestValidationException e = request.validate(); + assertNotNull(e); + assertThat(e.getMessage(), startsWith("Validation Failed: ")); + assertThat(e.getMessage(), containsString(" lines_to_sample must be positive if specified")); + } + + public void testValidateSample() { + + FindFileStructureAction.Request request = new FindFileStructureAction.Request(); + if (randomBoolean()) { + request.setSample(BytesArray.EMPTY); + } + + ActionRequestValidationException e = request.validate(); + assertNotNull(e); + assertThat(e.getMessage(), startsWith("Validation Failed: ")); + assertThat(e.getMessage(), containsString(" sample must be specified")); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionResponseTests.java new file mode 100644 index 0000000000000..706ee44a4fd97 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/FindFileStructureActionResponseTests.java @@ -0,0 +1,22 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructureTests; + +public class FindFileStructureActionResponseTests extends AbstractStreamableTestCase { + + @Override + protected FindFileStructureAction.Response createTestInstance() { + return new FindFileStructureAction.Response(FileStructureTests.createTestFileStructure()); + } + + @Override + protected FindFileStructureAction.Response createBlankInstance() { + return new FindFileStructureAction.Response(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java index 2041fb26a6259..30f7c8f5576d0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FieldStatsTests.java @@ -5,16 +5,18 @@ */ package org.elasticsearch.xpack.core.ml.filestructurefinder; +import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -public class FieldStatsTests extends AbstractXContentTestCase { +public class FieldStatsTests extends AbstractSerializingTestCase { + @Override protected FieldStats createTestInstance() { return createTestFieldStats(); } @@ -51,11 +53,13 @@ static FieldStats createTestFieldStats() { return new FieldStats(count, cardinality, minValue, maxValue, meanValue, medianValue, topHits); } - protected FieldStats doParseInstance(XContentParser parser) { - return FieldStats.PARSER.apply(parser, null); + @Override + protected Writeable.Reader instanceReader() { + return FieldStats::new; } - protected boolean supportsUnknownFields() { - return false; + @Override + protected FieldStats doParseInstance(XContentParser parser) { + return FieldStats.PARSER.apply(parser, null); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index 5e89a4840b585..6dcf675196508 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -5,8 +5,10 @@ */ package org.elasticsearch.xpack.core.ml.filestructurefinder; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.AbstractSerializingTestCase; import java.nio.charset.Charset; import java.util.Arrays; @@ -16,9 +18,14 @@ import java.util.Map; import java.util.TreeMap; -public class FileStructureTests extends AbstractXContentTestCase { +public class FileStructureTests extends AbstractSerializingTestCase { + @Override protected FileStructure createTestInstance() { + return createTestFileStructure(); + } + + public static FileStructure 
createTestFileStructure() { FileStructure.Format format = randomFrom(EnumSet.allOf(FileStructure.Format.class)); @@ -66,24 +73,31 @@ protected FileStructure createTestInstance() { } builder.setMappings(mappings); - //if (randomBoolean()) { + if (randomBoolean()) { Map fieldStats = new TreeMap<>(); for (String field : generateRandomStringArray(5, 20, false, false)) { fieldStats.put(field, FieldStatsTests.createTestFieldStats()); } builder.setFieldStats(fieldStats); - //} + } builder.setExplanation(Arrays.asList(generateRandomStringArray(10, 150, false, false))); return builder.build(); } + @Override + protected Writeable.Reader instanceReader() { + return FileStructure::new; + } + + @Override protected FileStructure doParseInstance(XContentParser parser) { return FileStructure.PARSER.apply(parser, null).build(); } - protected boolean supportsUnknownFields() { - return false; + @Override + protected ToXContent.Params getToXContentParams() { + return new ToXContent.MapParams(Collections.singletonMap(FileStructure.EXPLAIN, "true")); } } diff --git a/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java index b103d30f282e2..9e31ddb131c6f 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java +++ b/x-pack/plugin/ml/qa/ml-with-security/src/test/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java @@ -31,10 +31,13 @@ public void test() throws IOException { super.test(); // We should have got here if and only if the only ML endpoints in the test were GETs + // or the find_file_structure API, which is also available to the machine_learning_user + // role for (ExecutableSection section : testCandidate.getTestSection().getExecutableSections()) { if (section instanceof DoSection) { if (((DoSection) section).getApiCallSection().getApi().startsWith("xpack.ml.") && - ((DoSection) section).getApiCallSection().getApi().startsWith("xpack.ml.get_") == false) { + ((DoSection) section).getApiCallSection().getApi().startsWith("xpack.ml.get_") == false && + ((DoSection) section).getApiCallSection().getApi().equals("xpack.ml.find_file_structure") == false) { fail("should have failed because of missing role"); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 343833c080618..cd13b2c8bb657 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -66,6 +66,7 @@ import org.elasticsearch.xpack.core.ml.action.DeleteJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.FinalizeJobExecutionAction; +import org.elasticsearch.xpack.core.ml.action.FindFileStructureAction; import org.elasticsearch.xpack.core.ml.action.FlushJobAction; import org.elasticsearch.xpack.core.ml.action.ForecastJobAction; import org.elasticsearch.xpack.core.ml.action.GetBucketsAction; @@ -119,6 +120,7 @@ import org.elasticsearch.xpack.ml.action.TransportDeleteJobAction; import org.elasticsearch.xpack.ml.action.TransportDeleteModelSnapshotAction; import org.elasticsearch.xpack.ml.action.TransportFinalizeJobExecutionAction; +import 
org.elasticsearch.xpack.ml.action.TransportFindFileStructureAction; import org.elasticsearch.xpack.ml.action.TransportFlushJobAction; import org.elasticsearch.xpack.ml.action.TransportForecastJobAction; import org.elasticsearch.xpack.ml.action.TransportGetBucketsAction; @@ -180,6 +182,7 @@ import org.elasticsearch.xpack.ml.job.process.normalizer.NormalizerProcessFactory; import org.elasticsearch.xpack.ml.notifications.Auditor; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; +import org.elasticsearch.xpack.ml.rest.RestFindFileStructureAction; import org.elasticsearch.xpack.ml.rest.RestMlInfoAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarEventAction; @@ -500,7 +503,8 @@ public List getRestHandlers(Settings settings, RestController restC new RestDeleteCalendarJobAction(settings, restController), new RestPutCalendarJobAction(settings, restController), new RestGetCalendarEventsAction(settings, restController), - new RestPostCalendarEventAction(settings, restController) + new RestPostCalendarEventAction(settings, restController), + new RestFindFileStructureAction(settings, restController) ); } @@ -557,7 +561,8 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(UpdateCalendarJobAction.INSTANCE, TransportUpdateCalendarJobAction.class), new ActionHandler<>(GetCalendarEventsAction.INSTANCE, TransportGetCalendarEventsAction.class), new ActionHandler<>(PostCalendarEventsAction.INSTANCE, TransportPostCalendarEventsAction.class), - new ActionHandler<>(PersistJobAction.INSTANCE, TransportPersistJobAction.class) + new ActionHandler<>(PersistJobAction.INSTANCE, TransportPersistJobAction.class), + new ActionHandler<>(FindFileStructureAction.INSTANCE, TransportFindFileStructureAction.class) ); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java new file mode 100644 index 0000000000000..66d07f5111c52 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFindFileStructureAction.java @@ -0,0 +1,57 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.FindFileStructureAction; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinder; +import org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinderManager; + +public class TransportFindFileStructureAction + extends HandledTransportAction { + + private final ThreadPool threadPool; + + @Inject + public TransportFindFileStructureAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ThreadPool threadPool) { + super(settings, FindFileStructureAction.NAME, transportService, actionFilters, FindFileStructureAction.Request::new); + this.threadPool = threadPool; + } + + @Override + protected void doExecute(Task task, FindFileStructureAction.Request request, + ActionListener listener) { + + // As determining the file structure might take a while, we run + // in a different thread to avoid blocking the network thread. + threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> { + try { + listener.onResponse(buildFileStructureResponse(request)); + } catch (Exception e) { + listener.onFailure(e); + } + }); + } + + private FindFileStructureAction.Response buildFileStructureResponse(FindFileStructureAction.Request request) throws Exception { + + FileStructureFinderManager structureFinderManager = new FileStructureFinderManager(); + + FileStructureFinder fileStructureFinder = + structureFinderManager.findFileStructure(request.getLinesToSample(), request.getSample().streamInput()); + + return new FindFileStructureAction.Response(fileStructureFinder.getStructure()); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java index 983188614d0ca..d0ce68aff25c0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/FileStructureFinderManager.java @@ -35,6 +35,7 @@ public final class FileStructureFinderManager { public static final int MIN_SAMPLE_LINE_COUNT = 2; + public static final int DEFAULT_IDEAL_SAMPLE_LINE_COUNT = 1000; static final Set FILEBEAT_SUPPORTED_ENCODINGS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( "866", "ansi_x3.4-1968", "arabic", "ascii", "asmo-708", "big5", "big5-hkscs", "chinese", "cn-big5", "cp1250", "cp1251", "cp1252", @@ -82,16 +83,18 @@ public final class FileStructureFinderManager { * Given a stream of data from some file, determine its structure. * @param idealSampleLineCount Ideally, how many lines from the stream will be read to determine the structure? * If the stream has fewer lines then an attempt will still be made, providing at - * least {@link #MIN_SAMPLE_LINE_COUNT} lines can be read. + * least {@link #MIN_SAMPLE_LINE_COUNT} lines can be read. 
If null, + the value of {@link #DEFAULT_IDEAL_SAMPLE_LINE_COUNT} will be used. * @param fromFile A stream from which the sample will be read. * @return A {@link FileStructureFinder} object from which the structure and messages can be queried. * @throws Exception A variety of problems could occur at various stages of the structure finding process. */ - public FileStructureFinder findLogStructure(int idealSampleLineCount, InputStream fromFile) throws Exception { - return findLogStructure(new ArrayList<>(), idealSampleLineCount, fromFile); + public FileStructureFinder findFileStructure(Integer idealSampleLineCount, InputStream fromFile) throws Exception { + return findFileStructure(new ArrayList<>(), (idealSampleLineCount == null) ? DEFAULT_IDEAL_SAMPLE_LINE_COUNT : idealSampleLineCount, + fromFile); } - public FileStructureFinder findLogStructure(List explanation, int idealSampleLineCount, InputStream fromFile) + public FileStructureFinder findFileStructure(List explanation, int idealSampleLineCount, InputStream fromFile) throws Exception { CharsetMatch charsetMatch = findCharset(explanation, fromFile); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java index 3caa78589ba1b..292d0b8e8b305 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/GrokPatternCreator.java @@ -445,7 +445,7 @@ public boolean matchesAll(Collection snippets) { @Override public String processCaptures(Map fieldNameCountStore, Collection snippets, Collection prefaces, Collection epilogues, Map mappings, Map fieldStats) { - String sampleValue = null; + Collection values = new ArrayList<>(); for (String snippet : snippets) { Map captures = grok.captures(snippet); // If the pattern doesn't match then captures will be null @@ -453,22 +453,24 @@ public String processCaptures(Map fieldNameCountStore, Collecti throw new IllegalStateException("[%{" + grokPatternName + "}] does not match snippet [" + snippet + "]"); } prefaces.add(captures.getOrDefault(PREFACE, "").toString()); - if (sampleValue == null) { - sampleValue = captures.get(VALUE).toString(); - } + values.add(captures.getOrDefault(VALUE, "").toString()); epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); } String adjustedFieldName = buildFieldName(fieldNameCountStore, fieldName); if (mappings != null) { Map fullMappingType = Collections.singletonMap(FileStructureUtils.MAPPING_TYPE_SETTING, mappingType); if ("date".equals(mappingType)) { - TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(sampleValue); + assert values.isEmpty() == false; + TimestampMatch timestampMatch = TimestampFormatFinder.findFirstFullMatch(values.iterator().next()); if (timestampMatch != null) { fullMappingType = timestampMatch.getEsDateMappingTypeWithFormat(); } } mappings.put(adjustedFieldName, fullMappingType); } + if (fieldStats != null) { + fieldStats.put(adjustedFieldName, FileStructureUtils.calculateFieldStats(values)); + } return "%{" + grokPatternName + ":" + adjustedFieldName + "}"; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java new file mode 100644 index 0000000000000..83293c7d60efa ---
/dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestFindFileStructureAction.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ml.action.FindFileStructureAction; +import org.elasticsearch.xpack.core.ml.filestructurefinder.FileStructure; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.filestructurefinder.FileStructureFinderManager; + +import java.io.IOException; +import java.util.Collections; +import java.util.Set; + +public class RestFindFileStructureAction extends BaseRestHandler { + + public RestFindFileStructureAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "find_file_structure", this); + } + + @Override + public String getName() { + return "xpack_ml_find_file_structure_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + + FindFileStructureAction.Request request = new FindFileStructureAction.Request(); + request.setLinesToSample(restRequest.paramAsInt(FindFileStructureAction.Request.LINES_TO_SAMPLE.getPreferredName(), + FileStructureFinderManager.DEFAULT_IDEAL_SAMPLE_LINE_COUNT)); + if (restRequest.hasContent()) { + request.setSample(restRequest.content()); + } else { + throw new ElasticsearchParseException("request body is required"); + } + + return channel -> client.execute(FindFileStructureAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + protected Set responseParams() { + return Collections.singleton(FileStructure.EXPLAIN); + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json new file mode 100644 index 0000000000000..bd41e0c00bca8 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.find_file_structure.json @@ -0,0 +1,25 @@ +{ + "xpack.ml.find_file_structure": { + "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-file-structure.html", + "methods": [ "POST" ], + "url": { + "path": "/_xpack/ml/find_file_structure", + "paths": [ "/_xpack/ml/find_file_structure" ], + "params": { + "lines_to_sample": { + "type": "int", + "description": "Optional parameter to specify how many lines of the file to include in the analysis" + }, + "explain": { + "type": "boolean", + "description": "Optional parameter to include a commentary on how the structure was derived" + } + } + }, + "body": { + "description" : "The contents of the file to be analyzed", + "required" : true, + "serialize" : "bulk" + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml
b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml new file mode 100644 index 0000000000000..1d164cc0c5afc --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/find_file_structure.yml @@ -0,0 +1,43 @@ +--- +"Test JSON file structure analysis": + - do: + headers: + # This is to stop the usual content type randomization, which + # would obviously ruin the results for this particular test + Content-Type: "application/json" + xpack.ml.find_file_structure: + body: + - airline: AAL + responsetime: 132.2046 + sourcetype: file-structure-test + time: 1403481600 + - airline: JZA + responsetime: 990.4628 + sourcetype: file-structure-test + time: 1403481700 + - airline: AAL + responsetime: 134.2046 + sourcetype: file-structure-test + time: 1403481800 + + - match: { num_lines_analyzed: 3 } + - match: { num_messages_analyzed: 3 } + - match: { charset: "UTF-8" } + - match: { has_byte_order_marker: false } + - match: { format: json } + - match: { timestamp_field: time } + - match: { timestamp_formats.0: UNIX } + - match: { need_client_timezone: false } + - match: { mappings.airline.type: keyword } + - match: { mappings.responsetime.type: double } + - match: { mappings.sourcetype.type: keyword } + - match: { mappings.time.type: date } + - match: { mappings.time.format: epoch_second } + - match: { field_stats.airline.count: 3 } + - match: { field_stats.airline.cardinality: 2 } + - match: { field_stats.responsetime.count: 3 } + - match: { field_stats.responsetime.cardinality: 3 } + - match: { field_stats.sourcetype.count: 3 } + - match: { field_stats.sourcetype.cardinality: 1 } + - match: { field_stats.time.count: 3 } + - match: { field_stats.time.cardinality: 3 } From ab7e6961083fcaa97df77f71eab3d892515910e8 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Fri, 7 Sep 2018 12:58:00 -0400 Subject: [PATCH 38/91] TEST: Ensure merge triggered in _source retention test (#33487) We invoke force merge twice in the test to verify that recovery sources are pruned when the global checkpoint has advanced. However, if the global checkpoint equals the local checkpoint in the first force-merge, the second force-merge will be a noop because all deleted docs are expunged in the first merge already. We need to flush a new segment to make a merge happen so that we can verify that all recovery sources are pruned. --- .../index/engine/InternalEngineTests.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 6d9cdd0f225d7..a26fd72468b48 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1521,6 +1521,16 @@ public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exc settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0); indexSettings.updateIndexMetaData(IndexMetaData.builder(defaultSettings.getIndexMetaData()).settings(settings).build()); engine.onSettingsChanged(); + // If the global checkpoint equals the local checkpoint, the next force-merge will be a noop + // because all deleted documents are expunged in the previous force-merge already. We need to flush + // a new segment to make a merge happen so that we can verify that all _recovery_source are pruned.
+ if (globalCheckpoint.get() == engine.getLocalCheckpoint() && liveDocs.isEmpty() == false) { + String deleteId = randomFrom(liveDocs); + engine.delete(new Engine.Delete("test", deleteId, newUid(deleteId), primaryTerm.get())); + liveDocsWithSource.remove(deleteId); + liveDocs.remove(deleteId); + engine.flush(); + } globalCheckpoint.set(engine.getLocalCheckpoint()); engine.syncTranslog(); engine.forceMerge(true, 1, false, false, false); From 944868908cf326a31043e86c7f4a222fbd221dab Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 7 Sep 2018 10:26:44 -0700 Subject: [PATCH 39/91] [DOCS] Fixes formatting error --- docs/reference/index-modules.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/index-modules.asciidoc b/docs/reference/index-modules.asciidoc index 53de67e55fdf9..70c3d09dc930b 100644 --- a/docs/reference/index-modules.asciidoc +++ b/docs/reference/index-modules.asciidoc @@ -179,9 +179,9 @@ specific index module: `index.blocks.write`:: -   Set to `true` to disable data write operations against the index. Unlike `read_only', -   this setting does not affect metadata. For instance, you can close an index with a `write` -   block, but not an index with a `read_only` block. + Set to `true` to disable data write operations against the index. Unlike `read_only`, + this setting does not affect metadata. For instance, you can close an index with a `write` + block, but not an index with a `read_only` block. `index.blocks.metadata`:: From 9d16a7b7f0bcea1d3139671eaa983e7c6ed5d6c5 Mon Sep 17 00:00:00 2001 From: Jay Modi Date: Fri, 7 Sep 2018 11:51:37 -0600 Subject: [PATCH 40/91] HLRC: add enable and disable user API support (#33481) This change adds support for enable and disable user APIs to the high level rest client. There is a common request base class for both requests with specific requests that simplify the use of these APIs. The response for these APIs is simply an empty object so a new response class has been created for cases where we expect an empty response to be returned. Finally, the put user documentation has been moved to the proper location that is not within an x-pack sub directory and the document tags no longer contain x-pack. 
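As a minimal usage sketch of the two new calls (hand-written for illustration rather than taken from the diff below: it assumes an already-configured `RestHighLevelClient` named `client`, and "jdoe" is a placeholder username; the request and response classes live in `org.elasticsearch.client.security`):

    EnableUserRequest enableRequest = new EnableUserRequest("jdoe", RefreshPolicy.NONE);
    EmptyResponse enableResponse = client.security().enableUser(enableRequest, RequestOptions.DEFAULT);

    DisableUserRequest disableRequest = new DisableUserRequest("jdoe", RefreshPolicy.NONE);
    EmptyResponse disableResponse = client.security().disableUser(disableRequest, RequestOptions.DEFAULT);

Both synchronous methods throw IOException if the request cannot be sent or the response cannot be parsed; the field-less `EmptyResponse` itself only signals that the call succeeded.
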
See #29827 --- .../elasticsearch/client/SecurityClient.java | 59 ++++++++++ .../client/SecurityRequestConverters.java | 25 +++- .../client/security/DisableUserRequest.java | 30 +++++ .../client/security/EmptyResponse.java | 37 ++++++ .../client/security/EnableUserRequest.java | 30 +++++ .../security/SetUserEnabledRequest.java | 52 +++++++++ .../SecurityRequestConvertersTests.java | 41 ++++++- .../SecurityDocumentationIT.java | 108 ++++++++++++++++-- .../client/security/EmptyResponseTests.java | 51 +++++++++ .../high-level/security/disable-user.asciidoc | 46 ++++++++ .../high-level/security/enable-user.asciidoc | 46 ++++++++ .../{x-pack => }/security/put-user.asciidoc | 26 ++--- .../high-level/supported-apis.asciidoc | 12 ++ 13 files changed, 535 insertions(+), 28 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/EmptyResponse.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/security/EmptyResponseTests.java create mode 100644 docs/java-rest/high-level/security/disable-user.asciidoc create mode 100644 docs/java-rest/high-level/security/enable-user.asciidoc rename docs/java-rest/high-level/{x-pack => }/security/put-user.asciidoc (67%) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java index 19f56fbd1ec93..a4bc34004c247 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java @@ -20,8 +20,11 @@ package org.elasticsearch.client; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.client.security.DisableUserRequest; +import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.PutUserResponse; +import org.elasticsearch.client.security.EmptyResponse; import java.io.IOException; @@ -66,4 +69,60 @@ public void putUserAsync(PutUserRequest request, RequestOptions options, ActionL restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::putUser, options, PutUserResponse::fromXContent, listener, emptySet()); } + + /** + * Enable a native realm or built-in user synchronously. + * See + * the docs for more. + * @param request the request with the user to enable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response from the enable user call + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public EmptyResponse enableUser(EnableUserRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::enableUser, options, + EmptyResponse::fromXContent, emptySet()); + } + + /** + * Enable a native realm or built-in user asynchronously. + * See + * the docs for more. 
+ * @param request the request with the user to enable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void enableUserAsync(EnableUserRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::enableUser, options, + EmptyResponse::fromXContent, listener, emptySet()); + } + + /** + * Disable a native realm or built-in user synchronously. + * See + * the docs for more. + * @param request the request with the user to disable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response from the disable user call + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public EmptyResponse disableUser(DisableUserRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::disableUser, options, + EmptyResponse::fromXContent, emptySet()); + } + + /** + * Disable a native realm or built-in user asynchronously. + * See + * the docs for more. + * @param request the request with the user to disable + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void disableUserAsync(DisableUserRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::disableUser, options, + EmptyResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java index c414cdf82708a..8533e0f1b4cd4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java @@ -20,14 +20,17 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.security.DisableUserRequest; +import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; +import org.elasticsearch.client.security.SetUserEnabledRequest; import java.io.IOException; import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.RequestConverters.createEntity; -public final class SecurityRequestConverters { +final class SecurityRequestConverters { private SecurityRequestConverters() {} @@ -42,4 +45,24 @@ static Request putUser(PutUserRequest putUserRequest) throws IOException { params.withRefreshPolicy(putUserRequest.getRefreshPolicy()); return request; } + + static Request enableUser(EnableUserRequest enableUserRequest) { + return setUserEnabled(enableUserRequest); + } + + static Request disableUser(DisableUserRequest disableUserRequest) { + return setUserEnabled(disableUserRequest); + } + + private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) { + String endpoint = new RequestConverters.EndpointBuilder() +
.addPathPartAsIs("_xpack/security/user") + .addPathPart(setUserEnabledRequest.getUsername()) + .addPathPart(setUserEnabledRequest.isEnabled() ? "_enable" : "_disable") + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy()); + return request; + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java new file mode 100644 index 0000000000000..dc5411f3be7d1 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security; + +/** + * Request object to disable a native realm or built-in user. + */ +public final class DisableUserRequest extends SetUserEnabledRequest { + + public DisableUserRequest(String username, RefreshPolicy refreshPolicy) { + super(false, username, refreshPolicy); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EmptyResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EmptyResponse.java new file mode 100644 index 0000000000000..62fea88e52356 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EmptyResponse.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security; + +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; + +/** + * Response for a request which simply returns an empty object. 
+ */ +public final class EmptyResponse { + + private static final ObjectParser PARSER = new ObjectParser<>("empty_response", false, EmptyResponse::new); + + public static EmptyResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java new file mode 100644 index 0000000000000..851cb683e0551 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security; + +/** + * Request object to enable a native realm or built-in user. + */ +public final class EnableUserRequest extends SetUserEnabledRequest { + + public EnableUserRequest(String username, RefreshPolicy refreshPolicy) { + super(true, username, refreshPolicy); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java new file mode 100644 index 0000000000000..ab61f7d879d22 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security; + +import org.elasticsearch.client.Validatable; + +import java.util.Objects; + +/** + * Abstract request object to enable or disable a built-in or native user. 
+ */ +public abstract class SetUserEnabledRequest implements Validatable { + + private final boolean enabled; + private final String username; + private final RefreshPolicy refreshPolicy; + + SetUserEnabledRequest(boolean enabled, String username, RefreshPolicy refreshPolicy) { + this.enabled = enabled; + this.username = Objects.requireNonNull(username, "username is required"); + this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; + } + + public boolean isEnabled() { + return enabled; + } + + public String getUsername() { + return username; + } + + public RefreshPolicy getRefreshPolicy() { + return refreshPolicy; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java index 924fc6ddadbed..3670379cd9fee 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SecurityRequestConvertersTests.java @@ -20,6 +20,8 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.security.DisableUserRequest; +import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.test.ESTestCase; @@ -53,12 +55,7 @@ public void testPutUser() throws IOException { } final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values()); - final Map expectedParams; - if (refreshPolicy != RefreshPolicy.NONE) { - expectedParams = Collections.singletonMap("refresh", refreshPolicy.getValue()); - } else { - expectedParams = Collections.emptyMap(); - } + final Map expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy); PutUserRequest putUserRequest = new PutUserRequest(username, password, roles, fullName, email, enabled, metadata, refreshPolicy); Request request = SecurityRequestConverters.putUser(putUserRequest); @@ -67,4 +64,36 @@ public void testPutUser() throws IOException { assertEquals(expectedParams, request.getParameters()); assertToXContentBody(putUserRequest, request.getEntity()); } + + public void testEnableUser() { + final String username = randomAlphaOfLengthBetween(1, 12); + final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values()); + final Map expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy); + EnableUserRequest enableUserRequest = new EnableUserRequest(username, refreshPolicy); + Request request = SecurityRequestConverters.enableUser(enableUserRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/security/user/" + username + "/_enable", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertNull(request.getEntity()); + } + + public void testDisableUser() { + final String username = randomAlphaOfLengthBetween(1, 12); + final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values()); + final Map expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy); + DisableUserRequest disableUserRequest = new DisableUserRequest(username, refreshPolicy); + Request request = SecurityRequestConverters.disableUser(disableUserRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/security/user/" + username + "/_disable", request.getEndpoint()); + 
assertEquals(expectedParams, request.getParameters()); + assertNull(request.getEntity()); + } + + private static Map getExpectedParamsFromRefreshPolicy(RefreshPolicy refreshPolicy) { + if (refreshPolicy != RefreshPolicy.NONE) { + return Collections.singletonMap("refresh", refreshPolicy.getValue()); + } else { + return Collections.emptyMap(); + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java index 5741b0539ba0e..103b031fc0e03 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SecurityDocumentationIT.java @@ -24,9 +24,12 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.security.DisableUserRequest; +import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.PutUserResponse; import org.elasticsearch.client.security.RefreshPolicy; +import org.elasticsearch.client.security.EmptyResponse; import java.util.Collections; import java.util.concurrent.CountDownLatch; @@ -38,16 +41,16 @@ public void testPutUser() throws Exception { RestHighLevelClient client = highLevelClient(); { - //tag::x-pack-put-user-execute + //tag::put-user-execute char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; PutUserRequest request = new PutUserRequest("example", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE); PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT); - //end::x-pack-put-user-execute + //end::put-user-execute - //tag::x-pack-put-user-response + //tag::put-user-response boolean isCreated = response.isCreated(); // <1> - //end::x-pack-put-user-response + //end::put-user-response assertTrue(isCreated); } @@ -56,7 +59,7 @@ public void testPutUser() throws Exception { char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; PutUserRequest request = new PutUserRequest("example2", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE); - // tag::x-pack-put-user-execute-listener + // tag::put-user-execute-listener ActionListener listener = new ActionListener() { @Override public void onResponse(PutUserResponse response) { @@ -68,15 +71,104 @@ public void onFailure(Exception e) { // <2> } }; - // end::x-pack-put-user-execute-listener + // end::put-user-execute-listener // Replace the empty listener by a blocking listener in test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); - // tag::x-pack-put-user-execute-async + // tag::put-user-execute-async client.security().putUserAsync(request, RequestOptions.DEFAULT, listener); // <1> - // end::x-pack-put-user-execute-async + // end::put-user-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testEnableUser() throws Exception { + RestHighLevelClient client = highLevelClient(); + char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; + PutUserRequest putUserRequest = new PutUserRequest("enable_user", password, 
Collections.singletonList("superuser"), null, + null, true, null, RefreshPolicy.IMMEDIATE); + PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT); + assertTrue(putUserResponse.isCreated()); + + { + //tag::enable-user-execute + EnableUserRequest request = new EnableUserRequest("enable_user", RefreshPolicy.NONE); + EmptyResponse response = client.security().enableUser(request, RequestOptions.DEFAULT); + //end::enable-user-execute + + assertNotNull(response); + } + + { + //tag::enable-user-execute-listener + EnableUserRequest request = new EnableUserRequest("enable_user", RefreshPolicy.NONE); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(EmptyResponse setUserEnabledResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::enable-user-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::enable-user-execute-async + client.security().enableUserAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::enable-user-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + + public void testDisableUser() throws Exception { + RestHighLevelClient client = highLevelClient(); + char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'}; + PutUserRequest putUserRequest = new PutUserRequest("disable_user", password, Collections.singletonList("superuser"), null, + null, true, null, RefreshPolicy.IMMEDIATE); + PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT); + assertTrue(putUserResponse.isCreated()); + { + //tag::disable-user-execute + DisableUserRequest request = new DisableUserRequest("disable_user", RefreshPolicy.NONE); + EmptyResponse response = client.security().disableUser(request, RequestOptions.DEFAULT); + //end::disable-user-execute + + assertNotNull(response); + } + + { + //tag::disable-user-execute-listener + DisableUserRequest request = new DisableUserRequest("disable_user", RefreshPolicy.NONE); + ActionListener listener = new ActionListener() { + @Override + public void onResponse(EmptyResponse setUserEnabledResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + //end::disable-user-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::disable-user-execute-async + client.security().disableUserAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::disable-user-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/security/EmptyResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/EmptyResponseTests.java new file mode 100644 index 0000000000000..37e2e6bb51565 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/security/EmptyResponseTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client.security; + +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public class EmptyResponseTests extends ESTestCase { + + public void testParseFromXContent() throws IOException { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{}")) { + + EmptyResponse response = EmptyResponse.fromXContent(parser); + assertNotNull(response); + } + + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, "{\"foo\": \"bar\"}")) { + + XContentParseException exception = + expectThrows(XContentParseException.class, () -> EmptyResponse.fromXContent(parser)); + assertThat(exception.getMessage(), containsString("field [foo]")); + } + } +} diff --git a/docs/java-rest/high-level/security/disable-user.asciidoc b/docs/java-rest/high-level/security/disable-user.asciidoc new file mode 100644 index 0000000000000..8bb2299946c42 --- /dev/null +++ b/docs/java-rest/high-level/security/disable-user.asciidoc @@ -0,0 +1,46 @@ +[[java-rest-high-security-disable-user]] +=== Disable User API + +[[java-rest-high-security-disable-user-execution]] +==== Execution + +Disabling a user can be performed using the `security().disableUser()` +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute] +-------------------------------------------------- + +[[java-rest-high-security-disable-user-response]] +==== Response + +The returned `EmptyResponse` does not contain any fields. Receiving this +response indicates a successful request. + +[[java-rest-high-security-disable-user-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute-async] +-------------------------------------------------- +<1> The `DisableUserRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once the request +has completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed.
+ +A typical listener for an `EmptyResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[disable-user-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. diff --git a/docs/java-rest/high-level/security/enable-user.asciidoc b/docs/java-rest/high-level/security/enable-user.asciidoc new file mode 100644 index 0000000000000..7601653269789 --- /dev/null +++ b/docs/java-rest/high-level/security/enable-user.asciidoc @@ -0,0 +1,46 @@ +[[java-rest-high-security-enable-user]] +=== Enable User API + +[[java-rest-high-security-enable-user-execution]] +==== Execution + +Enabling a disabled user can be performed using the `security().enableUser()` +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute] +-------------------------------------------------- + +[[java-rest-high-security-enable-user-response]] +==== Response + +The returned `EmptyResponse` does not contain any fields. Receiving this +response indicates a successful request. + +[[java-rest-high-security-enable-user-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute-async] +-------------------------------------------------- +<1> The `EnableUserRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once the request +has completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for an `EmptyResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SecurityDocumentationIT.java[enable-user-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument.
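For readers cross-checking these docs against the converter tests earlier in this change, a sketch of what the two APIs put on the wire (the converter class is package-private, so this mirrors the tests rather than caller-facing code; "jdoe" is again a placeholder username, and the expected values in the comments come from SecurityRequestConvertersTests):

    EnableUserRequest enable = new EnableUserRequest("jdoe", RefreshPolicy.IMMEDIATE);
    Request lowLevel = SecurityRequestConverters.enableUser(enable);
    // lowLevel.getMethod()     -> "PUT"
    // lowLevel.getEndpoint()   -> "/_xpack/security/user/jdoe/_enable"
    // lowLevel.getParameters() -> {refresh=true}, derived from the RefreshPolicy
    // lowLevel.getEntity()     -> null; the reply is an empty JSON object parsed into EmptyResponse

A `DisableUserRequest` produces the same request with `_disable` as the final path part.
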
diff --git a/docs/java-rest/high-level/x-pack/security/put-user.asciidoc b/docs/java-rest/high-level/security/put-user.asciidoc similarity index 67% rename from docs/java-rest/high-level/x-pack/security/put-user.asciidoc rename to docs/java-rest/high-level/security/put-user.asciidoc index b6d1e0166eede..aca69b8182842 100644 --- a/docs/java-rest/high-level/x-pack/security/put-user.asciidoc +++ b/docs/java-rest/high-level/security/put-user.asciidoc @@ -1,7 +1,7 @@ -[[java-rest-high-x-pack-security-put-user]] -=== X-Pack Put User API +[[java-rest-high-security-put-user]] +=== Put User API -[[java-rest-high-x-pack-security-put-user-execution]] +[[java-rest-high-security-put-user-execution]] ==== Execution Creating and updating a user can be performed using the `security().putUser()` @@ -9,10 +9,10 @@ method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute] +include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute] -------------------------------------------------- -[[java-rest-high-x-pack-security-put-user-response]] +[[java-rest-high-security-put-user-response]] ==== Response The returned `PutUserResponse` contains a single field, `created`. This field @@ -20,21 +20,21 @@ serves as an indication if a user was created or if an existing entry was update ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-response] +include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-response] -------------------------------------------------- <1> `created` is a boolean indicating whether the user was created or updated -[[java-rest-high-x-pack-security-put-user-async]] +[[java-rest-high-security-put-user-async]] ==== Asynchronous Execution This request can be executed asynchronously: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute-async] +include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute-async] -------------------------------------------------- -<1> The `PutUserResponse` to execute and the `ActionListener` to use when -the execution completes +<1> The `PutUserRequest` to execute and the `ActionListener` to use when +the execution completes. The asynchronous method does not block and returns immediately. Once the request has completed the `ActionListener` is called back using the `onResponse` method @@ -45,8 +45,8 @@ A typical listener for a `PutUserResponse` looks like: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- -include-tagged::{doc-tests}/SecurityDocumentationIT.java[x-pack-put-user-execute-listener] +include-tagged::{doc-tests}/SecurityDocumentationIT.java[put-user-execute-listener] -------------------------------------------------- <1> Called when the execution is successfully completed. The response is -provided as an argument -<2> Called in case of failure. The raised exception is provided as an argument +provided as an argument. +<2> Called in case of failure. The raised exception is provided as an argument. 
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index eb03d8ee4c6a6..8d49724353e6f 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -248,6 +248,18 @@ The Java High Level REST Client supports the following Migration APIs: include::migration/get-assistance.asciidoc[] +== Security APIs + +The Java High Level REST Client supports the following Security APIs: + +* <<java-rest-high-security-put-user>> +* <<java-rest-high-security-enable-user>> +* <<java-rest-high-security-disable-user>> + +include::security/put-user.asciidoc[] +include::security/enable-user.asciidoc[] +include::security/disable-user.asciidoc[] + == Watcher APIs The Java High Level REST Client supports the following Watcher APIs: From 43592305a1a1abb318a31216284d5a1b3f0838cf Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 7 Sep 2018 13:08:31 -0500 Subject: [PATCH 41/91] HLRC: split watcher request converters (#33442) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the WatcherClient request converters. --- .../elasticsearch/client/WatcherClient.java | 8 +- .../client/WatcherRequestConverters.java | 62 +++++++++++++++ .../client/WatcherRequestConvertersTests.java | 78 +++++++++++++++++++ 3 files changed, 144 insertions(+), 4 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java index 48487926f024b..b1a3eb3f87bf9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java @@ -47,7 +47,7 @@ public final class WatcherClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, + return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::putWatch, options, PutWatchResponse::fromXContent, emptySet()); } @@ -61,7 +61,7 @@ public PutWatchResponse putWatch(PutWatchRequest request, RequestOptions options */ public void putWatchAsync(PutWatchRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherPutWatch, options, + restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::putWatch, options, PutWatchResponse::fromXContent, listener, emptySet()); } @@ -75,7 +75,7 @@ public void putWatchAsync(PutWatchRequest request, RequestOptions options, * @throws IOException in case there is a problem sending the request or parsing back the response */ public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::deleteWatch, options, DeleteWatchResponse::fromXContent, singleton(404));
} @@ -88,7 +88,7 @@ public DeleteWatchResponse deleteWatch(DeleteWatchRequest request, RequestOption * @param listener the listener to be notified upon request completion */ public void deleteWatchAsync(DeleteWatchRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackWatcherDeleteWatch, options, + restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deleteWatch, options, DeleteWatchResponse::fromXContent, listener, singleton(404)); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java new file mode 100644 index 0000000000000..3b52d1c7b9943 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java @@ -0,0 +1,62 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPut; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; + +public class WatcherRequestConverters { + + static Request putWatch(PutWatchRequest putWatchRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(putWatchRequest.getId()) + .build(); + + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request).withVersion(putWatchRequest.getVersion()); + if (putWatchRequest.isActive() == false) { + params.putParam("active", "false"); + } + ContentType contentType = RequestConverters.createContentType(putWatchRequest.xContentType()); + BytesReference source = putWatchRequest.getSource(); + request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); + return request; + } + + static Request deleteWatch(DeleteWatchRequest deleteWatchRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("watcher") + .addPathPartAsIs("watch") + .addPathPart(deleteWatchRequest.getId()) + .build(); + + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + return request; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java new file mode 100644 index 0000000000000..203d0826c6d96 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; +import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; +import org.elasticsearch.test.ESTestCase; + +import java.io.ByteArrayOutputStream; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + +public class WatcherRequestConvertersTests extends ESTestCase { + + public void testXPackPutWatch() throws Exception { + PutWatchRequest putWatchRequest = new PutWatchRequest(); + String watchId = randomAlphaOfLength(10); + putWatchRequest.setId(watchId); + String body = randomAlphaOfLength(20); + putWatchRequest.setSource(new BytesArray(body), XContentType.JSON); + + Map expectedParams = new HashMap<>(); + if (randomBoolean()) { + putWatchRequest.setActive(false); + expectedParams.put("active", "false"); + } + + if (randomBoolean()) { + long version = randomLongBetween(10, 100); + putWatchRequest.setVersion(version); + expectedParams.put("version", String.valueOf(version)); + } + + Request request = WatcherRequestConverters.putWatch(putWatchRequest); + assertEquals(HttpPut.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + request.getEntity().writeTo(bos); + assertThat(bos.toString("UTF-8"), is(body)); + } + + public void testXPackDeleteWatch() { + DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); + String watchId = randomAlphaOfLength(10); + deleteWatchRequest.setId(watchId); + + Request request = WatcherRequestConverters.deleteWatch(deleteWatchRequest); + assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); + assertThat(request.getEntity(), nullValue()); + } +} From 8d61457d9c8ff6471009728e8c8206ff6985999b Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 7 Sep 2018 13:30:57 -0500 Subject: [PATCH 42/91] HLRC: split xpack request 
converters (#33444) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the XPackClient request converters. --- .../client/RequestConverters.java | 54 ++++++++++------ .../org/elasticsearch/client/XPackClient.java | 8 +-- .../client/XPackRequestConverters.java | 51 +++++++++++++++ .../client/RequestConvertersTests.java | 33 ---------- .../client/XPackRequestConvertersTests.java | 63 +++++++++++++++++++ 5 files changed, 152 insertions(+), 57 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/XPackRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 5401d32b6b735..75d3a0b5b2027 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -98,8 +98,9 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.reindex.UpdateByQueryRequest; -import org.elasticsearch.protocol.xpack.XPackInfoRequest; -import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; +import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -113,10 +114,8 @@ import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; -import java.util.EnumSet; import java.util.Locale; import java.util.StringJoiner; -import java.util.stream.Collectors; final class RequestConverters { static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; @@ -976,19 +975,6 @@ static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) return request; } - static Request xPackInfo(XPackInfoRequest infoRequest) { - Request request = new Request(HttpGet.METHOD_NAME, "/_xpack"); - if (false == infoRequest.isVerbose()) { - request.addParameter("human", "false"); - } - if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) { - request.addParameter("categories", infoRequest.getCategories().stream() - .map(c -> c.toString().toLowerCase(Locale.ROOT)) - .collect(Collectors.joining(","))); - } - return request; - } - static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") @@ -1020,10 +1006,38 @@ static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) { return request; } - static Request xpackUsage(XPackUsageRequest usageRequest) { - Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); + static Request putLicense(PutLicenseRequest putLicenseRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("license") + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + Params parameters = new Params(request); + 
parameters.withTimeout(putLicenseRequest.timeout()); + parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout()); + if (putLicenseRequest.isAcknowledge()) { + parameters.putParam("acknowledge", "true"); + } + request.setJsonEntity(putLicenseRequest.getLicenseDefinition()); + return request; + } + + static Request getLicense(GetLicenseRequest getLicenseRequest) { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("license") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params parameters = new Params(request); + parameters.withLocal(getLicenseRequest.local()); + return request; + } + + static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) { + Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license"); Params parameters = new Params(request); - parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); + parameters.withTimeout(deleteLicenseRequest.timeout()); + parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout()); return request; } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 2af49ba1a1b73..9cd8413fa7917 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -56,7 +56,7 @@ public final class XPackClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackInfo, options, + return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::info, options, XPackInfoResponse::fromXContent, emptySet()); } @@ -70,7 +70,7 @@ public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) */ public void infoAsync(XPackInfoRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, + restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::info, options, XPackInfoResponse::fromXContent, listener, emptySet()); } @@ -81,7 +81,7 @@ public void infoAsync(XPackInfoRequest request, RequestOptions options, * @throws IOException in case there is a problem sending the request or parsing back the response */ public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options, + return restHighLevelClient.performRequestAndParseEntity(request, XPackRequestConverters::usage, options, XPackUsageResponse::fromXContent, emptySet()); } @@ -91,7 +91,7 @@ public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions option * @param listener the listener to be notified upon request completion */ public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, + restHighLevelClient.performRequestAsyncAndParseEntity(request, XPackRequestConverters::usage, options, XPackUsageResponse::fromXContent, listener, emptySet()); } } diff 
--git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java new file mode 100644 index 0000000000000..1e2e15ad97c2e --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackRequestConverters.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; + +import java.util.EnumSet; +import java.util.Locale; +import java.util.stream.Collectors; + +public class XPackRequestConverters { + + static Request info(XPackInfoRequest infoRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack"); + if (false == infoRequest.isVerbose()) { + request.addParameter("human", "false"); + } + if (false == infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) { + request.addParameter("categories", infoRequest.getCategories().stream() + .map(c -> c.toString().toLowerCase(Locale.ROOT)) + .collect(Collectors.joining(","))); + } + return request; + } + + static Request usage(XPackUsageRequest usageRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); + return request; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index d346934ff03eb..5d2f4cb74d66f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -112,7 +112,6 @@ import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.reindex.RemoteInfo; import org.elasticsearch.index.reindex.UpdateByQueryRequest; -import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -140,7 +139,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -2259,37 +2257,6 @@ public void testEnforceSameContentType() { + "previous requests have content-type [" + xContentType + "]", exception.getMessage()); } - public 
void testXPackInfo() { - XPackInfoRequest infoRequest = new XPackInfoRequest(); - Map expectedParams = new HashMap<>(); - infoRequest.setVerbose(randomBoolean()); - if (false == infoRequest.isVerbose()) { - expectedParams.put("human", "false"); - } - int option = between(0, 2); - switch (option) { - case 0: - infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class)); - break; - case 1: - infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES)); - expectedParams.put("categories", "features"); - break; - case 2: - infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD)); - expectedParams.put("categories", "build,features"); - break; - default: - throw new IllegalArgumentException("invalid option [" + option + "]"); - } - - Request request = RequestConverters.xPackInfo(infoRequest); - assertEquals(HttpGet.METHOD_NAME, request.getMethod()); - assertEquals("/_xpack", request.getEndpoint()); - assertNull(request.getEntity()); - assertEquals(expectedParams, request.getParameters()); - } - public void testXPackPutWatch() throws Exception { PutWatchRequest putWatchRequest = new PutWatchRequest(); String watchId = randomAlphaOfLength(10); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/XPackRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/XPackRequestConvertersTests.java new file mode 100644 index 0000000000000..d2f20273d4d3c --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/XPackRequestConvertersTests.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpGet; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +public class XPackRequestConvertersTests extends ESTestCase { + + public void testXPackInfo() { + XPackInfoRequest infoRequest = new XPackInfoRequest(); + Map<String, String> expectedParams = new HashMap<>(); + infoRequest.setVerbose(ESTestCase.randomBoolean()); + if (false == infoRequest.isVerbose()) { + expectedParams.put("human", "false"); + } + int option = ESTestCase.between(0, 2); + switch (option) { + case 0: + infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class)); + break; + case 1: + infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES)); + expectedParams.put("categories", "features"); + break; + case 2: + infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD)); + expectedParams.put("categories", "build,features"); + break; + default: + throw new IllegalArgumentException("invalid option [" + option + "]"); + } + + Request request = XPackRequestConverters.info(infoRequest); + Assert.assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + Assert.assertEquals("/_xpack", request.getEndpoint()); + Assert.assertNull(request.getEntity()); + Assert.assertEquals(expectedParams, request.getParameters()); + } +} From 190ea9a6def9082348d983b16420ef02607d4c17 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 14:31:23 -0400 Subject: [PATCH 43/91] Logging: Configure the node name when we have it (#32983) Change the logging infrastructure to handle when the node name isn't available in `elasticsearch.yml`. In that case the node name is not available until long after logging is configured. The biggest change is that the node name logging is no longer fixed at pattern build time. Instead it is read from a `SetOnce` on every print. If it is unset it is printed as `unknown` so we have something that fits in the pattern. On normal startup we don't log anything until the node name is available so we never see the `unknown`s.
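To make that mechanism concrete, here is a minimal, self-contained sketch of the set-once holder that is read at print time. The class and method names are illustrative stand-ins rather than the real log4j2 NodeNamePatternConverter from the diff below, and a plain AtomicReference stands in for the Lucene SetOnce the converter actually uses:

    import java.util.concurrent.atomic.AtomicReference;

    final class NodeNameHolderSketch {
        // Set at most once, as soon as the node name (possibly derived
        // from the node id) becomes available.
        private static final AtomicReference<String> NODE_NAME = new AtomicReference<>();

        static void setNodeName(String nodeName) {
            // compareAndSet enforces the set-once contract.
            if (NODE_NAME.compareAndSet(null, nodeName) == false) {
                throw new IllegalStateException("node name was already set");
            }
        }

        // Called for every log line: one volatile read per print, with a
        // stable fallback so early lines still fit the pattern.
        static void format(StringBuilder toAppendTo) {
            String nodeName = NODE_NAME.get();
            toAppendTo.append(nodeName == null ? "unknown" : nodeName);
        }
    }

In the diff below the equivalent of setNodeName is reached either from Bootstrap, when the name is present in elasticsearch.yml, or from the nodeIdConsumer passed to NodeEnvironment when the name is derived from the node id. That is the trade the change makes: one volatile read per log line in exchange for being able to configure logging before the node name is known.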
--- .../gradle/test/ClusterFormationTasks.groovy | 8 +- .../archives/integ-test-zip/build.gradle | 25 ++++- .../test/rest/NodeNameInLogsIT.java | 43 +++++++++ .../src/test/resources/plugin-security.policy | 4 + .../common/logging/EvilLoggerTests.java | 4 +- .../env/NodeEnvironmentEvilTests.java | 6 +- qa/unconfigured-node-name/build.gradle | 30 ++++++ .../NodeNameInLogsIT.java | 43 +++++++++ .../src/test/resources/plugin-security.policy | 4 + .../elasticsearch/bootstrap/Bootstrap.java | 12 ++- .../common/logging/LogConfigurator.java | 12 ++- .../logging/NodeNamePatternConverter.java | 29 +++--- .../elasticsearch/env/NodeEnvironment.java | 33 +++---- .../java/org/elasticsearch/node/Node.java | 55 ++++++----- .../env/NodeEnvironmentTests.java | 14 +-- .../elasticsearch/index/IndexModuleTests.java | 2 +- .../index/shard/NewPathForShardTests.java | 8 +- .../logging/NodeNameInLogsIntegTestCase.java | 96 +++++++++++++++++++ .../java/org/elasticsearch/node/MockNode.java | 5 +- .../org/elasticsearch/test/ESTestCase.java | 2 +- .../bench/WatcherScheduleEngineBenchmark.java | 7 +- 21 files changed, 363 insertions(+), 79 deletions(-) create mode 100644 distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java create mode 100644 distribution/archives/integ-test-zip/src/test/resources/plugin-security.policy create mode 100644 qa/unconfigured-node-name/build.gradle create mode 100644 qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java create mode 100644 qa/unconfigured-node-name/src/test/resources/plugin-security.policy create mode 100644 test/framework/src/main/java/org/elasticsearch/common/logging/NodeNameInLogsIntegTestCase.java diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 4ede349b206d6..ecf3e3420408d 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -337,7 +337,13 @@ class ClusterFormationTasks { if (node.nodeVersion.major >= 7) { esConfig['indices.breaker.total.use_real_memory'] = false } - esConfig.putAll(node.config.settings) + for (Map.Entry setting : node.config.settings) { + if (setting.value == null) { + esConfig.remove(setting.key) + } else { + esConfig.put(setting.key, setting.value) + } + } Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) writeConfig.doFirst { diff --git a/distribution/archives/integ-test-zip/build.gradle b/distribution/archives/integ-test-zip/build.gradle index 4a6dde5fc0c92..4c2ac7d1cf4de 100644 --- a/distribution/archives/integ-test-zip/build.gradle +++ b/distribution/archives/integ-test-zip/build.gradle @@ -1,2 +1,23 @@ -// This file is intentionally blank. All configuration of the -// distribution is done in the parent project. +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +integTestRunner { + systemProperty 'tests.logfile', + "${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }.log" +} diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java new file mode 100644 index 0000000000000..13128b9478e0c --- /dev/null +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.unconfigurednodename; + +import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase; + +import java.io.IOException; +import java.io.BufferedReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; + +public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase { + @Override + protected BufferedReader openReader(Path logFile) throws IOException { + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + return Files.newBufferedReader(logFile, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } +} diff --git a/distribution/archives/integ-test-zip/src/test/resources/plugin-security.policy b/distribution/archives/integ-test-zip/src/test/resources/plugin-security.policy new file mode 100644 index 0000000000000..d0d865c4ede16 --- /dev/null +++ b/distribution/archives/integ-test-zip/src/test/resources/plugin-security.policy @@ -0,0 +1,4 @@ +grant { + // Needed to read the log file + permission java.io.FilePermission "${tests.logfile}", "read"; +}; diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java index ede61da1369f5..a06d7ad544592 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/common/logging/EvilLoggerTests.java @@ -357,7 +357,7 @@ public void testProperties() throws IOException, UserException { } } - public void testNoNodeNameWarning() throws IOException, UserException { + public void testNoNodeNameInPatternWarning() throws IOException, UserException { setupLogging("no_node_name"); final String path = @@ -368,7 +368,7 @@ public void testNoNodeNameWarning() throws IOException, UserException { assertThat(events.size(), equalTo(2)); final String location = "org.elasticsearch.common.logging.LogConfigurator"; // the first message is a warning for unsupported configuration files - assertLogLine(events.get(0), Level.WARN, location, "\\[null\\] Some logging configurations have %marker but don't " + assertLogLine(events.get(0), Level.WARN, location, "\\[unknown\\] Some logging configurations have %marker but don't " + "have %node_name. We will automatically add %node_name to the pattern to ease the migration for users " + "who customize log4j2.properties but will stop this behavior in 7.0. 
You should manually replace " + "`%node_name` with `\\[%node_name\\]%marker ` in these locations:"); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java index 57d4a363cc8c7..642694856a654 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java @@ -52,7 +52,7 @@ public void testMissingWritePermission() throws IOException { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { - new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); }); assertTrue(ioException.getMessage(), ioException.getMessage().startsWith(path.toString())); } @@ -72,7 +72,7 @@ public void testMissingWritePermissionOnIndex() throws IOException { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { - new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); }); assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory")); } @@ -97,7 +97,7 @@ public void testMissingWritePermissionOnShard() throws IOException { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build(); IOException ioException = expectThrows(IOException.class, () -> { - new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); }); assertTrue(ioException.getMessage(), ioException.getMessage().startsWith("failed to test writes in data directory")); } diff --git a/qa/unconfigured-node-name/build.gradle b/qa/unconfigured-node-name/build.gradle new file mode 100644 index 0000000000000..dcc3e7c6a169b --- /dev/null +++ b/qa/unconfigured-node-name/build.gradle @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +integTestCluster { + setting 'node.name', null +} + +integTestRunner { + systemProperty 'tests.logfile', + "${ -> integTest.nodes[0].homeDir}/logs/${ -> integTest.nodes[0].clusterName }.log" +} diff --git a/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java new file mode 100644 index 0000000000000..512fc2345549c --- /dev/null +++ b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.unconfigured_node_name; + +import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase; + +import java.io.IOException; +import java.io.BufferedReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; + +public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase { + @Override + protected BufferedReader openReader(Path logFile) throws IOException { + return AccessController.doPrivileged((PrivilegedAction) () -> { + try { + return Files.newBufferedReader(logFile, StandardCharsets.UTF_8); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } +} diff --git a/qa/unconfigured-node-name/src/test/resources/plugin-security.policy b/qa/unconfigured-node-name/src/test/resources/plugin-security.policy new file mode 100644 index 0000000000000..d0d865c4ede16 --- /dev/null +++ b/qa/unconfigured-node-name/src/test/resources/plugin-security.policy @@ -0,0 +1,4 @@ +grant { + // Needed to read the log file + permission java.io.FilePermission "${tests.logfile}", "read"; +}; diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index bc2fe747c030b..2694baf2c39f8 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -37,7 +37,6 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.logging.NodeNamePatternConverter; import org.elasticsearch.common.network.IfConfig; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureSettings; @@ -217,6 +216,11 @@ protected void validateNodeBeforeAcceptingRequests( final BoundTransportAddress boundTransportAddress, List 
checks) throws NodeValidationException { BootstrapChecks.check(context, boundTransportAddress, checks); } + + @Override + protected void registerDerivedNodeNameWithLogger(String nodeName) { + LogConfigurator.setNodeName(nodeName); + } }; } @@ -289,9 +293,9 @@ static void init( final SecureSettings keystore = loadSecureSettings(initialEnv); final Environment environment = createEnvironment(pidFile, keystore, initialEnv.settings(), initialEnv.configFile()); - String nodeName = Node.NODE_NAME_SETTING.get(environment.settings()); - NodeNamePatternConverter.setNodeName(nodeName); - + if (Node.NODE_NAME_SETTING.exists(environment.settings())) { + LogConfigurator.setNodeName(Node.NODE_NAME_SETTING.get(environment.settings())); + } try { LogConfigurator.configure(environment); } catch (IOException e) { diff --git a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index 4e3771f36680d..531104a1a397a 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java +++ b/server/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -134,6 +134,15 @@ public static void loadLog4jPlugins() { PluginManager.addPackage(LogConfigurator.class.getPackage().getName()); } + /** + * Sets the node name. This is called before logging is configured if the + * node name is set in elasticsearch.yml. Otherwise it is called as soon + * as the node id is available. + */ + public static void setNodeName(String nodeName) { + NodeNamePatternConverter.setNodeName(nodeName); + } + private static void checkErrorListener() { assert errorListenerIsRegistered() : "expected error listener to be registered"; if (error.get()) { @@ -158,8 +167,8 @@ private static void configure(final Settings settings, final Path configsPath, f final LoggerContext context = (LoggerContext) LogManager.getContext(false); + final Set locationsWithDeprecatedPatterns = Collections.synchronizedSet(new HashSet<>()); final List configurations = new ArrayList<>(); - /* * Subclass the properties configurator to hack the new pattern in * place so users don't have to change log4j2.properties in @@ -170,7 +179,6 @@ private static void configure(final Settings settings, final Path configsPath, f * Everything in this subclass that isn't marked as a hack is copied * from log4j2's source. */ - Set locationsWithDeprecatedPatterns = Collections.synchronizedSet(new HashSet<>()); final PropertiesConfigurationFactory factory = new PropertiesConfigurationFactory() { @Override public PropertiesConfiguration getConfiguration(final LoggerContext loggerContext, final ConfigurationSource source) { diff --git a/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java index ca4c9ab776f6e..b63db40276d68 100644 --- a/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java +++ b/server/src/main/java/org/elasticsearch/common/logging/NodeNamePatternConverter.java @@ -30,20 +30,22 @@ /** * Converts {@code %node_name} in log4j patterns into the current node name. - * We *could* use a system property lookup instead but this is very explicit - * and fails fast if we try to use the logger without initializing the node - * name. As a bonus it ought to be ever so slightly faster because it doesn't - * have to look up the system property every time. 
+ * We can't use a system property for this because the node name system + * property is only set if the node name is explicitly defined in + * elasticsearch.yml. */ @Plugin(category = PatternConverter.CATEGORY, name = "NodeNamePatternConverter") @ConverterKeys({"node_name"}) -public class NodeNamePatternConverter extends LogEventPatternConverter { +public final class NodeNamePatternConverter extends LogEventPatternConverter { + /** + * The name of this node. + */ private static final SetOnce<String> NODE_NAME = new SetOnce<>(); /** * Set the name of this node. */ - public static void setNodeName(String nodeName) { + static void setNodeName(String nodeName) { NODE_NAME.set(nodeName); } @@ -55,18 +57,21 @@ public static NodeNamePatternConverter newInstance(final String[] options) { throw new IllegalArgumentException("no options supported but options provided: " + Arrays.toString(options)); } - return new NodeNamePatternConverter(NODE_NAME.get()); + return new NodeNamePatternConverter(); } - private final String nodeName; - - private NodeNamePatternConverter(String nodeName) { + private NodeNamePatternConverter() { super("NodeName", "node_name"); - this.nodeName = nodeName; } @Override public void format(LogEvent event, StringBuilder toAppendTo) { - toAppendTo.append(nodeName); + /* + * We're not thrilled about this volatile read on every line logged but + * the alternatives are slightly terrifying and/or don't work with the + * security manager. + */ + String nodeName = NODE_NAME.get(); + toAppendTo.append(nodeName == null ? "unknown" : nodeName); } } diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 87874bd45000c..29d3207c73ac2 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -20,6 +20,7 @@ package org.elasticsearch.env; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.SegmentInfos; @@ -37,7 +38,6 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.FileSystemUtils; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -53,7 +53,6 @@ import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.monitor.fs.FsProbe; import org.elasticsearch.monitor.jvm.JvmInfo; -import org.elasticsearch.node.Node; import java.io.Closeable; import java.io.IOException; @@ -76,6 +75,7 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; import static java.util.Collections.unmodifiableSet; @@ -83,9 +83,6 @@ * A component that holds all data paths for a single node.
*/ public final class NodeEnvironment implements Closeable { - - private final Logger logger; - public static class NodePath { /* ${data.paths}/nodes/{node.id} */ public final Path path; @@ -139,6 +136,7 @@ public String toString() { } + private final Logger logger = LogManager.getLogger(NodeEnvironment.class); private final NodePath[] nodePaths; private final Path sharedDataPath; private final Lock[] locks; @@ -173,24 +171,27 @@ public String toString() { public static final String INDICES_FOLDER = "indices"; public static final String NODE_LOCK_FILENAME = "node.lock"; - public NodeEnvironment(Settings settings, Environment environment) throws IOException { - + /** + * Setup the environment. + * @param settings settings from elasticsearch.yml + * @param nodeIdConsumer called as soon as the node id is available so it + * can be used as the node name in log messages if a + * name wasn't loaded from elasticsearch.yml + */ + public NodeEnvironment(Settings settings, Environment environment, Consumer<String> nodeIdConsumer) throws IOException { if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) { nodePaths = null; sharedDataPath = null; locks = null; nodeLockId = -1; nodeMetaData = new NodeMetaData(generateNodeId(settings)); - logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId())); + nodeIdConsumer.accept(nodeMetaData.nodeId()); return; } final NodePath[] nodePaths = new NodePath[environment.dataWithClusterFiles().length]; final Lock[] locks = new Lock[nodePaths.length]; boolean success = false; - // trace logger to debug issues before the default node name is derived from the node id - Logger startupTraceLogger = Loggers.getLogger(getClass(), settings); - try { sharedDataPath = environment.sharedDataFile(); int nodeLockId = -1; @@ -203,13 +204,13 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce Files.createDirectories(dir); try (Directory luceneDir = FSDirectory.open(dir, NativeFSLockFactory.INSTANCE)) { - startupTraceLogger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); + logger.trace("obtaining node lock on {} ...", dir.toAbsolutePath()); try { locks[dirIndex] = luceneDir.obtainLock(NODE_LOCK_FILENAME); nodePaths[dirIndex] = new NodePath(dir); nodeLockId = possibleLockId; } catch (LockObtainFailedException ex) { - startupTraceLogger.trace( + logger.trace( new ParameterizedMessage("failed to obtain node lock on {}", dir.toAbsolutePath()), ex); // release all the ones that were obtained up until now releaseAndNullLocks(locks); @@ -217,7 +218,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce } } catch (IOException e) { - startupTraceLogger.trace(() -> new ParameterizedMessage( + logger.trace(() -> new ParameterizedMessage( "failed to obtain node lock on {}", dir.toAbsolutePath()), e); lastException = new IOException("failed to obtain lock on " + dir.toAbsolutePath(), e); // release all the ones that were obtained up until now @@ -242,8 +243,8 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce maxLocalStorageNodes); throw new IllegalStateException(message, lastException); } - this.nodeMetaData = loadOrCreateNodeMetaData(settings, startupTraceLogger, nodePaths); - this.logger = Loggers.getLogger(getClass(), Node.addNodeNameIfNeeded(settings, this.nodeMetaData.nodeId())); + this.nodeMetaData = loadOrCreateNodeMetaData(settings, logger, nodePaths); + nodeIdConsumer.accept(nodeMetaData.nodeId()); this.nodeLockId = nodeLockId; this.locks = locks; diff --git
a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 9ead528c974da..c2ef6d12331fe 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -183,7 +183,7 @@ * A node represent a node within a cluster ({@code cluster.name}). The {@link #client()} can be used * in order to use a {@link Client} to perform actions/operations against the cluster. */ -public class Node implements Closeable { +public abstract class Node implements Closeable { public static final Setting WRITE_PORTS_FILE_SETTING = @@ -229,17 +229,6 @@ public class Node implements Closeable { } }, Setting.Property.NodeScope); - /** - * Adds a default node name to the given setting, if it doesn't already exist - * @return the given setting if node name is already set, or a new copy with a default node name set. - */ - public static final Settings addNodeNameIfNeeded(Settings settings, final String nodeId) { - if (NODE_NAME_SETTING.exists(settings)) { - return settings; - } - return Settings.builder().put(settings).put(NODE_NAME_SETTING.getKey(), nodeId.substring(0, 7)).build(); - } - private static final String CLIENT_TYPE = "node"; private final Lifecycle lifecycle = new Lifecycle(); @@ -291,24 +280,34 @@ protected Node( Settings tmpSettings = Settings.builder().put(environment.settings()) .put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build(); - // create the node environment as soon as possible, to recover the node id and enable logging + /* + * Create the node environment as soon as possible so we can + * recover the node id which we might have to use to derive the + * node name. And it is important to get *that* as soon as possible + * so that log lines can contain it. + */ + boolean nodeNameExplicitlyDefined = NODE_NAME_SETTING.exists(tmpSettings); try { - nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + Consumer nodeIdConsumer = nodeNameExplicitlyDefined ? 
+ nodeId -> {} : nodeId -> registerDerivedNodeNameWithLogger(nodeIdToNodeName(nodeId)); + nodeEnvironment = new NodeEnvironment(tmpSettings, environment, nodeIdConsumer); resourcesToClose.add(nodeEnvironment); } catch (IOException ex) { throw new IllegalStateException("Failed to create node environment", ex); } - final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); - final String nodeId = nodeEnvironment.nodeId(); - tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId); - // this must be captured after the node name is possibly added to the settings - final String nodeName = NODE_NAME_SETTING.get(tmpSettings); - if (hadPredefinedNodeName == false) { - logger.info("node name derived from node ID [{}]; set [{}] to override", nodeId, NODE_NAME_SETTING.getKey()); + if (nodeNameExplicitlyDefined) { + logger.info("node name [{}], node ID [{}]", + NODE_NAME_SETTING.get(tmpSettings), nodeEnvironment.nodeId()); } else { - logger.info("node name [{}], node ID [{}]", nodeName, nodeId); + tmpSettings = Settings.builder() + .put(tmpSettings) + .put(NODE_NAME_SETTING.getKey(), nodeIdToNodeName(nodeEnvironment.nodeId())) + .build(); + logger.info("node name derived from node ID [{}]; set [{}] to override", + nodeEnvironment.nodeId(), NODE_NAME_SETTING.getKey()); } + final JvmInfo jvmInfo = JvmInfo.jvmInfo(); logger.info( "version[{}], pid[{}], build[{}/{}/{}/{}], OS[{}/{}/{}], JVM[{}/{}/{}/{}]", @@ -1009,6 +1008,18 @@ protected HttpServerTransport newHttpTransport(NetworkModule networkModule) { return networkModule.getHttpServerTransportSupplier().get(); } + /** + * If the node name was derived from the node id this is called with the + * node name as soon as it is available so that we can register the + * node name with the logger. If the node name defined in elasticsearch.yml + * this is never called. 
+ */ + protected abstract void registerDerivedNodeNameWithLogger(String nodeName); + + private String nodeIdToNodeName(String nodeId) { + return nodeId.substring(0, 7); + } + private static class LocalNodeFactory implements Function { private final SetOnce localNode = new SetOnce<>(); private final String persistentNodeId; diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index 39f03fefe4e65..0b44d9c94d5e1 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -79,13 +79,13 @@ public void testNodeLockSingleEnvironment() throws IOException { List dataPaths = Environment.PATH_DATA_SETTING.get(settings); // Reuse the same location and attempt to lock again - IllegalStateException ex = - expectThrows(IllegalStateException.class, () -> new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings))); + IllegalStateException ex = expectThrows(IllegalStateException.class, () -> + new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {})); assertThat(ex.getMessage(), containsString("failed to obtain node lock")); // Close the environment that holds the lock and make sure we can get the lock after release env.close(); - env = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + env = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); assertThat(env.nodeDataPaths(), arrayWithSize(dataPaths.size())); for (int i = 0; i < dataPaths.size(); i++) { @@ -120,7 +120,7 @@ public void testNodeLockMultipleEnvironment() throws IOException { final Settings settings = buildEnvSettings(Settings.builder().put("node.max_local_storage_nodes", 2).build()); final NodeEnvironment first = newNodeEnvironment(settings); List dataPaths = Environment.PATH_DATA_SETTING.get(settings); - NodeEnvironment second = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + NodeEnvironment second = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); assertEquals(first.nodeDataPaths().length, dataPaths.size()); assertEquals(second.nodeDataPaths().length, dataPaths.size()); for (int i = 0; i < dataPaths.size(); i++) { @@ -477,7 +477,7 @@ public NodeEnvironment newNodeEnvironment() throws IOException { @Override public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = buildEnvSettings(settings); - return new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); } public Settings buildEnvSettings(Settings settings) { @@ -492,7 +492,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings) .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); - return new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); } public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataPath, Settings settings) throws IOException { @@ -501,6 +501,6 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataP .put(Environment.PATH_HOME_SETTING.getKey(), 
createTempDir().toAbsolutePath().toString()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath) .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build(); - return new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); } } diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 75ff1ac1259d2..078ec5ec20abc 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -133,7 +133,7 @@ public void setUp() throws Exception { bigArrays = new BigArrays(pageCacheRecycler, circuitBreakerService); scriptService = new ScriptService(settings, Collections.emptyMap(), Collections.emptyMap()); clusterService = ClusterServiceUtils.createClusterService(threadPool); - nodeEnvironment = new NodeEnvironment(settings, environment); + nodeEnvironment = new NodeEnvironment(settings, environment, nodeId -> {}); mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); } diff --git a/server/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java index 4e6e3036f4c40..d539b71669482 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java @@ -178,7 +178,7 @@ public void testSelectNewPathForShard() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); - NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); // Make sure all our mocking above actually worked: NodePath[] nodePaths = nodeEnv.nodePaths(); @@ -233,7 +233,7 @@ public void testSelectNewPathForShardEvenly() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); - NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); // Make sure all our mocking above actually worked: NodePath[] nodePaths = nodeEnv.nodePaths(); @@ -290,7 +290,7 @@ public void testGettingPathWithMostFreeSpace() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); - NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); aFileStore.usableSpace = 100000; bFileStore.usableSpace = 1000; @@ -315,7 +315,7 @@ public void testTieBreakWithMostShards() throws Exception { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), path) .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build(); - NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings)); + 
NodeEnvironment nodeEnv = new NodeEnvironment(settings, TestEnvironment.newEnvironment(settings), nodeId -> {}); // Make sure all our mocking above actually worked: NodePath[] nodePaths = nodeEnv.nodePaths(); diff --git a/test/framework/src/main/java/org/elasticsearch/common/logging/NodeNameInLogsIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/logging/NodeNameInLogsIntegTestCase.java new file mode 100644 index 0000000000000..5b57c015895b4 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/common/logging/NodeNameInLogsIntegTestCase.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.logging; + +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.regex.Pattern; +import java.util.regex.Matcher; + +import static org.hamcrest.Matchers.containsString; + +/** + * Tests that extend this class verify that the node name appears in the first + * few log lines on startup. Note that this won't pass for clusters that don't + * have the node name defined in elasticsearch.yml and start with + * DEBUG or TRACE level logging. Those nodes log a few lines before they + * resolve the node name. + */ +public abstract class NodeNameInLogsIntegTestCase extends ESRestTestCase { + /** + * Number of lines in the log file to check for the node name. We don't + * just check the entire log file because it could be quite long and + * exceptions don't include the node name. + */ + private static final int LINES_TO_CHECK = 10; + + /** + * Open the log file. This is delegated to subclasses because the test + * framework doesn't have permission to read from the log file but + * subclasses can grant themselves that permission.
+ */ + protected abstract BufferedReader openReader(Path logFile) throws IOException; + + public void testNodeNameIsOnAllLinesOfLog() throws IOException { + BufferedReader logReader = openReader(getLogFile()); + try { + String line = logReader.readLine(); + assertNotNull("no logs at all?!", line); + Matcher m = Pattern.compile("\\] \\[([^\\]]+)\\] ").matcher(line); + if (false == m.find()) { + fail("Didn't see the node name in [" + line + "]"); + } + String nodeName = m.group(1); + + assertNotEquals("unknown", nodeName); + + int lineNumber = 1; + while (true) { + if (lineNumber >= LINES_TO_CHECK) { + break; + } + line = logReader.readLine(); + if (line == null) { + break; // eof + } + lineNumber++; + assertThat(line, containsString("] [" + nodeName + "] ")); + } + } finally { + logReader.close(); + } + } + + @SuppressForbidden(reason = "PathUtils doesn't have permission to read this file") + private Path getLogFile() { + String logFileString = System.getProperty("tests.logfile"); + if (null == logFileString) { + fail("tests.logfile must be set to run this test. It is automatically " + + "set by gradle. If you must set it yourself then it should be the absolute path to the " + + "log file."); + } + return Paths.get(logFileString); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 0e7e35e88a90c..67d91e97e1661 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -175,5 +175,8 @@ protected HttpServerTransport newHttpTransport(NetworkModule networkModule) { } } + @Override + protected void registerDerivedNodeNameWithLogger(String nodeName) { + // Nothing to do because test uses the thread name + } } - diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 922a6e0d27606..82ae989fb413b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -934,7 +934,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException .put(settings) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build(); - return new NodeEnvironment(build, TestEnvironment.newEnvironment(build)); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), nodeId -> {}); } /** Return consistent index settings for the provided index version. */ diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index eff150bf1ef55..0f6fd33497b42 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -94,7 +94,12 @@ public static void main(String[] args) throws Exception { // First clean everything and index the watcher (but not via put alert api!)
- try (Node node = new Node(Settings.builder().put(SETTINGS).put("node.data", false).build()).start()) { + try (Node node = new Node(Settings.builder().put(SETTINGS).put("node.data", false).build()) { + @Override + protected void registerDerivedNodeNameWithLogger(String nodeName) { + // Nothing to do because test uses the thread name + } + }.start()) { try (Client client = node.client()) { ClusterHealthResponse response = client.admin().cluster().prepareHealth().setWaitForNodes("2").get(); if (response.getNumberOfNodes() != 2 && response.getNumberOfDataNodes() != 1) { From d6b40f4c7b21f8bc665304a14d05560f999564cd Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 7 Sep 2018 13:44:16 -0500 Subject: [PATCH 44/91] HLRC: split ingest request converters (#33435) In an effort to encapsulate the different clients, the request converters are being shuffled around. This splits the IngestClient request converters. --- .../elasticsearch/client/IngestClient.java | 16 +-- .../client/IngestRequestConverters.java | 89 +++++++++++++ .../client/RequestConverters.java | 59 --------- .../client/ESRestHighLevelClientTestCase.java | 3 +- .../client/IngestRequestConvertersTests.java | 120 ++++++++++++++++++ .../client/RequestConvertersTests.java | 82 ------------ 6 files changed, 219 insertions(+), 150 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/IngestRequestConvertersTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 99d50f6b46b7e..eb070759ed9cf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -55,7 +55,7 @@ public final class IngestClient { * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::putPipeline, options, + return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::putPipeline, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -68,7 +68,7 @@ public AcknowledgedResponse putPipeline(PutPipelineRequest request, RequestOptio * @param listener the listener to be notified upon request completion */ public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::putPipeline, options, + restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::putPipeline, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -82,7 +82,7 @@ public void putPipelineAsync(PutPipelineRequest request, RequestOptions options, * @throws IOException in case there is a problem sending the request or parsing back the response */ public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::getPipeline, options, + return restHighLevelClient.performRequestAndParseEntity( request, 
IngestRequestConverters::getPipeline, options, GetPipelineResponse::fromXContent, emptySet()); } @@ -95,7 +95,7 @@ public GetPipelineResponse getPipeline(GetPipelineRequest request, RequestOption * @param listener the listener to be notified upon request completion */ public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::getPipeline, options, + restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::getPipeline, options, GetPipelineResponse::fromXContent, listener, emptySet()); } @@ -110,7 +110,7 @@ public void getPipelineAsync(GetPipelineRequest request, RequestOptions options, * @throws IOException in case there is a problem sending the request or parsing back the response */ public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::deletePipeline, options, + return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::deletePipeline, options, AcknowledgedResponse::fromXContent, emptySet()); } @@ -124,7 +124,7 @@ public AcknowledgedResponse deletePipeline(DeletePipelineRequest request, Reques * @param listener the listener to be notified upon request completion */ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options, + restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::deletePipeline, options, AcknowledgedResponse::fromXContent, listener, emptySet()); } @@ -140,7 +140,7 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op * @throws IOException in case there is a problem sending the request or parsing back the response */ public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, + return restHighLevelClient.performRequestAndParseEntity( request, IngestRequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, emptySet()); } @@ -157,7 +157,7 @@ public SimulatePipelineResponse simulate(SimulatePipelineRequest request, Reques public void simulateAsync(SimulatePipelineRequest request, RequestOptions options, ActionListener listener) { - restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, + restHighLevelClient.performRequestAsyncAndParseEntity( request, IngestRequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, listener, emptySet()); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java new file mode 100644 index 0000000000000..e81d716b60f3f --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestRequestConverters.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; + +import java.io.IOException; + +public class IngestRequestConverters { + + static Request getPipeline(GetPipelineRequest getPipelineRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_ingest/pipeline") + .addCommaSeparatedPathParts(getPipelineRequest.getIds()) + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout()); + return request; + } + + static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_ingest/pipeline") + .addPathPart(putPipelineRequest.getId()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(putPipelineRequest.timeout()); + parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout()); + + request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } + + static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) { + String endpoint = new RequestConverters.EndpointBuilder() + .addPathPartAsIs("_ingest/pipeline") + .addPathPart(deletePipelineRequest.getId()) + .build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + + RequestConverters.Params parameters = new RequestConverters.Params(request); + parameters.withTimeout(deletePipelineRequest.timeout()); + parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout()); + + return request; + } + + static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException { + RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_ingest/pipeline"); + if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) { + builder.addPathPart(simulatePipelineRequest.getId()); + } + builder.addPathPartAsIs("_simulate"); + String endpoint = builder.build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + RequestConverters.Params params = new RequestConverters.Params(request); + params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); + 
request.setEntity(RequestConverters.createEntity(simulatePipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); + return request; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 75d3a0b5b2027..7fe3e08f3afb0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -63,10 +63,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.GetPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -694,47 +690,6 @@ private static Request resize(ResizeRequest resizeRequest) throws IOException { return request; } - static Request getPipeline(GetPipelineRequest getPipelineRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_ingest/pipeline") - .addCommaSeparatedPathParts(getPipelineRequest.getIds()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout()); - return request; - } - - static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_ingest/pipeline") - .addPathPart(putPipelineRequest.getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(putPipelineRequest.timeout()); - parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout()); - - request.setEntity(createEntity(putPipelineRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_ingest/pipeline") - .addPathPart(deletePipelineRequest.getId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - Params parameters = new Params(request); - parameters.withTimeout(deletePipelineRequest.timeout()); - parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout()); - - return request; - } - static Request reindex(ReindexRequest reindexRequest) throws IOException { String endpoint = new EndpointBuilder().addPathPart("_reindex").build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); @@ -911,20 +866,6 @@ static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws I return request; } - static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException { - EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline"); - if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) { - builder.addPathPart(simulatePipelineRequest.getId()); - } - builder.addPathPartAsIs("_simulate"); - String endpoint = builder.build(); - Request request = new 
Request(HttpPost.METHOD_NAME, endpoint); - Params params = new Params(request); - params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); - request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java index f1da9af4a1e72..9217b0b4e5550 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java @@ -130,7 +130,8 @@ protected static void createPipeline(String pipelineId) throws IOException { } protected static void createPipeline(PutPipelineRequest putPipelineRequest) throws IOException { - assertOK(client().performRequest(RequestConverters.putPipeline(putPipelineRequest))); + assertTrue(execute( + putPipelineRequest, highLevelClient().ingest()::putPipeline, highLevelClient().ingest()::putPipelineAsync).isAcknowledged()); } protected static void clusterUpdateSettings(Settings persistentSettings, diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestRequestConvertersTests.java new file mode 100644 index 0000000000000..a615757fa22a9 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestRequestConvertersTests.java @@ -0,0 +1,120 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.client; + +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.action.ingest.DeletePipelineRequest; +import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.junit.Assert; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.Map; +import java.util.StringJoiner; + +public class IngestRequestConvertersTests extends ESTestCase { + + public void testPutPipeline() throws IOException { + String pipelineId = "some_pipeline_id"; + PutPipelineRequest request = new PutPipelineRequest( + "some_pipeline_id", + new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + Map expectedParams = new HashMap<>(); + RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + + Request expectedRequest = IngestRequestConverters.putPipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + endpoint.add(pipelineId); + Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + Assert.assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod()); + Assert.assertEquals(expectedParams, expectedRequest.getParameters()); + } + + public void testGetPipeline() { + String pipelineId = "some_pipeline_id"; + Map expectedParams = new HashMap<>(); + GetPipelineRequest request = new GetPipelineRequest("some_pipeline_id"); + RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + Request expectedRequest = IngestRequestConverters.getPipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + endpoint.add(pipelineId); + Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + Assert.assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod()); + Assert.assertEquals(expectedParams, expectedRequest.getParameters()); + } + + public void testDeletePipeline() { + String pipelineId = "some_pipeline_id"; + Map expectedParams = new HashMap<>(); + DeletePipelineRequest request = new DeletePipelineRequest(pipelineId); + RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + Request expectedRequest = IngestRequestConverters.deletePipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + endpoint.add(pipelineId); + Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + Assert.assertEquals(HttpDelete.METHOD_NAME, expectedRequest.getMethod()); + Assert.assertEquals(expectedParams, expectedRequest.getParameters()); + } + + public void testSimulatePipeline() throws IOException { + String pipelineId = ESTestCase.randomBoolean() ? 
"some_pipeline_id" : null; + boolean verbose = ESTestCase.randomBoolean(); + String json = "{\"pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," + + "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(json.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + request.setId(pipelineId); + request.setVerbose(verbose); + Map expectedParams = new HashMap<>(); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request expectedRequest = IngestRequestConverters.simulatePipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + if (pipelineId != null && !pipelineId.isEmpty()) + endpoint.add(pipelineId); + endpoint.add("_simulate"); + Assert.assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + Assert.assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod()); + Assert.assertEquals(expectedParams, expectedRequest.getParameters()); + RequestConvertersTests.assertToXContentBody(request, expectedRequest.getEntity()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 5d2f4cb74d66f..840df49b47811 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -65,10 +65,6 @@ import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.GetPipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -135,7 +131,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1809,83 +1804,6 @@ private static void resizeTest(ResizeType resizeType, CheckedFunction expectedParams = new HashMap<>(); - setRandomMasterTimeout(request, expectedParams); - setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - - Request expectedRequest = RequestConverters.putPipeline(request); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - endpoint.add("_ingest/pipeline"); - endpoint.add(pipelineId); - assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); - assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod()); - assertEquals(expectedParams, expectedRequest.getParameters()); - } - - public void testGetPipeline() { - String pipelineId = "some_pipeline_id"; - Map expectedParams = new HashMap<>(); - GetPipelineRequest request = new GetPipelineRequest("some_pipeline_id"); - setRandomMasterTimeout(request, expectedParams); - Request expectedRequest = RequestConverters.getPipeline(request); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - 
endpoint.add("_ingest/pipeline"); - endpoint.add(pipelineId); - assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); - assertEquals(HttpGet.METHOD_NAME, expectedRequest.getMethod()); - assertEquals(expectedParams, expectedRequest.getParameters()); - } - - public void testDeletePipeline() { - String pipelineId = "some_pipeline_id"; - Map expectedParams = new HashMap<>(); - DeletePipelineRequest request = new DeletePipelineRequest(pipelineId); - setRandomMasterTimeout(request, expectedParams); - setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - Request expectedRequest = RequestConverters.deletePipeline(request); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - endpoint.add("_ingest/pipeline"); - endpoint.add(pipelineId); - assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); - assertEquals(HttpDelete.METHOD_NAME, expectedRequest.getMethod()); - assertEquals(expectedParams, expectedRequest.getParameters()); - } - - public void testSimulatePipeline() throws IOException { - String pipelineId = randomBoolean() ? "some_pipeline_id" : null; - boolean verbose = randomBoolean(); - String json = "{\"pipeline\":{" + - "\"description\":\"_description\"," + - "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," + - "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}"; - SimulatePipelineRequest request = new SimulatePipelineRequest( - new BytesArray(json.getBytes(StandardCharsets.UTF_8)), - XContentType.JSON - ); - request.setId(pipelineId); - request.setVerbose(verbose); - Map expectedParams = new HashMap<>(); - expectedParams.put("verbose", Boolean.toString(verbose)); - - Request expectedRequest = RequestConverters.simulatePipeline(request); - StringJoiner endpoint = new StringJoiner("/", "/", ""); - endpoint.add("_ingest/pipeline"); - if (pipelineId != null && !pipelineId.isEmpty()) - endpoint.add(pipelineId); - endpoint.add("_simulate"); - assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); - assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod()); - assertEquals(expectedParams, expectedRequest.getParameters()); - assertToXContentBody(request, expectedRequest.getEntity()); - } - public void testRollover() throws IOException { RolloverRequest rolloverRequest = new RolloverRequest(randomAlphaOfLengthBetween(3, 10), randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10)); From 0685c951f2ad090e911f398ad7b3f04d912c8d43 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 16:22:07 -0400 Subject: [PATCH 45/91] Test: Fix test name (#33510) This test has the wrong name and hasn't been automatically running. This fixes the name so we'll run it. 
--- ...rRefreshAndCloseTests.java => WaitForRefreshAndCloseIT.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/{WaitForRefreshAndCloseTests.java => WaitForRefreshAndCloseIT.java} (98%) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java similarity index 98% rename from distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java rename to distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index 756d26745b2cb..0690c18b27254 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -37,7 +37,7 @@ /** * Tests that wait for refresh is fired if the index is closed. */ -public class WaitForRefreshAndCloseTests extends ESRestTestCase { +public class WaitForRefreshAndCloseIT extends ESRestTestCase { @Before public void setupIndex() throws IOException { try { From 609a167c45347c92c945c39882d24e79d8d26941 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 16:45:51 -0400 Subject: [PATCH 46/91] CRUD: Disable wait for refresh tests with delete They look to be broken, sadly. Tracked by #33533 --- .../org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index 0690c18b27254..09d6b9e51a5fc 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.apache.http.util.EntityUtils; +import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; @@ -75,6 +76,7 @@ public void testUpdateAndThenClose() throws Exception { closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533") public void testDeleteAndThenClose() throws Exception { Request request = new Request("PUT", docPath()); request.setJsonEntity("{\"test\":\"test\"}"); From 6b780e9926b33d2920be22005ca414cf9819e838 Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 7 Sep 2018 16:53:36 -0700 Subject: [PATCH 47/91] [DOCS] Fixing formatting issues in breaking changes --- docs/reference/migration/migrate_7_0/api.asciidoc | 2 +- docs/reference/migration/migrate_7_0/java.asciidoc | 2 +- docs/reference/migration/migrate_7_0/mappings.asciidoc | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/migration/migrate_7_0/api.asciidoc b/docs/reference/migration/migrate_7_0/api.asciidoc index 689b941ef6b6b..ce2d817ac5044 100644 --- a/docs/reference/migration/migrate_7_0/api.asciidoc +++ 
b/docs/reference/migration/migrate_7_0/api.asciidoc
@@ -1,5 +1,5 @@
 [[breaking_70_api_changes]]
-=== Breaking API changes in 7.0
+=== API changes
 
 ==== Camel case and underscore parameters deprecated in 6.x have been removed
 A number of duplicate parameters deprecated in 6.x have been removed from
diff --git a/docs/reference/migration/migrate_7_0/java.asciidoc b/docs/reference/migration/migrate_7_0/java.asciidoc
index 4a3040507c961..ea2632832911d 100644
--- a/docs/reference/migration/migrate_7_0/java.asciidoc
+++ b/docs/reference/migration/migrate_7_0/java.asciidoc
@@ -13,7 +13,7 @@ The `prepareExecute` method which created a request builder has been
 removed from the client api. Instead, construct
 a builder for the appropriate request directly.
 
-=== Some Aggregation classes have moved packages
+==== Some Aggregation classes have moved packages
 
 * All classes present in `org.elasticsearch.search.aggregations.metrics.*` packages
 were moved to a single `org.elasticsearch.search.aggregations.metrics` package.
diff --git a/docs/reference/migration/migrate_7_0/mappings.asciidoc b/docs/reference/migration/migrate_7_0/mappings.asciidoc
index c56a0ae9b6422..4983cb2da579a 100644
--- a/docs/reference/migration/migrate_7_0/mappings.asciidoc
+++ b/docs/reference/migration/migrate_7_0/mappings.asciidoc
@@ -31,7 +31,7 @@ the index setting `index.mapping.nested_objects.limit`.
 
 This option is useless now that all indices have at most one type.
 
-=== The `classic` similarity has been removed
+==== The `classic` similarity has been removed
 
 The `classic` similarity relied on coordination factors for scoring to be good
 in presence of stopwords in the query. This feature has been removed from
@@ -39,7 +39,7 @@ Lucene, which means that the `classic` similarity now produces scores of lower
 quality. It is advised to switch to `BM25` instead, which is widely accepted
 as a better alternative.
 
-=== Similarities fail when unsupported options are provided
+==== Similarities fail when unsupported options are provided
 
 An error will now be thrown when unknown configuration options are provided
 to similarities. Such unknown parameters were ignored before.

From 9a404f3def3f2b54e28cf8c15ff8ec1de1a25672 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Fri, 7 Sep 2018 20:09:53 -0400
Subject: [PATCH 48/91] Include fallback settings when checking dependencies
 (#33522)

Today when checking settings dependencies, we do not check if fallback
settings are present. This means, for example, that if
cluster.remote.*.seeds falls back to search.remote.*.seeds, and
cluster.remote.*.skip_unavailable and search.remote.*.skip_unavailable
depend on cluster.remote.*.seeds, and we have set search.remote.*.seeds
and search.remote.*.skip_unavailable, then validation will fail because
it is expected that cluster.remote.*.seeds is set here. This commit
addresses this by also checking fallback settings when validating
dependencies. To do this, we adjust the settings exist method to also
check for fallback settings, a case that it was not handling previously.
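
As a rough sketch of the scenario (illustration only, not part of this
change; the "foo" namespace and the identity parser are placeholders):

    // cluster.remote.foo.seeds falls back to the old search.remote.foo.seeds
    Setting<List<String>> oldSeeds = Setting.listSetting(
            "search.remote.foo.seeds", Collections.emptyList(), Function.identity(), Property.NodeScope);
    Setting<List<String>> newSeeds = Setting.listSetting(
            "cluster.remote.foo.seeds", oldSeeds, Function.identity(), Property.NodeScope);

    // only the fallback key is configured, as in the failing case described above
    Settings settings = Settings.builder().putList("search.remote.foo.seeds", "127.0.0.1:9300").build();

    newSeeds.exists(settings);                 // false: the concrete key is absent
    newSeeds.existsOrFallbackExists(settings); // true: the fallback satisfies the dependency

Dependency validation now uses existsOrFallbackExists rather than checking
raw setting keys, which is why the skip_unavailable settings validate here.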
--- ...rossClusterSearchUnavailableClusterIT.java | 4 +- .../settings/AbstractScopedSettings.java | 16 +- .../common/settings/Setting.java | 138 ++++++++++++------ .../common/settings/ScopedSettingsTests.java | 30 +++- .../common/settings/SettingTests.java | 26 ++++ .../indices/settings/UpdateSettingsIT.java | 16 +- .../transport/RemoteClusterServiceTests.java | 2 +- .../xpack/security/Security.java | 8 +- 8 files changed, 171 insertions(+), 69 deletions(-) diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index fbcf55c91b739..0c42e4be89ac1 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -235,7 +235,7 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); assertThat(responseException.getMessage(), - containsString("Missing required setting [cluster.remote.remote1.seeds] " + + containsString("missing required setting [cluster.remote.remote1.seeds] " + "for setting [cluster.remote.remote1.skip_unavailable]")); } @@ -251,7 +251,7 @@ public void testSkipUnavailableDependsOnSeeds() throws IOException { ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest(request)); assertEquals(400, responseException.getResponse().getStatusLine().getStatusCode()); - assertThat(responseException.getMessage(), containsString("Missing required setting [cluster.remote.remote1.seeds] " + + assertThat(responseException.getMessage(), containsString("missing required setting [cluster.remote.remote1.seeds] " + "for setting [cluster.remote.remote1.skip_unavailable]")); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 52439f7c89d14..a77d739ffe0b4 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -32,6 +32,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; @@ -461,16 +462,19 @@ void validate( } throw new IllegalArgumentException(msg); } else { - Set settingsDependencies = setting.getSettingsDependencies(key); + Set> settingsDependencies = setting.getSettingsDependencies(key); if (setting.hasComplexMatcher()) { setting = setting.getConcreteSetting(key); } if (validateDependencies && settingsDependencies.isEmpty() == false) { - Set settingKeys = settings.keySet(); - for (String requiredSetting : settingsDependencies) { - if (settingKeys.contains(requiredSetting) == false) { - throw new IllegalArgumentException("Missing required setting [" - + requiredSetting + "] for setting [" + setting.getKey() + "]"); + for (final Setting settingDependency : settingsDependencies) { + if (settingDependency.existsOrFallbackExists(settings) == false) { + final String message = String.format( + Locale.ROOT, + "missing required setting [%s] for setting 
[%s]", + settingDependency.getKey(), + setting.getKey()); + throw new IllegalArgumentException(message); } } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index eabf2ef498406..89bbe752a1ffc 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -366,12 +366,25 @@ public T getDefault(Settings settings) { } /** - * Returns true iff this setting is present in the given settings object. Otherwise false + * Returns true if and only if this setting is present in the given settings instance. Note that fallback settings are excluded. + * + * @param settings the settings + * @return true if the setting is present in the given settings instance, otherwise false */ - public boolean exists(Settings settings) { + public boolean exists(final Settings settings) { return settings.keySet().contains(getKey()); } + /** + * Returns true if and only if this setting including fallback settings is present in the given settings instance. + * + * @param settings the settings + * @return true if the setting including fallback settings is present in the given settings instance, otherwise false + */ + public boolean existsOrFallbackExists(final Settings settings) { + return settings.keySet().contains(getKey()) || (fallbackSetting != null && fallbackSetting.existsOrFallbackExists(settings)); + } + /** * Returns the settings value. If the setting is not present in the given settings object the default value is returned * instead. @@ -511,7 +524,7 @@ public Setting getConcreteSetting(String key) { * Returns a set of settings that are required at validation time. Unless all of the dependencies are present in the settings * object validation of setting must fail. */ - public Set getSettingsDependencies(String key) { + public Set> getSettingsDependencies(String key) { return Collections.emptySet(); } @@ -634,12 +647,12 @@ private Stream matchStream(Settings settings) { return settings.keySet().stream().filter(this::match).map(key::getConcreteString); } - public Set getSettingsDependencies(String settingsKey) { + public Set> getSettingsDependencies(String settingsKey) { if (dependencies.isEmpty()) { return Collections.emptySet(); } else { String namespace = key.getNamespace(settingsKey); - return dependencies.stream().map(s -> s.key.toConcreteKey(namespace).key).collect(Collectors.toSet()); + return dependencies.stream().map(s -> (Setting)s.getConcreteSettingForNamespace(namespace)).collect(Collectors.toSet()); } } @@ -914,40 +927,6 @@ public String toString() { } } - private static class ListSetting extends Setting> { - private final Function> defaultStringValue; - - private ListSetting(String key, Function> defaultStringValue, Function> parser, - Property... properties) { - super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser, - properties); - this.defaultStringValue = defaultStringValue; - } - - @Override - String innerGetRaw(final Settings settings) { - List array = settings.getAsList(getKey(), null); - return array == null ? 
defaultValue.apply(settings) : arrayToParsableString(array); - } - - @Override - boolean hasComplexMatcher() { - return true; - } - - @Override - public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { - if (exists(source) == false) { - List asList = defaultSettings.getAsList(getKey(), null); - if (asList == null) { - builder.putList(getKey(), defaultStringValue.apply(defaultSettings)); - } else { - builder.putList(getKey(), asList); - } - } - } - } - private final class Updater implements AbstractScopedSettings.SettingUpdater { private final Consumer consumer; private final Logger logger; @@ -1209,26 +1188,44 @@ public static Setting memorySizeSetting(String key, String defaul return new Setting<>(key, (s) -> defaultPercentage, (s) -> MemorySizeValue.parseBytesSizeValueOrHeapRatio(s, key), properties); } - public static Setting> listSetting(String key, List defaultStringValue, Function singleValueParser, - Property... properties) { - return listSetting(key, (s) -> defaultStringValue, singleValueParser, properties); + public static Setting> listSetting( + final String key, + final List defaultStringValue, + final Function singleValueParser, + final Property... properties) { + return listSetting(key, null, singleValueParser, (s) -> defaultStringValue, properties); } // TODO this one's two argument get is still broken - public static Setting> listSetting(String key, Setting> fallbackSetting, Function singleValueParser, - Property... properties) { - return listSetting(key, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), singleValueParser, properties); + public static Setting> listSetting( + final String key, + final Setting> fallbackSetting, + final Function singleValueParser, + final Property... properties) { + return listSetting(key, fallbackSetting, singleValueParser, (s) -> parseableStringToList(fallbackSetting.getRaw(s)), properties); + } + + public static Setting> listSetting( + final String key, + final Function singleValueParser, + final Function> defaultStringValue, + final Property... properties) { + return listSetting(key, null, singleValueParser, defaultStringValue, properties); } - public static Setting> listSetting(String key, Function> defaultStringValue, - Function singleValueParser, Property... properties) { + public static Setting> listSetting( + final String key, + final @Nullable Setting> fallbackSetting, + final Function singleValueParser, + final Function> defaultStringValue, + final Property... properties) { if (defaultStringValue.apply(Settings.EMPTY) == null) { throw new IllegalArgumentException("default value function must not return null"); } Function> parser = (s) -> parseableStringToList(s).stream().map(singleValueParser).collect(Collectors.toList()); - return new ListSetting<>(key, defaultStringValue, parser, properties); + return new ListSetting<>(key, fallbackSetting, defaultStringValue, parser, properties); } private static List parseableStringToList(String parsableString) { @@ -1266,6 +1263,51 @@ private static String arrayToParsableString(List array) { } } + private static class ListSetting extends Setting> { + + private final Function> defaultStringValue; + + private ListSetting( + final String key, + final @Nullable Setting> fallbackSetting, + final Function> defaultStringValue, + final Function> parser, + final Property... 
properties) { + super( + new ListKey(key), + fallbackSetting, + (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), + parser, + (v,s) -> {}, + properties); + this.defaultStringValue = defaultStringValue; + } + + @Override + String innerGetRaw(final Settings settings) { + List array = settings.getAsList(getKey(), null); + return array == null ? defaultValue.apply(settings) : arrayToParsableString(array); + } + + @Override + boolean hasComplexMatcher() { + return true; + } + + @Override + public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) { + if (exists(source) == false) { + List asList = defaultSettings.getAsList(getKey(), null); + if (asList == null) { + builder.putList(getKey(), defaultStringValue.apply(defaultSettings)); + } else { + builder.putList(getKey(), asList); + } + } + } + + } + static void logSettingUpdate(Setting setting, Settings current, Settings previous, Logger logger) { if (logger.isInfoEnabled()) { if (setting.isFiltered()) { diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index af2d874a67941..f0f8b6c417f2f 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -171,7 +171,7 @@ public void testDependentSettings() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> service.validate(Settings.builder().put("foo.test.bar", 7).build(), true)); - assertEquals("Missing required setting [foo.test.name] for setting [foo.test.bar]", iae.getMessage()); + assertEquals("missing required setting [foo.test.name] for setting [foo.test.bar]", iae.getMessage()); service.validate(Settings.builder() .put("foo.test.name", "test") @@ -181,6 +181,34 @@ public void testDependentSettings() { service.validate(Settings.builder().put("foo.test.bar", 7).build(), false); } + public void testDependentSettingsWithFallback() { + Setting.AffixSetting nameFallbackSetting = + Setting.affixKeySetting("fallback.", "name", k -> Setting.simpleString(k, Property.Dynamic, Property.NodeScope)); + Setting.AffixSetting nameSetting = Setting.affixKeySetting( + "foo.", + "name", + k -> Setting.simpleString( + k, + "_na_".equals(k) + ? 
nameFallbackSetting.getConcreteSettingForNamespace(k) + : nameFallbackSetting.getConcreteSetting(k.replaceAll("^foo", "fallback")), + Property.Dynamic, + Property.NodeScope)); + Setting.AffixSetting barSetting = + Setting.affixKeySetting("foo.", "bar", k -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope), nameSetting); + + final AbstractScopedSettings service = + new ClusterSettings(Settings.EMPTY,new HashSet<>(Arrays.asList(nameFallbackSetting, nameSetting, barSetting))); + + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> service.validate(Settings.builder().put("foo.test.bar", 7).build(), true)); + assertThat(e, hasToString(containsString("missing required setting [foo.test.name] for setting [foo.test.bar]"))); + + service.validate(Settings.builder().put("foo.test.name", "test").put("foo.test.bar", 7).build(), true); + service.validate(Settings.builder().put("fallback.test.name", "test").put("foo.test.bar", 7).build(), true); + } + public void testTupleAffixUpdateConsumer() { String prefix = randomAlphaOfLength(3) + "foo."; String intSuffix = randomAlphaOfLength(3); diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index d82b620660249..b13988b705059 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -856,4 +856,30 @@ public void testAffixNamespacesWithGroupSetting() { assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix", "anything").build()), hasSize(1)); assertThat(affixSetting.getNamespaces(Settings.builder().put("prefix.infix.suffix.anything", "anything").build()), hasSize(1)); } + + public void testExists() { + final Setting fooSetting = Setting.simpleString("foo", Property.NodeScope); + assertFalse(fooSetting.exists(Settings.EMPTY)); + assertTrue(fooSetting.exists(Settings.builder().put("foo", "bar").build())); + } + + public void testExistsWithFallback() { + final int count = randomIntBetween(1, 16); + Setting current = Setting.simpleString("fallback0", Property.NodeScope); + for (int i = 1; i < count; i++) { + final Setting next = + new Setting<>(new Setting.SimpleKey("fallback" + i), current, Function.identity(), Property.NodeScope); + current = next; + } + final Setting fooSetting = new Setting<>(new Setting.SimpleKey("foo"), current, Function.identity(), Property.NodeScope); + assertFalse(fooSetting.exists(Settings.EMPTY)); + if (randomBoolean()) { + assertTrue(fooSetting.exists(Settings.builder().put("foo", "bar").build())); + } else { + final String setting = "fallback" + randomIntBetween(0, count - 1); + assertFalse(fooSetting.exists(Settings.builder().put(setting, "bar").build())); + assertTrue(fooSetting.existsOrFallbackExists(Settings.builder().put(setting, "bar").build())); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index fbf1dcd5b33ec..33e9af91501d8 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -129,18 +129,18 @@ public void testUpdateDependentClusterSettings() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> 
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() .put("cluster.acc.test.pw", "asdf")).get()); - assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); iae = expectThrows(IllegalArgumentException.class, () -> client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .put("cluster.acc.test.pw", "asdf")).get()); - assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); iae = expectThrows(IllegalArgumentException.class, () -> client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .put("cluster.acc.test.pw", "asdf")).setPersistentSettings(Settings.builder() .put("cluster.acc.test.user", "asdf")).get()); - assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); if (randomBoolean()) { client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() @@ -149,7 +149,7 @@ public void testUpdateDependentClusterSettings() { iae = expectThrows(IllegalArgumentException.class, () -> client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .putNull("cluster.acc.test.user")).get()); - assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() .putNull("cluster.acc.test.pw") .putNull("cluster.acc.test.user")).get(); @@ -161,7 +161,7 @@ public void testUpdateDependentClusterSettings() { iae = expectThrows(IllegalArgumentException.class, () -> client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() .putNull("cluster.acc.test.user")).get()); - assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() .putNull("cluster.acc.test.pw") @@ -173,7 +173,7 @@ public void testUpdateDependentClusterSettings() { public void testUpdateDependentIndexSettings() { IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> prepareCreate("test", Settings.builder().put("index.acc.test.pw", "asdf")).get()); - assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); createIndex("test"); for (int i = 0; i < 2; i++) { @@ -192,7 +192,7 @@ public void testUpdateDependentIndexSettings() { .put("index.acc.test.pw", "asdf")) .execute() .actionGet()); - assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + 
assertEquals("missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); // user has no dependency client() @@ -227,7 +227,7 @@ public void testUpdateDependentIndexSettings() { .putNull("index.acc.test.user")) .execute() .actionGet()); - assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + assertEquals("missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); // now we are consistent client() diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index 0abde8839b44b..9732edb42276e 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -714,7 +714,7 @@ public void testRemoteClusterSkipIfDisconnectedSetting() { { Settings settings = Settings.builder().put("cluster.remote.foo.skip_unavailable", randomBoolean()).build(); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> service.validate(settings, true)); - assertEquals("Missing required setting [cluster.remote.foo.seeds] for setting [cluster.remote.foo.skip_unavailable]", + assertEquals("missing required setting [cluster.remote.foo.seeds] for setting [cluster.remote.foo.skip_unavailable]", iae.getMessage()); } { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index e8bcf42233a7b..363cc7bb8827d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -257,9 +257,11 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw static final Setting> AUDIT_OUTPUTS_SETTING = Setting.listSetting(SecurityField.setting("audit.outputs"), - s -> s.keySet().contains(SecurityField.setting("audit.outputs")) ? - Collections.emptyList() : Collections.singletonList(LoggingAuditTrail.NAME), - Function.identity(), Property.NodeScope); + Function.identity(), + s -> s.keySet().contains(SecurityField.setting("audit.outputs")) + ? Collections.emptyList() + : Collections.singletonList(LoggingAuditTrail.NAME), + Property.NodeScope); private final Settings settings; private final Environment env; From facec187bbee24e3c3c43547e5d785191f2fd6d5 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Fri, 7 Sep 2018 18:16:07 -0700 Subject: [PATCH 49/91] Painless: Add Imported Static Method (#33440) Allow static methods to be imported in Painless and called using just the method name. 
--- .../elasticsearch/painless/spi/Whitelist.java | 11 +- .../painless/spi/WhitelistLoader.java | 51 +++--- .../elasticsearch/painless/FeatureTest.java | 26 ++- .../painless/lookup/PainlessLookup.java | 15 ++ .../lookup/PainlessLookupBuilder.java | 171 +++++++++++++++++- .../painless/node/ECallLocal.java | 51 ++++-- .../painless/spi/org.elasticsearch.txt | 3 +- .../elasticsearch/painless/BasicAPITests.java | 4 + .../painless/ScriptTestCase.java | 8 +- 9 files changed, 282 insertions(+), 58 deletions(-) diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index 7acbff6cb0b93..31a9e595d0b6d 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -61,12 +61,19 @@ public final class Whitelist { /** The {@link List} of all the whitelisted Painless classes. */ public final List whitelistClasses; + /** The {@link List} of all the whitelisted static Painless methods. */ + public final List whitelistImportedMethods; + + /** The {@link List} of all the whitelisted Painless bindings. */ public final List whitelistBindings; - /** Standard constructor. All values must be not {@code null}. */ - public Whitelist(ClassLoader classLoader, List whitelistClasses, List whitelistBindings) { + /** Standard constructor. All values must be not {@code null}. */ + public Whitelist(ClassLoader classLoader, List whitelistClasses, + List whitelistImportedMethods, List whitelistBindings) { + this.classLoader = Objects.requireNonNull(classLoader); this.whitelistClasses = Collections.unmodifiableList(Objects.requireNonNull(whitelistClasses)); + this.whitelistImportedMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistImportedMethods)); this.whitelistBindings = Collections.unmodifiableList(Objects.requireNonNull(whitelistBindings)); } } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index 0279c82f1b67b..2f5dec769fc2f 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -133,6 +133,7 @@ public final class WhitelistLoader { */ public static Whitelist loadFromResourceFiles(Class resource, String... filepaths) { List whitelistClasses = new ArrayList<>(); + List whitelistStatics = new ArrayList<>(); List whitelistBindings = new ArrayList<>(); // Execute a single pass through the whitelist text files. This will gather all the @@ -192,18 +193,18 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep whitelistConstructors = new ArrayList<>(); whitelistMethods = new ArrayList<>(); whitelistFields = new ArrayList<>(); - } else if (line.startsWith("static ")) { + } else if (line.startsWith("static_import ")) { // Ensure the final token of the line is '{'. 
if (line.endsWith("{") == false) { throw new IllegalArgumentException( - "invalid static definition: failed to parse static opening bracket [" + line + "]"); + "invalid static import definition: failed to parse static import opening bracket [" + line + "]"); } if (parseType != null) { - throw new IllegalArgumentException("invalid definition: cannot embed static definition [" + line + "]"); + throw new IllegalArgumentException("invalid definition: cannot embed static import definition [" + line + "]"); } - parseType = "static"; + parseType = "static_import"; // Handle the end of a definition and reset all previously gathered values. // Expects the following format: '}' '\n' @@ -229,9 +230,9 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep // Reset the parseType. parseType = null; - // Handle static definition types. - // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' 'bound_to' ID '\n' - } else if ("static".equals(parseType)) { + // Handle static import definition types. + // Expects the following format: ID ID '(' ( ID ( ',' ID )* )? ')' ( 'from_class' | 'bound_to' ) ID '\n' + } else if ("static_import".equals(parseType)) { // Mark the origin of this parsable object. String origin = "[" + filepath + "]:[" + number + "]"; @@ -240,7 +241,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep if (parameterStartIndex == -1) { throw new IllegalArgumentException( - "illegal static definition: start of method parameters not found [" + line + "]"); + "illegal static import definition: start of method parameters not found [" + line + "]"); } String[] tokens = line.substring(0, parameterStartIndex).trim().split("\\s+"); @@ -261,7 +262,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep if (parameterEndIndex == -1) { throw new IllegalArgumentException( - "illegal static definition: end of method parameters not found [" + line + "]"); + "illegal static import definition: end of method parameters not found [" + line + "]"); } String[] canonicalTypeNameParameters = @@ -272,39 +273,37 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep canonicalTypeNameParameters = new String[0]; } - // Parse the static type and class. + // Parse the static import type and class. tokens = line.substring(parameterEndIndex + 1).trim().split("\\s+"); - String staticType; + String staticImportType; String targetJavaClassName; // Based on the number of tokens, look up the type and class. if (tokens.length == 2) { - staticType = tokens[0]; + staticImportType = tokens[0]; targetJavaClassName = tokens[1]; } else { - throw new IllegalArgumentException("invalid static definition: unexpected format [" + line + "]"); + throw new IllegalArgumentException("invalid static import definition: unexpected format [" + line + "]"); } - // Check the static type is valid. - if ("bound_to".equals(staticType) == false) { - throw new IllegalArgumentException( - "invalid static definition: unexpected static type [" + staticType + "] [" + line + "]"); + // Add a static import method or binding depending on the static import type. 
+ if ("from_class".equals(staticImportType)) { + whitelistStatics.add(new WhitelistMethod(origin, targetJavaClassName, + methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters))); + } else if ("bound_to".equals(staticImportType)) { + whitelistBindings.add(new WhitelistBinding(origin, targetJavaClassName, + methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters))); + } else { + throw new IllegalArgumentException("invalid static import definition: " + + "unexpected static import type [" + staticImportType + "] [" + line + "]"); } - whitelistBindings.add(new WhitelistBinding(origin, targetJavaClassName, - methodName, returnCanonicalTypeName, Arrays.asList(canonicalTypeNameParameters))); - // Handle class definition types. } else if ("class".equals(parseType)) { // Mark the origin of this parsable object. String origin = "[" + filepath + "]:[" + number + "]"; - // Ensure we have a defined class before adding any constructors, methods, augmented methods, or fields. - if (parseType == null) { - throw new IllegalArgumentException("invalid definition: expected one of ['class', 'static'] [" + line + "]"); - } - // Handle the case for a constructor definition. // Expects the following format: '(' ( ID ( ',' ID )* )? ')' '\n' if (line.startsWith("(")) { @@ -393,7 +392,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep ClassLoader loader = AccessController.doPrivileged((PrivilegedAction)resource::getClassLoader); - return new Whitelist(loader, whitelistClasses, whitelistBindings); + return new Whitelist(loader, whitelistClasses, whitelistStatics, whitelistBindings); } private WhitelistLoader() {} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java index 1e94c19f6d90e..28cbb4aee19a6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FeatureTest.java @@ -24,6 +24,21 @@ /** Currently just a dummy class for testing a few features not yet exposed by whitelist! */ public class FeatureTest { + /** static method that returns true */ + public static boolean overloadedStatic() { + return true; + } + + /** static method that returns what you ask it */ + public static boolean overloadedStatic(boolean whatToReturn) { + return whatToReturn; + } + + /** static method only whitelisted as a static */ + public static float staticAddFloatsTest(float x, float y) { + return x + y; + } + private int x; private int y; public int z; @@ -58,21 +73,12 @@ public void setY(int y) { this.y = y; } - /** static method that returns true */ - public static boolean overloadedStatic() { - return true; - } - - /** static method that returns what you ask it */ - public static boolean overloadedStatic(boolean whatToReturn) { - return whatToReturn; - } - /** method taking two functions! 
*/ public Object twoFunctionsOfX(Function f, Function g) { return f.apply(g.apply(x)); } + /** method to take in a list */ public void listInput(List list) { } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 2d6ed3e361dc3..7be659d11a124 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -37,16 +37,23 @@ public final class PainlessLookup { private final Map> canonicalClassNamesToClasses; private final Map, PainlessClass> classesToPainlessClasses; + private final Map painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessBindings; PainlessLookup(Map> canonicalClassNamesToClasses, Map, PainlessClass> classesToPainlessClasses, + Map painlessMethodKeysToImportedPainlessMethods, Map painlessMethodKeysToPainlessBindings) { + Objects.requireNonNull(canonicalClassNamesToClasses); Objects.requireNonNull(classesToPainlessClasses); + Objects.requireNonNull(painlessMethodKeysToImportedPainlessMethods); + Objects.requireNonNull(painlessMethodKeysToPainlessBindings); + this.canonicalClassNamesToClasses = Collections.unmodifiableMap(canonicalClassNamesToClasses); this.classesToPainlessClasses = Collections.unmodifiableMap(classesToPainlessClasses); + this.painlessMethodKeysToImportedPainlessMethods = Collections.unmodifiableMap(painlessMethodKeysToImportedPainlessMethods); this.painlessMethodKeysToPainlessBindings = Collections.unmodifiableMap(painlessMethodKeysToPainlessBindings); } @@ -167,6 +174,14 @@ public PainlessField lookupPainlessField(Class targetClass, boolean isStatic, return painlessField; } + public PainlessMethod lookupImportedPainlessMethod(String methodName, int arity) { + Objects.requireNonNull(methodName); + + String painlessMethodKey = buildPainlessMethodKey(methodName, arity); + + return painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); + } + public PainlessBinding lookupPainlessBinding(String methodName, int arity) { Objects.requireNonNull(methodName); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index a64814f866113..b822bd47c7a48 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -243,6 +243,14 @@ public static PainlessLookup buildFromWhitelists(List whitelists) { } } + for (WhitelistMethod whitelistStatic : whitelist.whitelistImportedMethods) { + origin = whitelistStatic.origin; + painlessLookupBuilder.addImportedPainlessMethod( + whitelist.classLoader, whitelistStatic.augmentedCanonicalClassName, + whitelistStatic.methodName, whitelistStatic.returnCanonicalTypeName, + whitelistStatic.canonicalTypeNameParameters); + } + for (WhitelistBinding whitelistBinding : whitelist.whitelistBindings) { origin = whitelistBinding.origin; painlessLookupBuilder.addPainlessBinding( @@ -261,12 +269,14 @@ public static PainlessLookup buildFromWhitelists(List whitelists) { private final Map> canonicalClassNamesToClasses; private final Map, PainlessClassBuilder> classesToPainlessClassBuilders; + private final Map 
painlessMethodKeysToImportedPainlessMethods; private final Map painlessMethodKeysToPainlessBindings; public PainlessLookupBuilder() { canonicalClassNamesToClasses = new HashMap<>(); classesToPainlessClassBuilders = new HashMap<>(); + painlessMethodKeysToImportedPainlessMethods = new HashMap<>(); painlessMethodKeysToPainlessBindings = new HashMap<>(); } @@ -513,8 +523,9 @@ public void addPainlessMethod(ClassLoader classLoader, String targetCanonicalCla addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters); } - public void addPainlessMethod(Class targetClass, Class augmentedClass, String methodName, - Class returnType, List> typeParameters) { + public void addPainlessMethod(Class targetClass, Class augmentedClass, + String methodName, Class returnType, List> typeParameters) { + Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); Objects.requireNonNull(returnType); @@ -573,6 +584,12 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, Str } else { try { javaMethod = augmentedClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + + if (Modifier.isStatic(javaMethod.getModifiers()) == false) { + throw new IllegalArgumentException("method [[" + targetCanonicalClassName + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "] with augmented class " + + "[" + typeToCanonicalTypeName(augmentedClass) + "] must be static"); + } } catch (NoSuchMethodException nsme) { throw new IllegalArgumentException("method reflection object [[" + targetCanonicalClassName + "], " + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found " + @@ -620,7 +637,7 @@ public void addPainlessMethod(Class targetClass, Class augmentedClass, Str "with the same arity and different return type or type parameters"); } } else { - PainlessMethod painlessMethod = painlessClassBuilder.staticMethods.get(painlessMethodKey); + PainlessMethod painlessMethod = painlessClassBuilder.methods.get(painlessMethodKey); if (painlessMethod == null) { MethodHandle methodHandle; @@ -788,6 +805,146 @@ public void addPainlessField(Class targetClass, String fieldName, Class ty } } + public void addImportedPainlessMethod(ClassLoader classLoader, String targetCanonicalClassName, + String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { + + Objects.requireNonNull(classLoader); + Objects.requireNonNull(targetCanonicalClassName); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnCanonicalTypeName); + Objects.requireNonNull(canonicalTypeNameParameters); + + Class targetClass = canonicalClassNamesToClasses.get(targetCanonicalClassName); + + if (targetClass == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + } + + List> typeParameters = new ArrayList<>(canonicalTypeNameParameters.size()); + + for (String canonicalTypeNameParameter : canonicalTypeNameParameters) { + Class typeParameter = canonicalTypeNameToType(canonicalTypeNameParameter); + + if (typeParameter == null) { + throw new IllegalArgumentException("type parameter [" + canonicalTypeNameParameter + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + } + + typeParameters.add(typeParameter); + } + + Class returnType = 
canonicalTypeNameToType(returnCanonicalTypeName); + + if (returnType == null) { + throw new IllegalArgumentException("return type [" + returnCanonicalTypeName + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + canonicalTypeNameParameters + "]"); + } + + addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters); + } + + public void addImportedPainlessMethod(Class targetClass, String methodName, Class returnType, List> typeParameters) { + Objects.requireNonNull(targetClass); + Objects.requireNonNull(methodName); + Objects.requireNonNull(returnType); + Objects.requireNonNull(typeParameters); + + if (targetClass == def.class) { + throw new IllegalArgumentException("cannot add imported method from reserved class [" + DEF_CLASS_NAME + "]"); + } + + String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); + + if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + throw new IllegalArgumentException( + "invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."); + } + + PainlessClassBuilder painlessClassBuilder = classesToPainlessClassBuilders.get(targetClass); + + if (painlessClassBuilder == null) { + throw new IllegalArgumentException("target class [" + targetCanonicalClassName + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + int typeParametersSize = typeParameters.size(); + List> javaTypeParameters = new ArrayList<>(typeParametersSize); + + for (Class typeParameter : typeParameters) { + if (isValidType(typeParameter) == false) { + throw new IllegalArgumentException("type parameter [" + typeToCanonicalTypeName(typeParameter) + "] " + + "not found for imported method [[" + targetCanonicalClassName + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + javaTypeParameters.add(typeToJavaType(typeParameter)); + } + + if (isValidType(returnType) == false) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(returnType) + "] not found for imported method " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + Method javaMethod; + + try { + javaMethod = targetClass.getMethod(methodName, javaTypeParameters.toArray(new Class[typeParametersSize])); + } catch (NoSuchMethodException nsme) { + throw new IllegalArgumentException("imported method reflection object [[" + targetCanonicalClassName + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", nsme); + } + + if (javaMethod.getReturnType() != typeToJavaType(returnType)) { + throw new IllegalArgumentException("return type [" + typeToCanonicalTypeName(javaMethod.getReturnType()) + "] " + + "does not match the specified returned type [" + typeToCanonicalTypeName(returnType) + "] " + + "for imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "]"); + } + + if (Modifier.isStatic(javaMethod.getModifiers()) == false) { + throw new IllegalArgumentException("imported method [[" + targetClass.getCanonicalName() + "], [" + methodName + "], " + + typesToCanonicalTypeNames(typeParameters) + "] must be static"); + } + + String painlessMethodKey = buildPainlessMethodKey(methodName, typeParametersSize); + + if 
(painlessMethodKeysToPainlessBindings.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("imported method and binding cannot have the same name [" + methodName + "]"); + } + + PainlessMethod importedPainlessMethod = painlessMethodKeysToImportedPainlessMethods.get(painlessMethodKey); + + if (importedPainlessMethod == null) { + MethodHandle methodHandle; + + try { + methodHandle = MethodHandles.publicLookup().in(targetClass).unreflect(javaMethod); + } catch (IllegalAccessException iae) { + throw new IllegalArgumentException("imported method handle [[" + targetClass.getCanonicalName() + "], " + + "[" + methodName + "], " + typesToCanonicalTypeNames(typeParameters) + "] not found", iae); + } + + MethodType methodType = methodHandle.type(); + + importedPainlessMethod = painlessMethodCache.computeIfAbsent( + new PainlessMethodCacheKey(targetClass, methodName, returnType, typeParameters), + key -> new PainlessMethod(javaMethod, targetClass, returnType, typeParameters, methodHandle, methodType)); + + painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey, importedPainlessMethod); + } else if (importedPainlessMethod.returnType == returnType && + importedPainlessMethod.typeParameters.equals(typeParameters) == false) { + throw new IllegalArgumentException("cannot have imported methods " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(returnType) + "], " + + typesToCanonicalTypeNames(typeParameters) + "] and " + + "[[" + targetCanonicalClassName + "], [" + methodName + "], " + + "[" + typeToCanonicalTypeName(importedPainlessMethod.returnType) + "], " + + typesToCanonicalTypeNames(importedPainlessMethod.typeParameters) + "] " + + "with the same arity and different return type or type parameters"); + } + } + public void addPainlessBinding(ClassLoader classLoader, String targetJavaClassName, String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters) { @@ -937,6 +1094,11 @@ public void addPainlessBinding(Class targetClass, String methodName, Class } String painlessMethodKey = buildPainlessMethodKey(methodName, constructorTypeParametersSize + methodTypeParametersSize); + + if (painlessMethodKeysToImportedPainlessMethods.containsKey(painlessMethodKey)) { + throw new IllegalArgumentException("binding and imported method cannot have the same name [" + methodName + "]"); + } + PainlessBinding painlessBinding = painlessMethodKeysToPainlessBindings.get(painlessMethodKey); if (painlessBinding == null) { @@ -976,7 +1138,8 @@ public PainlessLookup build() { classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build()); } - return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses, painlessMethodKeysToPainlessBindings); + return new PainlessLookup(canonicalClassNamesToClasses, classesToPainlessClasses, + painlessMethodKeysToImportedPainlessMethods, painlessMethodKeysToPainlessBindings); } private void copyPainlessClassMembers() { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index 8ae6ad9723da4..d161296d90a56 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -25,6 +25,7 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; 
import org.elasticsearch.painless.lookup.PainlessBinding; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.objectweb.asm.Label; import org.objectweb.asm.Type; import org.objectweb.asm.commons.Method; @@ -45,6 +46,7 @@ public final class ECallLocal extends AExpression { private final List arguments; private LocalMethod method = null; + private PainlessMethod imported = null; private PainlessBinding binding = null; public ECallLocal(Location location, String name, List arguments) { @@ -65,16 +67,33 @@ void extractVariables(Set variables) { void analyze(Locals locals) { method = locals.getMethod(name, arguments.size()); - if (method == null) { - binding = locals.getPainlessLookup().lookupPainlessBinding(name, arguments.size()); + imported = locals.getPainlessLookup().lookupImportedPainlessMethod(name, arguments.size()); + + if (imported == null) { + binding = locals.getPainlessLookup().lookupPainlessBinding(name, arguments.size()); - if (binding == null) { - throw createError(new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); + if (binding == null) { + throw createError( + new IllegalArgumentException("Unknown call [" + name + "] with [" + arguments.size() + "] arguments.")); + } } } - List> typeParameters = new ArrayList<>(method == null ? binding.typeParameters : method.typeParameters); + List> typeParameters; + + if (method != null) { + typeParameters = new ArrayList<>(method.typeParameters); + actual = method.returnType; + } else if (imported != null) { + typeParameters = new ArrayList<>(imported.typeParameters); + actual = imported.returnType; + } else if (binding != null) { + typeParameters = new ArrayList<>(binding.typeParameters); + actual = binding.returnType; + } else { + throw new IllegalStateException("Illegal tree structure."); + } for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); @@ -86,14 +105,26 @@ void analyze(Locals locals) { } statement = true; - actual = method == null ? 
binding.returnType : method.returnType; } @Override void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); - if (method == null) { + if (method != null) { + for (AExpression argument : arguments) { + argument.write(writer, globals); + } + + writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString())); + } else if (imported != null) { + for (AExpression argument : arguments) { + argument.write(writer, globals); + } + + writer.invokeStatic(Type.getType(imported.targetClass), + new Method(imported.javaMethod.getName(), imported.methodType.toMethodDescriptorString())); + } else if (binding != null) { String name = globals.addBinding(binding.javaConstructor.getDeclaringClass()); Type type = Type.getType(binding.javaConstructor.getDeclaringClass()); int javaConstructorParameterCount = binding.javaConstructor.getParameterCount(); @@ -124,11 +155,7 @@ void write(MethodWriter writer, Globals globals) { writer.invokeVirtual(type, Method.getMethod(binding.javaMethod)); } else { - for (AExpression argument : arguments) { - argument.write(writer, globals); - } - - writer.invokeStatic(CLASS_TYPE, new Method(method.name, method.methodType.toMethodDescriptorString())); + throw new IllegalStateException("Illegal tree structure."); } } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 444234384c6d3..81009de9979eb 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -176,6 +176,7 @@ class org.elasticsearch.painless.FeatureTest no_import { } # for testing -static { +static_import { + float staticAddFloatsTest(float, float) from_class org.elasticsearch.painless.FeatureTest int testAddWithState(int, int, int, double) bound_to org.elasticsearch.painless.BindingTest } \ No newline at end of file diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java index 25866c8d668a3..9863db0b21eac 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicAPITests.java @@ -133,4 +133,8 @@ public void testPublicMemberAccess() { public void testNoSemicolon() { assertEquals(true, exec("def x = true; if (x) return x")); } + + public void testStatic() { + assertEquals(15.5f, exec("staticAddFloatsTest(6.5f, 9.0f)")); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 96cc296a1af52..963a433f172e8 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -47,6 +47,8 @@ * Typically just asserts the output of {@code exec()} */ public abstract class ScriptTestCase extends ESTestCase { + private static final PainlessLookup PAINLESS_LOOKUP = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS); + protected PainlessScriptEngine scriptEngine; @Before @@ -92,12 +94,12 @@ public Object exec(String script, Map vars, boolean picky) { public Object 
exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { - PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(Whitelist.BASE_WHITELISTS); - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class); CompilerSettings pickySettings = new CompilerSettings(); pickySettings.setPicky(true); pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings())); - Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, painlessLookup, null); + Walker.buildPainlessTree( + scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, PAINLESS_LOOKUP, null); } // test actual script execution ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, compileParams); From 0b62d6bb9d7a441ef6802d43dcd4e2452961498f Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 21:27:59 -0400 Subject: [PATCH 50/91] CRUD: AwaitsFix entire wait_for_refresh close test I disabled one branch a few hours ago because it failed in CI. It looks like other branches can also fail, so I'll disable them as well and look more closely on Monday. --- .../org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java index 09d6b9e51a5fc..ffd3a1f6c0c3c 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseIT.java @@ -38,6 +38,7 @@ /** * Tests that wait for refresh is fired if the index is closed. */ +@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533") public class WaitForRefreshAndCloseIT extends ESRestTestCase { @Before public void setupIndex() throws IOException { @@ -76,7 +77,6 @@ public void testUpdateAndThenClose() throws Exception { closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/33533") public void testDeleteAndThenClose() throws Exception { Request request = new Request("PUT", docPath()); request.setJsonEntity("{\"test\":\"test\"}"); From 97736ac46a54073cf7081e9ed08bccbdac79ea6a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 21:42:58 -0400 Subject: [PATCH 51/91] Logging: Skip test if it'd fail If we're running on a platform where we can't install syscall filters, Elasticsearch logs a message before it reads the data directory to get the node name. Because that log message doesn't have a node name, this test will fail. Since we mostly run the test on OSes where we *can* install the syscall filters, we can fairly safely skip the test on OSes where we can't.
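In outline, the skip is an assumption check at the top of the log reader; a sketch of the pattern (the guard added below is the real one):

    // assumeTrue (from the test framework's Assume support) reports the test
    // as skipped rather than failed when the condition is false, so platforms
    // that cannot install the syscall filters no longer fail on the missing
    // node name.
    assumeTrue("We log a line without the node name if we can't install the seccomp filters",
        BootstrapInfo.isSystemCallFilterInstalled());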
Closes #33540 --- .../elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java index 512fc2345549c..76bc9fa4d55fb 100644 --- a/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java +++ b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.unconfigured_node_name; +import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase; import java.io.IOException; @@ -32,6 +33,8 @@ public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase { @Override protected BufferedReader openReader(Path logFile) throws IOException { + assumeTrue("We log a line without the node name if we can't install the seccomp filters", + BootstrapInfo.isSystemCallFilterInstalled()); return AccessController.doPrivileged((PrivilegedAction) () -> { try { return Files.newBufferedReader(logFile, StandardCharsets.UTF_8); From ab9e2cddf169be536065e9b32d367fee978bbb0c Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 7 Sep 2018 21:54:38 -0400 Subject: [PATCH 52/91] Logging: Clean up skipping test Clean up on top of the last fix: if we skip the entire test case then the test run would fail because we skipped all the tests. This adds a dummy test case to prevent that. It is a fairly nasty workaround; I plan to work on something that makes this not required any more anyway. --- .../unconfigured_node_name/NodeNameInLogsIT.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java index 76bc9fa4d55fb..9f36a600b68de 100644 --- a/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java +++ b/qa/unconfigured-node-name/src/test/java/org/elasticsearch/unconfigured_node_name/NodeNameInLogsIT.java @@ -43,4 +43,11 @@ protected BufferedReader openReader(Path logFile) throws IOException { } }); } + + public void testDummy() { + /* Dummy test case so that when we run this test on a platform that + * does not support our syscall filters and we skip the test above + * we don't fail the entire test run because we skipped all the tests.
+ */ + } } From f27c3dcf881d8dc426d6d58afb63fb3774f45479 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Sat, 8 Sep 2018 10:18:45 +0200 Subject: [PATCH 53/91] INGEST: Remove Outdated TODOs (#33458) * CompoundProcessor is in the ingest package now -> resolved * Java generics don't offer type checking, so nothing can be done here -> removed TODO and test * #16019 was closed and not acted on -> todo can go away --- .../java/org/elasticsearch/ingest/CompoundProcessor.java | 1 - .../org/elasticsearch/ingest/ConfigurationUtilsTests.java | 6 ------ .../test/java/org/elasticsearch/ingest/IngestClientIT.java | 1 - 3 files changed, 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java index f576667f44109..e1a413f6aa9bb 100644 --- a/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java +++ b/server/src/main/java/org/elasticsearch/ingest/CompoundProcessor.java @@ -16,7 +16,6 @@ * specific language governing permissions and limitations * under the License. */ -//TODO(simonw): can all these classes go into org.elasticsearch.ingest? package org.elasticsearch.ingest; diff --git a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java index f3a11a86e54e5..9111658e49ca8 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java @@ -92,12 +92,6 @@ public void testReadBooleanPropertyInvalidType() { } } - // TODO(talevy): Issue with generics. This test should fail, "int" is of type List - public void testOptional_InvalidType() { - List val = ConfigurationUtils.readList(null, null, config, "int"); - assertThat(val, equalTo(Collections.singletonList(2))); - } - public void testReadStringOrIntProperty() { String val1 = ConfigurationUtils.readStringOrIntProperty(null, null, config, "foo", null); String val2 = ConfigurationUtils.readStringOrIntProperty(null, null, config, "num", null); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 65139109a83a2..6e5d862372ac6 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -60,7 +60,6 @@ public class IngestClientIT extends ESIntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { - // TODO: Remove this method once gets in: https://github.com/elastic/elasticsearch/issues/16019 if (nodeOrdinal % 2 == 0) { return Settings.builder().put("node.ingest", false).put(super.nodeSettings(nodeOrdinal)).build(); } From 94e4cb64c2480141fd54e19f64291ad7cd870bef Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sat, 8 Sep 2018 19:29:31 -0400 Subject: [PATCH 54/91] Bootstrap a new history_uuid when force allocating a stale primary (#33432) This commit ensures that we bootstrap a new history_uuid when force allocating a stale primary. A stale primary should never be the source of an operation-based recovery to another shard which exists before the forced-allocation.
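For reference, a sketch of the force-allocation path this change hardens. The index, shard, and node values are placeholders, and acceptDataLoss must be explicitly true to acknowledge that the stale copy may lose operations:

    // Force-allocate a surviving stale copy as the new primary. With this
    // change, store recovery for the forced allocation bootstraps a fresh
    // history_uuid, so the promoted copy can never serve as the source of an
    // operation-based recovery for shards from the pre-failure history.
    client().admin().cluster().prepareReroute()
        .add(new AllocateStalePrimaryAllocationCommand("test", 0, "stale_copy_node", true))
        .get();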
Closes #26712 --- .../cluster/routing/IndexRoutingTable.java | 9 ++- .../cluster/routing/RecoverySource.java | 80 +++++++++++++++---- .../cluster/routing/ShardRouting.java | 5 +- ...AllocateEmptyPrimaryAllocationCommand.java | 4 +- ...AllocateStalePrimaryAllocationCommand.java | 3 +- .../index/shard/StoreRecovery.java | 3 + .../action/search/SearchAsyncActionTests.java | 2 +- .../cluster/routing/AllocationIdTests.java | 10 +-- .../routing/GroupShardsIteratorTests.java | 2 +- .../cluster/routing/PrimaryAllocationIT.java | 39 ++++++--- .../allocation/ThrottlingAllocationTests.java | 2 +- .../DiskThresholdDeciderUnitTests.java | 12 +-- .../index/shard/IndexShardIT.java | 2 +- .../index/shard/IndexShardTests.java | 55 +++++++++---- ...dicesLifecycleListenerSingleNodeTests.java | 2 +- .../indices/recovery/IndexRecoveryIT.java | 7 +- .../BlobStoreRepositoryRestoreTests.java | 5 +- .../action/cat/RestIndicesActionTests.java | 4 +- .../cluster/routing/TestShardRouting.java | 8 +- .../ESIndexLevelReplicationTestCase.java | 2 +- .../index/shard/IndexShardTestCase.java | 8 +- .../action/TransportOpenJobActionTests.java | 4 +- .../datafeed/DatafeedNodeSelectorTests.java | 2 +- .../ClusterStatsMonitoringDocTests.java | 5 +- .../authz/store/NativeRolesStoreTests.java | 6 +- .../support/SecurityIndexManagerTests.java | 9 ++- .../security/test/SecurityTestUtils.java | 68 +--------------- 27 files changed, 194 insertions(+), 164 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java index 4e7e81def8739..e9d805d34c8a1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRoutingTable.java @@ -27,10 +27,11 @@ import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.RecoverySource.EmptyStoreRecoverySource; +import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.LocalShardsRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.io.stream.StreamInput; @@ -386,7 +387,7 @@ private Builder initializeAsRestore(IndexMetaData indexMetaData, SnapshotRecover if (asNew && ignoreShards.contains(shardNumber)) { // This shards wasn't completely snapshotted - restore it as new shard indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(shardId, primary, - primary ? StoreRecoverySource.EMPTY_STORE_INSTANCE : PeerRecoverySource.INSTANCE, unassignedInfo)); + primary ? EmptyStoreRecoverySource.INSTANCE : PeerRecoverySource.INSTANCE, unassignedInfo)); } else { indexShardRoutingBuilder.addShard(ShardRouting.newUnassigned(shardId, primary, primary ? 
recoverySource : PeerRecoverySource.INSTANCE, unassignedInfo)); @@ -410,13 +411,13 @@ private Builder initializeEmpty(IndexMetaData indexMetaData, UnassignedInfo unas final RecoverySource primaryRecoverySource; if (indexMetaData.inSyncAllocationIds(shardNumber).isEmpty() == false) { // we have previous valid copies for this shard. use them for recovery - primaryRecoverySource = StoreRecoverySource.EXISTING_STORE_INSTANCE; + primaryRecoverySource = ExistingStoreRecoverySource.INSTANCE; } else if (indexMetaData.getResizeSourceIndex() != null) { // this is a new index but the initial shards should merged from another index primaryRecoverySource = LocalShardsRecoverySource.INSTANCE; } else { // a freshly created index with no restriction - primaryRecoverySource = StoreRecoverySource.EMPTY_STORE_INSTANCE; + primaryRecoverySource = EmptyStoreRecoverySource.INSTANCE; } IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); for (int i = 0; i <= indexMetaData.getNumberOfReplicas(); i++) { diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index 13cb85ea399d5..2502fb0f3cc62 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -34,7 +34,8 @@ /** * Represents the recovery source of a shard. Available recovery types are: * - * - {@link StoreRecoverySource} recovery from the local store (empty or with existing data) + * - {@link EmptyStoreRecoverySource} recovery from an empty store + * - {@link ExistingStoreRecoverySource} recovery from an existing store * - {@link PeerRecoverySource} recovery from a primary on another node * - {@link SnapshotRecoverySource} recovery from a snapshot * - {@link LocalShardsRecoverySource} recovery from other shards of another index on the same node @@ -59,8 +60,8 @@ public void addAdditionalFields(XContentBuilder builder, ToXContent.Params param public static RecoverySource readFrom(StreamInput in) throws IOException { Type type = Type.values()[in.readByte()]; switch (type) { - case EMPTY_STORE: return StoreRecoverySource.EMPTY_STORE_INSTANCE; - case EXISTING_STORE: return StoreRecoverySource.EXISTING_STORE_INSTANCE; + case EMPTY_STORE: return EmptyStoreRecoverySource.INSTANCE; + case EXISTING_STORE: return new ExistingStoreRecoverySource(in); case PEER: return PeerRecoverySource.INSTANCE; case SNAPSHOT: return new SnapshotRecoverySource(in); case LOCAL_SHARDS: return LocalShardsRecoverySource.INSTANCE; @@ -91,6 +92,10 @@ public enum Type { public abstract Type getType(); + public boolean shouldBootstrapNewHistoryUUID() { + return false; + } + @Override public boolean equals(Object o) { if (this == o) return true; @@ -107,25 +112,68 @@ public int hashCode() { } /** - * recovery from an existing on-disk store or a fresh copy + * Recovery from a fresh copy */ - public abstract static class StoreRecoverySource extends RecoverySource { - public static final StoreRecoverySource EMPTY_STORE_INSTANCE = new StoreRecoverySource() { - @Override - public Type getType() { - return Type.EMPTY_STORE; + public static final class EmptyStoreRecoverySource extends RecoverySource { + public static final EmptyStoreRecoverySource INSTANCE = new EmptyStoreRecoverySource(); + + @Override + public Type getType() { + return Type.EMPTY_STORE; + } + + @Override + public String toString() { + return "new shard recovery"; + 
} + } + + /** + * Recovery from an existing on-disk store + */ + public static final class ExistingStoreRecoverySource extends RecoverySource { + public static final ExistingStoreRecoverySource INSTANCE = new ExistingStoreRecoverySource(false); + public static final ExistingStoreRecoverySource FORCE_STALE_PRIMARY_INSTANCE = new ExistingStoreRecoverySource(true); + + private final boolean bootstrapNewHistoryUUID; + + private ExistingStoreRecoverySource(boolean bootstrapNewHistoryUUID) { + this.bootstrapNewHistoryUUID = bootstrapNewHistoryUUID; + } + + private ExistingStoreRecoverySource(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + bootstrapNewHistoryUUID = in.readBoolean(); + } else { + bootstrapNewHistoryUUID = false; } - }; - public static final StoreRecoverySource EXISTING_STORE_INSTANCE = new StoreRecoverySource() { - @Override - public Type getType() { - return Type.EXISTING_STORE; + } + + @Override + public void addAdditionalFields(XContentBuilder builder, Params params) throws IOException { + builder.field("bootstrap_new_history_uuid", bootstrapNewHistoryUUID); + } + + @Override + protected void writeAdditionalFields(StreamOutput out) throws IOException { + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeBoolean(bootstrapNewHistoryUUID); } - }; + } + + @Override + public boolean shouldBootstrapNewHistoryUUID() { + return bootstrapNewHistoryUUID; + } + + @Override + public Type getType() { + return Type.EXISTING_STORE; + } @Override public String toString() { - return getType() == Type.EMPTY_STORE ? "new shard recovery" : "existing recovery"; + return "existing store recovery; bootstrap_history_uuid=" + bootstrapNewHistoryUUID; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java index 6a9a105b6c432..74341ca271a9c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ShardRouting.java @@ -19,14 +19,13 @@ package org.elasticsearch.cluster.routing; +import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.Index; @@ -318,7 +317,7 @@ public ShardRouting moveToUnassigned(UnassignedInfo unassignedInfo) { final RecoverySource recoverySource; if (active()) { if (primary()) { - recoverySource = StoreRecoverySource.EXISTING_STORE_INSTANCE; + recoverySource = ExistingStoreRecoverySource.INSTANCE; } else { recoverySource = PeerRecoverySource.INSTANCE; } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java index 66281b73458b3..a42fd2765b598 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateEmptyPrimaryAllocationCommand.java @@ -21,7 +21,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; +import org.elasticsearch.cluster.routing.RecoverySource.EmptyStoreRecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.ShardRouting; @@ -136,7 +136,7 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) } initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, unassignedInfoToUpdate, - StoreRecoverySource.EMPTY_STORE_INSTANCE); + EmptyStoreRecoverySource.INSTANCE); return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java index 11c4420200e33..f4c9aba17d71e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/command/AllocateStalePrimaryAllocationCommand.java @@ -129,7 +129,8 @@ public RerouteExplanation execute(RoutingAllocation allocation, boolean explain) "trying to allocate an existing primary shard [" + index + "][" + shardId + "], while no such shard has ever been active"); } - initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting); + initializeUnassignedShard(allocation, routingNodes, routingNode, shardRouting, null, + RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE); return new RerouteExplanation(this, allocation.decision(Decision.YES, name() + " (allocation command)", "ignore deciders")); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index ae3f90e63e7d1..c4b971e470d66 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -398,6 +398,9 @@ private void internalRecoverFromStore(IndexShard indexShard) throws IndexShardRe indexShard.shardPath().resolveTranslog(), maxSeqNo, shardId, indexShard.getPendingPrimaryTerm()); store.associateIndexWithNewTranslog(translogUUID); } else if (indexShouldExists) { + if (recoveryState.getRecoverySource().shouldBootstrapNewHistoryUUID()) { + store.bootstrapNewHistory(); + } // since we recover from local, just fill the files and size try { final RecoveryState.Index index = recoveryState.getIndex(); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 1f7f6f4249b0c..95282e358e144 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -377,7 +377,7 @@ static GroupShardsIterator 
getShardsIter(String index, Orig ArrayList unassigned = new ArrayList<>(); ShardRouting routing = ShardRouting.newUnassigned(new ShardId(new Index(index, "_na_"), i), true, - RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar")); + RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foobar")); routing = routing.initialize(primaryNode.getId(), i + "p", 0); routing.started(); started.add(routing); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java index c1861572d8352..86dbeabd1d73e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.cluster.routing; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; +import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; @@ -37,7 +37,7 @@ public class AllocationIdTests extends ESTestCase { public void testShardToStarted() { logger.info("-- create unassigned shard"); - ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, StoreRecoverySource.EXISTING_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); assertThat(shard.allocationId(), nullValue()); logger.info("-- initialize the shard"); @@ -57,7 +57,7 @@ public void testShardToStarted() { public void testSuccessfulRelocation() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, StoreRecoverySource.EXISTING_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard = shard.initialize("node1", null, -1); shard = shard.moveToStarted(); @@ -80,7 +80,7 @@ public void testSuccessfulRelocation() { public void testCancelRelocation() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, StoreRecoverySource.EXISTING_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard = shard.initialize("node1", null, -1); shard = shard.moveToStarted(); @@ -100,7 +100,7 @@ public void testCancelRelocation() { public void testMoveToUnassigned() { logger.info("-- build started shard"); - ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, StoreRecoverySource.EXISTING_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + ShardRouting shard = ShardRouting.newUnassigned(new ShardId("test","_na_", 0), true, 
ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); shard = shard.initialize("node1", null, -1); shard = shard.moveToStarted(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java index f2571fce3391d..66eabd4cbd921 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/GroupShardsIteratorTests.java @@ -77,7 +77,7 @@ public void testIterate() { public ShardRouting newRouting(Index index, int id, boolean started) { ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, id), true, - RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); shardRouting = ShardRoutingHelper.initialize(shardRouting, "some node"); if (started) { shardRouting = ShardRoutingHelper.moveToStarted(shardRouting); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index 9786c0eaf5290..9b2db5b34b1da 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.gateway.GatewayAllocator; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; @@ -55,6 +56,7 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -64,6 +66,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.isIn; import static org.hamcrest.Matchers.not; @@ -83,18 +86,9 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false).build(); } - private void createStaleReplicaScenario() throws Exception { - logger.info("--> starting 3 nodes, 1 master, 2 data"); - String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); - internalCluster().startDataOnlyNodes(2); - - assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder() - .put("index.number_of_shards", 1).put("index.number_of_replicas", 1)).get()); - ensureGreen(); - logger.info("--> indexing..."); + private void createStaleReplicaScenario(String master) throws Exception { client().prepareIndex("test", "type1").setSource(jsonBuilder().startObject().field("field", "value1").endObject()).get(); refresh(); - ClusterState state = client().admin().cluster().prepareState().all().get().getState(); 
List shards = state.routingTable().allShards("test"); assertThat(shards.size(), equalTo(2)); @@ -140,7 +134,13 @@ private void createStaleReplicaScenario() throws Exception { } public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception { - createStaleReplicaScenario(); + logger.info("--> starting 3 nodes, 1 master, 2 data"); + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + internalCluster().startDataOnlyNodes(2); + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 1)).get()); + ensureGreen(); + createStaleReplicaScenario(master); logger.info("--> starting node that reuses data folder with the up-to-date primary shard"); internalCluster().startDataOnlyNode(Settings.EMPTY); @@ -176,9 +176,17 @@ public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exce } public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { - boolean useStaleReplica = randomBoolean(); // if true, use stale replica, otherwise a completely empty copy - createStaleReplicaScenario(); + logger.info("--> starting 3 nodes, 1 master, 2 data"); + String master = internalCluster().startMasterOnlyNode(Settings.EMPTY); + internalCluster().startDataOnlyNodes(2); + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 1)).get()); + ensureGreen(); + Set historyUUIDs = Arrays.stream(client().admin().indices().prepareStats("test").clear().get().getShards()) + .map(shard -> shard.getCommitStats().getUserData().get(Engine.HISTORY_UUID_KEY)).collect(Collectors.toSet()); + createStaleReplicaScenario(master); + boolean useStaleReplica = randomBoolean(); // if true, use stale replica, otherwise a completely empty copy logger.info("--> explicitly promote old primary shard"); final String idxName = "test"; ImmutableOpenIntMap> storeStatuses = client().admin().indices().prepareShardStores(idxName).get().getStoreStatuses().get(idxName); @@ -213,6 +221,11 @@ public void testForceStaleReplicaToBePromotedToPrimary() throws Exception { ClusterState state = client().admin().cluster().prepareState().get().getState(); assertEquals(Collections.singleton(state.routingTable().index(idxName).shard(0).primary.allocationId().getId()), state.metaData().index(idxName).inSyncAllocationIds(0)); + + Set newHistoryUUIds = Arrays.stream(client().admin().indices().prepareStats("test").clear().get().getShards()) + .map(shard -> shard.getCommitStats().getUserData().get(Engine.HISTORY_UUID_KEY)).collect(Collectors.toSet()); + assertThat(newHistoryUUIds, everyItem(not(isIn(historyUUIDs)))); + assertThat(newHistoryUUIds, hasSize(1)); } public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java index d32ebe62ec1aa..01586d9c49575 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ThrottlingAllocationTests.java @@ -399,7 +399,7 @@ private void addInSyncAllocationIds(Index index, IndexMetaData.Builder indexMeta final boolean primary = randomBoolean(); final ShardRouting 
unassigned = ShardRouting.newUnassigned(new ShardId(index, shard), primary, primary ? - RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : + RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "test") ); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index da0e0a9b0bcfa..ce53c14807c22 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -29,9 +29,9 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RecoverySource.EmptyStoreRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.LocalShardsRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -69,7 +69,7 @@ public void testCanAllocateUsesMaxAvailableSpace() { final Index index = metaData.index("test").getIndex(); - ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), true, StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), true, EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); DiscoveryNode node_0 = new DiscoveryNode("node_0", buildNewFakeTransportAddress(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); DiscoveryNode node_1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Collections.emptyMap(), @@ -125,22 +125,22 @@ public void testCanRemainUsesLeastAvailableSpace() { .build(); final IndexMetaData indexMetaData = metaData.index("test"); - ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 0), true, StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 0), true, EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_0 = ShardRoutingHelper.initialize(test_0, node_0.getId()); test_0 = ShardRoutingHelper.moveToStarted(test_0); shardRoutingMap.put(test_0, "/node0/least"); - ShardRouting test_1 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 1), true, StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_1 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 1), true, EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_1 = ShardRoutingHelper.initialize(test_1, node_1.getId()); test_1 = ShardRoutingHelper.moveToStarted(test_1); shardRoutingMap.put(test_1, "/node1/least"); - ShardRouting 
test_2 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 2), true, StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_2 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 2), true, EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_2 = ShardRoutingHelper.initialize(test_2, node_1.getId()); test_2 = ShardRoutingHelper.moveToStarted(test_2); shardRoutingMap.put(test_2, "/node1/most"); - ShardRouting test_3 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 3), true, StoreRecoverySource.EMPTY_STORE_INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); + ShardRouting test_3 = ShardRouting.newUnassigned(new ShardId(indexMetaData.getIndex(), 3), true, EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); test_3 = ShardRoutingHelper.initialize(test_3, node_1.getId()); test_3 = ShardRoutingHelper.moveToStarted(test_3); // Intentionally not in the shardRoutingMap. We want to test what happens when we don't know where it is. diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index 182747e7dda5d..87edfcfccb150 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -637,7 +637,7 @@ private static ShardRouting getInitializingShardRouting(ShardRouting existingSha existingShardRouting.currentNodeId(), null, existingShardRouting.primary(), ShardRoutingState.INITIALIZING, existingShardRouting.allocationId()); shardRouting = shardRouting.updateUnassigned(new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "fake recovery"), - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE); + RecoverySource.ExistingStoreRecoverySource.INSTANCE); return shardRouting; } diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 713bc04634b0a..4ed74388f0e1e 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -178,6 +178,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -843,7 +844,7 @@ public void testGlobalCheckpointSync() throws IOException { randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING, - RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); + RecoverySource.EmptyStoreRecoverySource.INSTANCE); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) @@ -1199,7 +1200,7 @@ public void testShardStats() throws IOException { public void testShardStatsWithFailures() throws IOException { allowShardFailures(); final ShardId shardId = new ShardId("index", "_na_", 0); - final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, ShardRoutingState.INITIALIZING); + final ShardRouting shardRouting = newShardRouting(shardId, "node", 
true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, ShardRoutingState.INITIALIZING); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); @@ -1659,7 +1660,7 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { final ShardRouting replicaRouting = shard.routingEntry(); IndexShard newShard = reinitShard(shard, newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); + RecoverySource.ExistingStoreRecoverySource.INSTANCE)); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); @@ -1684,6 +1685,7 @@ public void testRecoverFromStore() throws IOException { flushShard(shard); translogOps = 0; } + String historyUUID = shard.getHistoryUUID(); IndexShard newShard = reinitShard(shard); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); @@ -1698,6 +1700,29 @@ public void testRecoverFromStore() throws IOException { assertThat(newShard.getReplicationTracker().getTrackedLocalCheckpointForShard(newShard.routingEntry().allocationId().getId()) .getLocalCheckpoint(), equalTo(totalOps - 1L)); assertDocCount(newShard, totalOps); + assertThat(newShard.getHistoryUUID(), equalTo(historyUUID)); + closeShards(newShard); + } + + public void testRecoverFromStalePrimaryForceNewHistoryUUID() throws IOException { + final IndexShard shard = newStartedShard(true); + int totalOps = randomInt(10); + for (int i = 0; i < totalOps; i++) { + indexDoc(shard, "_doc", Integer.toString(i)); + } + if (randomBoolean()) { + shard.updateLocalCheckpointForShard(shard.shardRouting.allocationId().getId(), totalOps - 1); + flushShard(shard); + } + String historyUUID = shard.getHistoryUUID(); + IndexShard newShard = reinitShard(shard, newShardRouting(shard.shardId(), shard.shardRouting.currentNodeId(), true, + ShardRoutingState.INITIALIZING, RecoverySource.ExistingStoreRecoverySource.FORCE_STALE_PRIMARY_INSTANCE)); + DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); + newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); + assertTrue(newShard.recoverFromStore()); + IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted()); + assertDocCount(newShard, totalOps); + assertThat(newShard.getHistoryUUID(), not(equalTo(historyUUID))); closeShards(newShard); } @@ -1734,7 +1759,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException { final ShardRouting primaryShardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); + RecoverySource.ExistingStoreRecoverySource.INSTANCE)); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); @@ -1760,7 +1785,7 @@ public void testRecoverFromStoreWithNoOps() 
throws IOException { for (int i = 0; i < 2; i++) { newShard = reinitShard(newShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); + RecoverySource.ExistingStoreRecoverySource.INSTANCE)); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) { @@ -1778,7 +1803,7 @@ public void testRecoverFromCleanStore() throws IOException { } final ShardRouting shardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(shard, - ShardRoutingHelper.initWithSameId(shardRouting, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE) + ShardRoutingHelper.initWithSameId(shardRouting, RecoverySource.EmptyStoreRecoverySource.INSTANCE) ); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -1827,7 +1852,7 @@ public void testFailIfIndexNotPresentInRecoverFromStore() throws Exception { } newShard = reinitShard(newShard, - ShardRoutingHelper.initWithSameId(routing, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE)); + ShardRoutingHelper.initWithSameId(routing, RecoverySource.EmptyStoreRecoverySource.INSTANCE)); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue("recover even if there is nothing to recover", newShard.recoverFromStore()); @@ -1865,7 +1890,7 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { final ShardRouting replicaRouting = shard.routingEntry(); IndexShard newShard = reinitShard(shard, newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); + RecoverySource.ExistingStoreRecoverySource.INSTANCE)); newShard.pendingPrimaryTerm++; newShard.operationPrimaryTerm++; DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); @@ -1905,7 +1930,7 @@ public void testRestoreShard() throws IOException { assertDocs(target, "1"); flushShard(source); // only flush source ShardRouting routing = ShardRoutingHelper.initWithSameId(target.routingEntry(), - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE); + RecoverySource.ExistingStoreRecoverySource.INSTANCE); final Snapshot snapshot = new Snapshot("foo", new SnapshotId("bar", UUIDs.randomBase64UUID())); routing = ShardRoutingHelper.newWithRestoreSource(routing, new RecoverySource.SnapshotRecoverySource(snapshot, Version.CURRENT, "test")); @@ -1974,7 +1999,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { }; closeShards(shard); IndexShard newShard = newShard( - ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), + ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.ExistingStoreRecoverySource.INSTANCE), shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, @@ -2127,7 +2152,7 @@ public IndexSearcher wrap(IndexSearcher searcher) throws EngineException { closeShards(shard); IndexShard newShard = newShard( - ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE), + ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.ExistingStoreRecoverySource.INSTANCE), shard.shardPath(), 
shard.indexSettings().getIndexMetaData(), null, @@ -2625,7 +2650,7 @@ public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IO assertThat("corruption marker should not be there", corruptedMarkerCount.get(), equalTo(0)); final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE + RecoverySource.ExistingStoreRecoverySource.INSTANCE ); // start shard and perform index check on startup. It enforces the shard to fail due to corrupted index files final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) @@ -2666,7 +2691,7 @@ public void testShardDoesNotStartIfCorruptedMarkerIsPresent() throws Exception { final ShardPath shardPath = indexShard.shardPath(); final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE + RecoverySource.ExistingStoreRecoverySource.INSTANCE ); final IndexMetaData indexMetaData = indexShard.indexSettings().getIndexMetaData(); @@ -2751,7 +2776,7 @@ public void testReadSnapshotAndCheckIndexConcurrently() throws Exception { closeShards(indexShard); final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), - isPrimary ? RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE + isPrimary ? RecoverySource.ExistingStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ); final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) .settings(Settings.builder() @@ -3261,7 +3286,7 @@ public void testFlushOnInactive() throws Exception { .settings(settings) .primaryTerm(0, 1).build(); ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(metaData.getIndex(), 0), "n1", true, ShardRoutingState - .INITIALIZING, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); + .INITIALIZING, RecoverySource.EmptyStoreRecoverySource.INSTANCE); final ShardId shardId = shardRouting.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java index aa06f9e9b7dfe..769cdfc8a9b53 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java @@ -128,7 +128,7 @@ public void afterIndexRemoved(Index index, IndexSettings indexSettings, IndexRem String nodeId = newRouting.currentNodeId(); UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "boom"); newRouting = newRouting.moveToUnassigned(unassignedInfo) - .updateUnassigned(unassignedInfo, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); + .updateUnassigned(unassignedInfo, RecoverySource.EmptyStoreRecoverySource.INSTANCE); newRouting = ShardRoutingHelper.initialize(newRouting, nodeId); IndexShard shard = index.createShard(newRouting, s -> {}); IndexShardTestCase.updateRoutingEntry(shard, newRouting); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java
b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index 213aa644665dd..6a6970675eb9b 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -36,7 +36,6 @@ import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -186,7 +185,7 @@ public void testGatewayRecovery() throws Exception { RecoveryState recoveryState = recoveryStates.get(0); - assertRecoveryState(recoveryState, 0, StoreRecoverySource.EXISTING_STORE_INSTANCE, true, Stage.DONE, null, node); + assertRecoveryState(recoveryState, 0, RecoverySource.ExistingStoreRecoverySource.INSTANCE, true, Stage.DONE, null, node); validateIndexRecoveryState(recoveryState.getIndex()); } @@ -239,7 +238,7 @@ public void testReplicaRecovery() throws Exception { // validate node A recovery RecoveryState nodeARecoveryState = nodeAResponses.get(0); - assertRecoveryState(nodeARecoveryState, 0, StoreRecoverySource.EMPTY_STORE_INSTANCE, true, Stage.DONE, null, nodeA); + assertRecoveryState(nodeARecoveryState, 0, RecoverySource.EmptyStoreRecoverySource.INSTANCE, true, Stage.DONE, null, nodeA); validateIndexRecoveryState(nodeARecoveryState.getIndex()); // validate node B recovery @@ -295,7 +294,7 @@ public void testRerouteRecovery() throws Exception { List<RecoveryState> nodeBRecoveryStates = findRecoveriesForTargetNode(nodeB, recoveryStates); assertThat(nodeBRecoveryStates.size(), equalTo(1)); - assertRecoveryState(nodeARecoveryStates.get(0), 0, StoreRecoverySource.EMPTY_STORE_INSTANCE, true, Stage.DONE, null, nodeA); + assertRecoveryState(nodeARecoveryStates.get(0), 0, RecoverySource.EmptyStoreRecoverySource.INSTANCE, true, Stage.DONE, null, nodeA); validateIndexRecoveryState(nodeARecoveryStates.get(0).getIndex()); assertOnGoingRecoveryState(nodeBRecoveryStates.get(0), 0, PeerRecoverySource.INSTANCE, true, nodeA, nodeB); diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java index fa7de2d629112..ba3fa84a19641 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.TestUtil; import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.common.UUIDs; @@ -49,7 +50,6 @@ import java.util.Arrays; import java.util.List; -import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.hamcrest.Matchers.containsString; /** @@ -99,7 +99,8 @@ public void testRestoreSnapshotWithExistingFiles() throws IOException { } // build a new shard using the same store
directory as the closed shard - ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(shard.routingEntry(), EXISTING_STORE_INSTANCE); + ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(shard.routingEntry(), + RecoverySource.ExistingStoreRecoverySource.INSTANCE); shard = newShard( shardRouting, shard.shardPath(), diff --git a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java index cd592c9ed1e9c..4535bf7a91b0d 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -30,8 +30,8 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource; -import org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Table; @@ -143,7 +143,7 @@ private IndicesStatsResponse randomIndicesStatsResponse(final Index[] indices) { boolean primary = (i == primaryIdx); Path path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve(String.valueOf(i)); ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, primary, - primary ? StoreRecoverySource.EMPTY_STORE_INSTANCE : PeerRecoverySource.INSTANCE, + primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : PeerRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null) ); shardRouting = shardRouting.initialize("node-0", null, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index 2291c3d39e200..c91c04884c5a7 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -88,8 +88,8 @@ private static RecoverySource buildRecoveryTarget(boolean primary, ShardRoutingS case UNASSIGNED: case INITIALIZING: if (primary) { - return ESTestCase.randomFrom(RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE); + return ESTestCase.randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, + RecoverySource.ExistingStoreRecoverySource.INSTANCE); } else { return RecoverySource.PeerRecoverySource.INSTANCE; } @@ -130,8 +130,8 @@ private static UnassignedInfo buildUnassignedInfo(ShardRoutingState state) { } public static RecoverySource randomRecoverySource() { - return ESTestCase.randomFrom(RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, - RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE, + return ESTestCase.randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, + RecoverySource.ExistingStoreRecoverySource.INSTANCE, RecoverySource.PeerRecoverySource.INSTANCE, RecoverySource.LocalShardsRecoverySource.INSTANCE, new RecoverySource.SnapshotRecoverySource( diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index f2afdff9c3a3a..8717d7ba146fb 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -172,7 +172,7 @@ protected ReplicationGroup(final IndexMetaData indexMetaData) throws IOException private ShardRouting createShardRouting(String nodeId, boolean primary) { return TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING, - primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); + primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); } protected EngineFactory getEngineFactory(ShardRouting routing) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 53576a1d80a70..9082b4153b0bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -193,7 +193,7 @@ protected IndexShard newShard(final boolean primary, final Settings settings) th */ protected IndexShard newShard(boolean primary, Settings settings, EngineFactory engineFactory) throws IOException { final RecoverySource recoverySource = - primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE; + primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE; final ShardRouting shardRouting = TestShardRouting.newShardRouting( new ShardId("index", "_na_", 0), randomAlphaOfLength(10), primary, ShardRoutingState.INITIALIZING, recoverySource); @@ -244,7 +244,7 @@ protected IndexShard newShard( protected IndexShard newShard(ShardId shardId, boolean primary, IndexingOperationListener... listeners) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, randomAlphaOfLength(5), primary, ShardRoutingState.INITIALIZING, - primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); + primary ? RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, Settings.EMPTY, new InternalEngineFactory(), listeners); } @@ -272,7 +272,7 @@ protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, I protected IndexShard newShard(ShardId shardId, boolean primary, String nodeId, IndexMetaData indexMetaData, @Nullable IndexSearcherWrapper searcherWrapper, Runnable globalCheckpointSyncer) throws IOException { ShardRouting shardRouting = TestShardRouting.newShardRouting(shardId, nodeId, primary, ShardRoutingState.INITIALIZING, - primary ? RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); + primary ? 
RecoverySource.EmptyStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE); return newShard(shardRouting, indexMetaData, searcherWrapper, new InternalEngineFactory(), globalCheckpointSyncer); } @@ -371,7 +371,7 @@ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMe protected IndexShard reinitShard(IndexShard current, IndexingOperationListener... listeners) throws IOException { final ShardRouting shardRouting = current.routingEntry(); return reinitShard(current, ShardRoutingHelper.initWithSameId(shardRouting, - shardRouting.primary() ? RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE + shardRouting.primary() ? RecoverySource.ExistingStoreRecoverySource.INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ), listeners); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 5bf8fb6956bfe..bef7705e83533 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -532,7 +532,7 @@ public void testVerifyIndicesPrimaryShardsAreActive() { } else { Index index = new Index(indexToRemove, "_uuid"); ShardId shardId = new ShardId(index, 0); - ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, + ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); shardRouting = shardRouting.initialize("node_id", null, 0L); routingTable.add(IndexRoutingTable.builder(index) @@ -656,7 +656,7 @@ private void addJobAndIndices(MetaData.Builder metaData, RoutingTable.Builder ro metaData.put(indexMetaData); Index index = new Index(indexName, "_uuid"); ShardId shardId = new ShardId(index, 0); - ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, + ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); shardRouting = shardRouting.initialize("node_id", null, 0L); shardRouting = shardRouting.moveToStarted(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 3a6082c6cf057..4b8ad1d08aed3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -349,7 +349,7 @@ private static RoutingTable generateRoutingTable(IndexMetaData indexMetaData, Li true, ShardRoutingState.RELOCATING); } else { shardRouting = ShardRouting.newUnassigned(shardId, true, - RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, + RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index c7ddb3c4d2427..61224ac0fd735 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.bytes.BytesReference; @@ -60,7 +61,6 @@ import static java.util.Collections.singleton; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; -import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -289,7 +289,8 @@ public void testToXContent() throws IOException { final ShardId shardId = new ShardId("_index", "_index_id", 7); final UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "_message"); - final ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, EXISTING_STORE_INSTANCE, unassignedInfo); + final ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, + RecoverySource.ExistingStoreRecoverySource.INSTANCE, unassignedInfo); final ShardStats mockShardStats = mock(ShardStats.class); when(mockShardStats.getShardRouting()).thenReturn(shardRouting); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java index 4000969187548..4e5271c520a81 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStoreTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -55,7 +56,6 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; -import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.contains; @@ -267,8 +267,8 @@ private ClusterState getClusterStateWithSecurityIndex() { } Index index = new Index(securityIndexName, UUID.randomUUID().toString()); - ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, EXISTING_STORE_INSTANCE, - new UnassignedInfo(Reason.INDEX_CREATED, "")); + ShardRouting shardRouting = 
ShardRouting.newUnassigned(new ShardId(index, 0), true, + RecoverySource.ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(Reason.INDEX_CREATED, "")); IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)) .addShard(shardRouting.initialize(randomAlphaOfLength(8), null, shardRouting.getExpectedShardSize()).moveToStarted()) .build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index c3a6d7e920d1a..76e84f83137e3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -50,7 +51,6 @@ import org.hamcrest.Matchers; import org.junit.Before; -import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_TEMPLATE_NAME; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.TEMPLATE_VERSION_PATTERN; @@ -106,8 +106,8 @@ public void testIndexWithoutPrimaryShards() throws IOException { final ClusterState.Builder clusterStateBuilder = createClusterState(INDEX_NAME, TEMPLATE_NAME); Index index = new Index(INDEX_NAME, UUID.randomUUID().toString()); - ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, EXISTING_STORE_INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); + ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, + RecoverySource.ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); String nodeId = ESTestCase.randomAlphaOfLength(8); IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)) .addShard(shardRouting.initialize(nodeId, null, shardRouting.getExpectedShardSize()) @@ -165,7 +165,8 @@ public void testIndexHealthChangeListeners() throws Exception { clusterStateBuilder.routingTable(RoutingTable.builder() .add(IndexRoutingTable.builder(prevIndex) .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(prevIndex, 0)) - .addShard(ShardRouting.newUnassigned(new ShardId(prevIndex, 0), true, EXISTING_STORE_INSTANCE, + .addShard(ShardRouting.newUnassigned(new ShardId(prevIndex, 0), true, + RecoverySource.ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) .initialize(UUIDs.randomBase64UUID(random()), null, 0L) .moveToUnassigned(new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, ""))) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java index 
aa4982cce3f84..12474b7a04d59 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/test/SecurityTestUtils.java @@ -5,21 +5,17 @@ */ package org.elasticsearch.xpack.security.test; -import org.elasticsearch.core.internal.io.IOUtils; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.cluster.health.ClusterIndexHealth; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; -import org.elasticsearch.cluster.routing.RecoverySource; +import org.elasticsearch.cluster.routing.RecoverySource.ExistingStoreRecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -39,9 +35,7 @@ import static java.nio.file.StandardOpenOption.CREATE; import static java.nio.file.StandardOpenOption.TRUNCATE_EXISTING; import static java.nio.file.StandardOpenOption.WRITE; -import static org.elasticsearch.cluster.routing.RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE; import static org.elasticsearch.xpack.security.support.SecurityIndexManager.SECURITY_INDEX_NAME; -import static org.junit.Assert.assertEquals; public class SecurityTestUtils { @@ -74,7 +68,7 @@ public static String writeFile(Path folder, String name, String content) { public static RoutingTable buildIndexRoutingTable(String indexName) { Index index = new Index(indexName, UUID.randomUUID().toString()); - ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, EXISTING_STORE_INSTANCE, + ShardRouting shardRouting = ShardRouting.newUnassigned(new ShardId(index, 0), true, ExistingStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); String nodeId = ESTestCase.randomAlphaOfLength(8); IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)) @@ -95,60 +89,4 @@ public static MetaData addAliasToMetaData(MetaData metaData, String indexName) { metaDataBuilder.put(IndexMetaData.builder(indexMetaData).putAlias(aliasMetaData)); return metaDataBuilder.build(); } - - public static ClusterIndexHealth getClusterIndexHealth(ClusterHealthStatus status) { - IndexMetaData metaData = IndexMetaData.builder("foo").settings(Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .build()) - .build(); - final IndexRoutingTable routingTable; - switch (status) { - case RED: - routingTable = IndexRoutingTable.builder(metaData.getIndex()) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - 
.initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, - RecoverySource.PeerRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) - .build()) - .build(); - break; - case YELLOW: - routingTable = IndexRoutingTable.builder(metaData.getIndex()) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, - RecoverySource.PeerRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L)) - .build()) - .build(); - break; - case GREEN: - routingTable = IndexRoutingTable.builder(metaData.getIndex()) - .addIndexShard(new IndexShardRoutingTable.Builder(new ShardId(metaData.getIndex(), 0)) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), true, EXISTING_STORE_INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) - .addShard(ShardRouting.newUnassigned(new ShardId(metaData.getIndex(), 0), false, - RecoverySource.PeerRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")) - .initialize(ESTestCase.randomAlphaOfLength(8), null, 0L).moveToStarted()) - .build()) - .build(); - break; - default: - throw new IllegalStateException("unknown status: " + status); - } - ClusterIndexHealth health = new ClusterIndexHealth(metaData, routingTable); - assertEquals(status, health.getStatus()); - return health; - } } From 5a38c930fcc42d93475596c92903123e06613bac Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 9 Sep 2018 07:06:55 -0400 Subject: [PATCH 55/91] Add license checks for auto-follow implementation (#33496) This commit adds license checks for the auto-follow implementation. We check the license on put auto-follow patterns, and then for every coordination round we check that the local and remote clusters are licensed for CCR. In the case of non-compliance, we skip coordination yet continue to schedule follow-ups. --- .../elasticsearch/test/MockLogAppender.java | 33 ++++- .../build.gradle | 15 +++ .../xpack/ccr/CcrMultiClusterLicenseIT.java | 44 ++++++- .../java/org/elasticsearch/xpack/ccr/Ccr.java | 2 +- .../xpack/ccr/CcrLicenseChecker.java | 124 ++++++++++++++---- .../ccr/action/AutoFollowCoordinator.java | 76 +++++++---- .../action/CreateAndFollowIndexAction.java | 2 +- .../xpack/ccr/action/FollowIndexAction.java | 2 +- .../TransportPutAutoFollowPatternAction.java | 57 +++++--- .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 68 ++++++++-- ... 
=> NonCompliantLicenseLocalStateCcr.java} | 6 +- .../action/AutoFollowCoordinatorTests.java | 16 +-- 12 files changed, 348 insertions(+), 97 deletions(-) rename x-pack/plugin/ccr/qa/{multi-cluster-with-incompatible-license => multi-cluster-with-non-compliant-license}/build.gradle (61%) rename x-pack/plugin/ccr/qa/{multi-cluster-with-incompatible-license => multi-cluster-with-non-compliant-license}/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java (50%) rename x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/{IncompatibleLicenseLocalStateCcr.java => NonCompliantLicenseLocalStateCcr.java} (80%) diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java index a53ba046d32c3..895bd7ec77a2b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockLogAppender.java @@ -85,7 +85,7 @@ public AbstractEventExpectation(String name, String logger, Level level, String @Override public void match(LogEvent event) { - if (event.getLevel().equals(level) && event.getLoggerName().equals(logger)) { + if (event.getLevel().equals(level) && event.getLoggerName().equals(logger) && innerMatch(event)) { if (Regex.isSimpleMatchPattern(message)) { if (Regex.simpleMatch(message, event.getMessage().getFormattedMessage())) { saw = true; @@ -97,6 +97,11 @@ public void match(LogEvent event) { } } } + + public boolean innerMatch(final LogEvent event) { + return true; + } + + } public static class UnseenEventExpectation extends AbstractEventExpectation { @@ -123,6 +128,32 @@ public void assertMatched() { } } + public static class ExceptionSeenEventExpectation extends SeenEventExpectation { + + private final Class<? extends Exception> clazz; + private final String exceptionMessage; + + public ExceptionSeenEventExpectation( + final String name, + final String logger, + final Level level, + final String message, + final Class<? extends Exception> clazz, + final String exceptionMessage) { + super(name, logger, level, message); + this.clazz = clazz; + this.exceptionMessage = exceptionMessage; + } + + @Override + public boolean innerMatch(final LogEvent event) { + return event.getThrown() != null + && event.getThrown().getClass() == clazz + && event.getThrown().getMessage().equals(exceptionMessage); + } + + } + public static class PatternSeenEventExcpectation implements LoggingExpectation { protected final String name; diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle similarity index 61% rename from x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle rename to x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle index 1566333e60848..c599903ced12e 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle @@ -20,7 +20,20 @@ leaderClusterTestRunner { systemProperty 'tests.is_leader_cluster', 'true' } +task writeJavaPolicy { + doLast { + final File javaPolicy = file("${buildDir}/tmp/java.policy") + javaPolicy.write( + [ + "grant {", + " permission java.io.FilePermission \"${-> followClusterTest.getNodes().get(0).homeDir}/logs/${-> followClusterTest.getNodes().get(0).clusterName}.log\", \"read\";", + "};" ].join("\n")) + } +} + task followClusterTest(type: RestIntegTestTask) {}
+followClusterTest.dependsOn writeJavaPolicy followClusterTestCluster { dependsOn leaderClusterTestRunner @@ -31,8 +44,10 @@ followClusterTestCluster { } followClusterTestRunner { + systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" systemProperty 'tests.is_leader_cluster', 'false' systemProperty 'tests.leader_host', "${-> leaderClusterTest.nodes.get(0).httpUri()}" + systemProperty 'log', "${-> followClusterTest.getNodes().get(0).homeDir}/logs/${-> followClusterTest.getNodes().get(0).clusterName}.log" finalizedBy 'leaderClusterTestCluster#stop' } diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java similarity index 50% rename from x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java rename to x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 06d9f91c7abb7..c52a4a9b59d78 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-incompatible-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -9,11 +9,16 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.test.rest.ESRestTestCase; +import java.nio.file.Files; +import java.util.Iterator; +import java.util.List; import java.util.Locale; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; public class CcrMultiClusterLicenseIT extends ESRestTestCase { @@ -29,7 +34,7 @@ public void testFollowIndex() { if (runningAgainstLeaderCluster == false) { final Request request = new Request("POST", "/follower/_ccr/follow"); request.setJsonEntity("{\"leader_index\": \"leader_cluster:leader\"}"); - assertLicenseIncompatible(request); + assertNonCompliantLicense(request); } } @@ -37,11 +42,44 @@ public void testCreateAndFollowIndex() { if (runningAgainstLeaderCluster == false) { final Request request = new Request("POST", "/follower/_ccr/create_and_follow"); request.setJsonEntity("{\"leader_index\": \"leader_cluster:leader\"}"); - assertLicenseIncompatible(request); + assertNonCompliantLicense(request); } } + public void testAutoFollow() throws Exception { + if (runningAgainstLeaderCluster == false) { + final Request request = new Request("PUT", "/_ccr/_auto_follow/leader_cluster"); + request.setJsonEntity("{\"leader_index_patterns\":[\"*\"]}"); + client().performRequest(request); + + // parse the logs and ensure that the auto-coordinator skipped coordination on the leader cluster + assertBusy(() -> { + final List<String> lines = Files.readAllLines(PathUtils.get(System.getProperty("log"))); + + final Iterator<String> it = lines.iterator(); + + boolean warn = false; + while (it.hasNext()) { + final String line = it.next(); + if (line.matches(".*\\[WARN\\s*\\]\\[o\\.e\\.x\\.c\\.a\\.AutoFollowCoordinator\\s*\\] \\[node-0\\] " + + "failure occurred during auto-follower
coordination")) { + warn = true; + break; + } + } + assertTrue(warn); + assertTrue(it.hasNext()); + final String lineAfterWarn = it.next(); + assertThat( + lineAfterWarn, + equalTo("org.elasticsearch.ElasticsearchStatusException: " + + "can not fetch remote cluster state as the remote cluster [leader_cluster] is not licensed for [ccr]; " + + "the license mode [BASIC] on cluster [leader_cluster] does not enable [ccr]")); + }); + } + } + + private static void assertNonCompliantLicense(final Request request) { final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); final String expected = String.format( Locale.ROOT, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java index cd0561b1c0c60..353a66db26339 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java @@ -126,7 +126,7 @@ public Collection createComponents( return Arrays.asList( ccrLicenseChecker, - new AutoFollowCoordinator(settings, client, threadPool, clusterService) + new AutoFollowCoordinator(settings, client, threadPool, clusterService, ccrLicenseChecker) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index cefa490f4f7e2..f9a5d8fe83035 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -23,6 +23,7 @@ import java.util.Objects; import java.util.function.BooleanSupplier; import java.util.function.Consumer; +import java.util.function.Function; /** * Encapsulates licensing checking for CCR. @@ -58,14 +59,13 @@ public boolean isCcrAllowed() { /** * Fetches the leader index metadata from the remote cluster. Before fetching the index metadata, the remote cluster is checked for - * license compatibility with CCR. If the remote cluster is not licensed for CCR, the {@link ActionListener#onFailure(Exception)} method - * of the specified listener is invoked. Otherwise, the specified consumer is invoked with the leader index metadata fetched from the - * remote cluster. + * license compatibility with CCR. If the remote cluster is not licensed for CCR, the {@code onFailure} consumer is is invoked. + * Otherwise, the specified consumer is invoked with the leader index metadata fetched from the remote cluster. 
* * @param client the client * @param clusterAlias the remote cluster alias * @param leaderIndex the name of the leader index - * @param listener the listener + * @param onFailure the failure consumer * @param leaderIndexMetadataConsumer the leader index metadata consumer * @param <T> the type of response the listener is waiting for */ @@ -73,8 +73,75 @@ public <T> void checkRemoteClusterLicenseAndFetchLeaderIndexMetadata( final Client client, final String clusterAlias, final String leaderIndex, - final ActionListener<T> listener, + final Consumer<Exception> onFailure, final Consumer<IndexMetaData> leaderIndexMetadataConsumer) { + + final ClusterStateRequest request = new ClusterStateRequest(); + request.clear(); + request.metaData(true); + request.indices(leaderIndex); + checkRemoteClusterLicenseAndFetchClusterState( + client, + clusterAlias, + request, + onFailure, + leaderClusterState -> leaderIndexMetadataConsumer.accept(leaderClusterState.getMetaData().index(leaderIndex)), + licenseCheck -> indexMetadataNonCompliantRemoteLicense(leaderIndex, licenseCheck), + e -> indexMetadataUnknownRemoteLicense(leaderIndex, clusterAlias, e)); + } + + /** + * Fetches the leader cluster state from the remote cluster by the specified cluster state request. Before fetching the cluster state, + * the remote cluster is checked for license compliance with CCR. If the remote cluster is not licensed for CCR, + * the {@code onFailure} consumer is invoked. Otherwise, the specified consumer is invoked with the leader cluster state fetched from + * the remote cluster. + * + * @param client the client + * @param clusterAlias the remote cluster alias + * @param request the cluster state request + * @param onFailure the failure consumer + * @param leaderClusterStateConsumer the leader cluster state consumer + * @param <T> the type of response the listener is waiting for + */ + public <T> void checkRemoteClusterLicenseAndFetchClusterState( + final Client client, + final String clusterAlias, + final ClusterStateRequest request, + final Consumer<Exception> onFailure, + final Consumer<ClusterState> leaderClusterStateConsumer) { + checkRemoteClusterLicenseAndFetchClusterState( + client, + clusterAlias, + request, + onFailure, + leaderClusterStateConsumer, + CcrLicenseChecker::clusterStateNonCompliantRemoteLicense, + e -> clusterStateUnknownRemoteLicense(clusterAlias, e)); + } + + /** + * Fetches the leader cluster state from the remote cluster by the specified cluster state request. Before fetching the cluster state, + * the remote cluster is checked for license compliance with CCR. If the remote cluster is not licensed for CCR, + * the {@code onFailure} consumer is invoked. Otherwise, the specified consumer is invoked with the leader cluster state fetched from + * the remote cluster. + * + * @param client the client + * @param clusterAlias the remote cluster alias + * @param request the cluster state request + * @param onFailure the failure consumer + * @param leaderClusterStateConsumer the leader cluster state consumer + * @param nonCompliantLicense the supplier for when the license state of the remote cluster is non-compliant + * @param unknownLicense the supplier for when the license state of the remote cluster is unknown due to failure + * @param <T> the type of response the listener is waiting for + */ + private <T> void checkRemoteClusterLicenseAndFetchClusterState( + final Client client, + final String clusterAlias, + final ClusterStateRequest request, + final Consumer<Exception> onFailure, + final Consumer<ClusterState> leaderClusterStateConsumer, + final Function<RemoteClusterLicenseChecker.LicenseCheck, ElasticsearchStatusException> nonCompliantLicense, + final Function<Exception, ElasticsearchStatusException> unknownLicense) { // we have to check the license on the remote cluster new RemoteClusterLicenseChecker(client, XPackLicenseState::isCcrAllowedForOperationMode).checkRemoteClusterLicenses( Collections.singletonList(clusterAlias), @@ -83,35 +150,25 @@ public <T> void checkRemoteClusterLicenseAndFetchLeaderIndexMetadata( @Override public void onResponse(final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) { if (licenseCheck.isSuccess()) { - final Client remoteClient = client.getRemoteClusterClient(clusterAlias); - final ClusterStateRequest clusterStateRequest = new ClusterStateRequest(); - clusterStateRequest.clear(); - clusterStateRequest.metaData(true); - clusterStateRequest.indices(leaderIndex); - final ActionListener<ClusterStateResponse> clusterStateListener = ActionListener.wrap( - r -> { - final ClusterState remoteClusterState = r.getState(); - final IndexMetaData leaderIndexMetadata = - remoteClusterState.getMetaData().index(leaderIndex); - leaderIndexMetadataConsumer.accept(leaderIndexMetadata); - }, - listener::onFailure); + final Client leaderClient = client.getRemoteClusterClient(clusterAlias); + final ActionListener<ClusterStateResponse> clusterStateListener = + ActionListener.wrap(s -> leaderClusterStateConsumer.accept(s.getState()), onFailure); // following an index in remote cluster, so use remote client to fetch leader index metadata - remoteClient.admin().cluster().state(clusterStateRequest, clusterStateListener); + leaderClient.admin().cluster().state(request, clusterStateListener); } else { - listener.onFailure(incompatibleRemoteLicense(leaderIndex, licenseCheck)); + onFailure.accept(nonCompliantLicense.apply(licenseCheck)); } } @Override public void onFailure(final Exception e) { - listener.onFailure(unknownRemoteLicense(leaderIndex, clusterAlias, e)); + onFailure.accept(unknownLicense.apply(e)); } }); } - private static ElasticsearchStatusException incompatibleRemoteLicense( + private static ElasticsearchStatusException indexMetadataNonCompliantRemoteLicense( final String leaderIndex, final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) { final String clusterAlias = licenseCheck.remoteClusterLicenseInfo().clusterAlias(); final String message = String.format( @@ -127,7 +184,21 @@ private static ElasticsearchStatusException incompatibleRemoteLicense( return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST); } - private static ElasticsearchStatusException unknownRemoteLicense( + private static ElasticsearchStatusException clusterStateNonCompliantRemoteLicense( + final RemoteClusterLicenseChecker.LicenseCheck licenseCheck) { + final String clusterAlias = licenseCheck.remoteClusterLicenseInfo().clusterAlias(); + final String message = String.format( + Locale.ROOT, + "can not fetch remote cluster
state as the remote cluster [%s] is not licensed for [ccr]; %s", + clusterAlias, + RemoteClusterLicenseChecker.buildErrorMessage( + "ccr", + licenseCheck.remoteClusterLicenseInfo(), + RemoteClusterLicenseChecker::isLicensePlatinumOrTrial)); + return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST); + } + + private static ElasticsearchStatusException indexMetadataUnknownRemoteLicense( final String leaderIndex, final String clusterAlias, final Exception cause) { final String message = String.format( Locale.ROOT, @@ -138,4 +209,11 @@ private static ElasticsearchStatusException unknownRemoteLicense( return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST, cause); } + private static ElasticsearchStatusException clusterStateUnknownRemoteLicense(final String clusterAlias, final Exception cause) { + final String message = String.format( + Locale.ROOT, + "can not fetch remote cluster state as the license state of the remote cluster [%s] could not be determined", clusterAlias); + return new ElasticsearchStatusException(message, RestStatus.BAD_REQUEST, cause); + } + } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 234fe32cdd0ee..639cd4d5782ab 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -21,7 +21,9 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; +import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.ccr.CcrSettings; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; @@ -30,6 +32,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; @@ -47,22 +50,32 @@ public class AutoFollowCoordinator implements ClusterStateApplier { private final TimeValue pollInterval; private final ThreadPool threadPool; private final ClusterService clusterService; + private final CcrLicenseChecker ccrLicenseChecker; private volatile boolean localNodeMaster = false; - public AutoFollowCoordinator(Settings settings, - Client client, - ThreadPool threadPool, - ClusterService clusterService) { + public AutoFollowCoordinator( + Settings settings, + Client client, + ThreadPool threadPool, + ClusterService clusterService, + CcrLicenseChecker ccrLicenseChecker) { this.client = client; this.threadPool = threadPool; this.clusterService = clusterService; + this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker"); this.pollInterval = CcrSettings.CCR_AUTO_FOLLOW_POLL_INTERVAL.get(settings); clusterService.addStateApplier(this); } private void doAutoFollow() { + if (ccrLicenseChecker.isCcrAllowed() == false) { + // TODO: set non-compliant status on auto-follow coordination that can be viewed via a stats API + LOGGER.warn("skipping auto-follower coordination", LicenseUtils.newComplianceException("ccr")); + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, 
this::doAutoFollow); + return; + } if (localNodeMaster == false) { return; } @@ -80,23 +93,32 @@ private void doAutoFollow() { Consumer handler = e -> { if (e != null) { - LOGGER.warn("Failure occurred during auto following indices", e); + LOGGER.warn("failure occurred during auto-follower coordination", e); } threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); }; - AutoFollower operation = new AutoFollower(client, handler, followerClusterState) { + AutoFollower operation = new AutoFollower(handler, followerClusterState) { @Override - void getLeaderClusterState(Client leaderClient, BiConsumer handler) { - ClusterStateRequest request = new ClusterStateRequest(); + void getLeaderClusterState(final String leaderClusterAlias, final BiConsumer handler) { + final ClusterStateRequest request = new ClusterStateRequest(); request.clear(); request.metaData(true); - leaderClient.admin().cluster().state(request, - ActionListener.wrap( - r -> handler.accept(r.getState(), null), - e -> handler.accept(null, e) - ) - ); + + if ("_local_".equals(leaderClusterAlias)) { + client.admin().cluster().state( + request, ActionListener.wrap(r -> handler.accept(r.getState(), null), e -> handler.accept(null, e))); + } else { + final Client leaderClient = client.getRemoteClusterClient(leaderClusterAlias); + // TODO: set non-compliant status on auto-follow coordination that can be viewed via a stats API + ccrLicenseChecker.checkRemoteClusterLicenseAndFetchClusterState( + leaderClient, + leaderClusterAlias, + request, + e -> handler.accept(null, e), + leaderClusterState -> handler.accept(leaderClusterState, null)); + } + } @Override @@ -143,7 +165,6 @@ public void applyClusterState(ClusterChangedEvent event) { abstract static class AutoFollower { - private final Client client; private final Consumer handler; private final ClusterState followerClusterState; private final AutoFollowMetadata autoFollowMetadata; @@ -151,8 +172,7 @@ abstract static class AutoFollower { private final CountDown autoFollowPatternsCountDown; private final AtomicReference autoFollowPatternsErrorHolder = new AtomicReference<>(); - AutoFollower(Client client, Consumer handler, ClusterState followerClusterState) { - this.client = client; + AutoFollower(final Consumer handler, final ClusterState followerClusterState) { this.handler = handler; this.followerClusterState = followerClusterState; this.autoFollowMetadata = followerClusterState.getMetaData().custom(AutoFollowMetadata.TYPE); @@ -163,10 +183,9 @@ void autoFollowIndices() { for (Map.Entry entry : autoFollowMetadata.getPatterns().entrySet()) { String clusterAlias = entry.getKey(); AutoFollowPattern autoFollowPattern = entry.getValue(); - Client leaderClient = clusterAlias.equals("_local_") ? 
client : client.getRemoteClusterClient(clusterAlias); List followedIndices = autoFollowMetadata.getFollowedLeaderIndexUUIDs().get(clusterAlias); - getLeaderClusterState(leaderClient, (leaderClusterState, e) -> { + getLeaderClusterState(clusterAlias, (leaderClusterState, e) -> { if (leaderClusterState != null) { assert e == null; handleClusterAlias(clusterAlias, autoFollowPattern, followedIndices, leaderClusterState); @@ -289,18 +308,17 @@ static Function recordLeaderIndexAsFollowFunction(St }; } - // abstract methods to make unit testing possible: - - abstract void getLeaderClusterState(Client leaderClient, - BiConsumer handler); + /** + * Fetch the cluster state from the leader with the specified cluster alias + * + * @param leaderClusterAlias the cluster alias of the leader + * @param handler the callback to invoke + */ + abstract void getLeaderClusterState(String leaderClusterAlias, BiConsumer handler); - abstract void createAndFollow(FollowIndexAction.Request followRequest, - Runnable successHandler, - Consumer failureHandler); + abstract void createAndFollow(FollowIndexAction.Request followRequest, Runnable successHandler, Consumer failureHandler); - abstract void updateAutoFollowMetadata(Function updateFunction, - Consumer handler); + abstract void updateAutoFollowMetadata(Function updateFunction, Consumer handler); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java index 2e36bca293225..cf77bf8112f91 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java @@ -255,7 +255,7 @@ private void createFollowerIndexAndFollowRemoteIndex( client, clusterAlias, leaderIndex, - listener, + listener::onFailure, leaderIndexMetaData -> createFollowerIndex(leaderIndexMetaData, request, listener)); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java index 17b7bbe674b38..2a14c4e9a50bb 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java @@ -370,7 +370,7 @@ private void followRemoteIndex( client, clusterAlias, leaderIndex, - listener, + listener::onFailure, leaderIndexMetadata -> { try { start(request, clusterAlias, leaderIndexMetadata, followerIndexMetadata, listener); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index 3d3e342c0cd3e..a4ff9511cfbd8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -21,8 +21,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import 
org.elasticsearch.xpack.ccr.CcrLicenseChecker; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; @@ -30,20 +32,29 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.stream.Collectors; public class TransportPutAutoFollowPatternAction extends TransportMasterNodeAction { private final Client client; + private final CcrLicenseChecker ccrLicenseChecker; @Inject - public TransportPutAutoFollowPatternAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, Client client, - IndexNameExpressionResolver indexNameExpressionResolver) { + public TransportPutAutoFollowPatternAction( + final Settings settings, + final TransportService transportService, + final ClusterService clusterService, + final ThreadPool threadPool, + final ActionFilters actionFilters, + final Client client, + final IndexNameExpressionResolver indexNameExpressionResolver, + final CcrLicenseChecker ccrLicenseChecker) { super(settings, PutAutoFollowPatternAction.NAME, transportService, clusterService, threadPool, actionFilters, indexNameExpressionResolver, PutAutoFollowPatternAction.Request::new); this.client = client; + this.ccrLicenseChecker = Objects.requireNonNull(ccrLicenseChecker, "ccrLicenseChecker"); } @Override @@ -60,6 +71,10 @@ protected AcknowledgedResponse newResponse() { protected void masterOperation(PutAutoFollowPatternAction.Request request, ClusterState state, ActionListener listener) throws Exception { + if (ccrLicenseChecker.isCcrAllowed() == false) { + listener.onFailure(LicenseUtils.newComplianceException("ccr")); + return; + } final Client leaderClient; if (request.getLeaderClusterAlias().equals("_local_")) { leaderClient = client; @@ -71,22 +86,26 @@ protected void masterOperation(PutAutoFollowPatternAction.Request request, clusterStateRequest.clear(); clusterStateRequest.metaData(true); - leaderClient.admin().cluster().state(clusterStateRequest, ActionListener.wrap(clusterStateResponse -> { - final ClusterState leaderClusterState = clusterStateResponse.getState(); - clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderClusterAlias(), - new AckedClusterStateUpdateTask(request, listener) { - - @Override - protected AcknowledgedResponse newResponse(boolean acknowledged) { - return new AcknowledgedResponse(acknowledged); - } - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - return innerPut(request, currentState, leaderClusterState); - } - }); - }, listener::onFailure)); + leaderClient.admin().cluster().state( + clusterStateRequest, + ActionListener.wrap( + clusterStateResponse -> { + final ClusterState leaderClusterState = clusterStateResponse.getState(); + clusterService.submitStateUpdateTask("put-auto-follow-pattern-" + request.getLeaderClusterAlias(), + new AckedClusterStateUpdateTask(request, listener) { + + @Override + protected AcknowledgedResponse newResponse(boolean acknowledged) { + return new AcknowledgedResponse(acknowledged); + } + + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + return innerPut(request, currentState, leaderClusterState); + } + }); + }, + listener::onFailure)); } static ClusterState innerPut(PutAutoFollowPatternAction.Request request, diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 675758903bf27..05383b280e6ce 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -6,15 +6,22 @@ package org.elasticsearch.xpack.ccr; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.MockLogAppender; +import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator; import org.elasticsearch.xpack.ccr.action.CcrStatsAction; import org.elasticsearch.xpack.ccr.action.CreateAndFollowIndexAction; import org.elasticsearch.xpack.ccr.action.FollowIndexAction; +import org.elasticsearch.xpack.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardFollowNodeTask; import java.util.Collection; @@ -28,10 +35,10 @@ public class CcrLicenseIT extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return Collections.singletonList(IncompatibleLicenseLocalStateCcr.class); + return Collections.singletonList(NonCompliantLicenseLocalStateCcr.class); } - public void testThatFollowingIndexIsUnavailableWithIncompatibleLicense() throws InterruptedException { + public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { final FollowIndexAction.Request followRequest = getFollowRequest(); final CountDownLatch latch = new CountDownLatch(1); client().execute( @@ -45,14 +52,14 @@ public void onResponse(final AcknowledgedResponse response) { @Override public void onFailure(final Exception e) { - assertIncompatibleLicense(e); + assertNonCompliantLicense(e); latch.countDown(); } }); latch.await(); } - public void testThatCreateAndFollowingIndexIsUnavailableWithIncompatibleLicense() throws InterruptedException { + public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense() throws InterruptedException { final FollowIndexAction.Request followRequest = getFollowRequest(); final CreateAndFollowIndexAction.Request createAndFollowRequest = new CreateAndFollowIndexAction.Request(followRequest); final CountDownLatch latch = new CountDownLatch(1); @@ -67,14 +74,14 @@ public void onResponse(final CreateAndFollowIndexAction.Response response) { @Override public void onFailure(final Exception e) { - assertIncompatibleLicense(e); + assertNonCompliantLicense(e); latch.countDown(); } }); latch.await(); } - public void testThatCcrStatsAreUnavailableWithIncompatibleLicense() throws InterruptedException { + public void testThatCcrStatsAreUnavailableWithNonCompliantLicense() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); client().execute(CcrStatsAction.INSTANCE, new CcrStatsAction.TasksRequest(), new ActionListener() { @Override @@ -84,7 +91,7 @@ public void onResponse(final CcrStatsAction.TasksResponse tasksResponse) { @Override public void onFailure(final Exception e) { - assertIncompatibleLicense(e); + assertNonCompliantLicense(e); latch.countDown(); } }); @@ -92,7 +99,52 @@ public void onFailure(final Exception e) { 
latch.await(); } - private void assertIncompatibleLicense(final Exception e) { + public void testThatPutAutoFollowPatternsIsUnavailableWithNonCompliantLicense() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + final PutAutoFollowPatternAction.Request request = new PutAutoFollowPatternAction.Request(); + request.setLeaderClusterAlias("leader"); + request.setLeaderIndexPatterns(Collections.singletonList("*")); + client().execute( + PutAutoFollowPatternAction.INSTANCE, + request, + new ActionListener() { + @Override + public void onResponse(final AcknowledgedResponse response) { + latch.countDown(); + fail(); + } + + @Override + public void onFailure(final Exception e) { + assertNonCompliantLicense(e); + latch.countDown(); + } + }); + latch.await(); + } + + public void testAutoFollowCoordinatorLogsSkippingAutoFollowCoordinationWithNonCompliantLicense() throws Exception { + final Logger logger = LogManager.getLogger(AutoFollowCoordinator.class); + final MockLogAppender appender = new MockLogAppender(); + appender.start(); + appender.addExpectation( + new MockLogAppender.ExceptionSeenEventExpectation( + getTestName(), + logger.getName(), + Level.WARN, + "skipping auto-follower coordination", + ElasticsearchSecurityException.class, + "current license is non-compliant for [ccr]")); + Loggers.addAppender(logger, appender); + try { + assertBusy(appender::assertAllExpectationsMatched); + } finally { + Loggers.removeAppender(logger, appender); + appender.stop(); + } + } + + private void assertNonCompliantLicense(final Exception e) { assertThat(e, instanceOf(ElasticsearchSecurityException.class)); assertThat(e.getMessage(), equalTo("current license is non-compliant for [ccr]")); } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IncompatibleLicenseLocalStateCcr.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java similarity index 80% rename from x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IncompatibleLicenseLocalStateCcr.java rename to x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java index c4b765d3c65ea..f960668a7dff1 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/IncompatibleLicenseLocalStateCcr.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/NonCompliantLicenseLocalStateCcr.java @@ -12,16 +12,16 @@ import java.nio.file.Path; -public class IncompatibleLicenseLocalStateCcr extends LocalStateCompositeXPackPlugin { +public class NonCompliantLicenseLocalStateCcr extends LocalStateCompositeXPackPlugin { - public IncompatibleLicenseLocalStateCcr(final Settings settings, final Path configPath) throws Exception { + public NonCompliantLicenseLocalStateCcr(final Settings settings, final Path configPath) throws Exception { super(settings, configPath); plugins.add(new Ccr(settings, new CcrLicenseChecker(() -> false)) { @Override protected XPackLicenseState getLicenseState() { - return IncompatibleLicenseLocalStateCcr.this.getLicenseState(); + return NonCompliantLicenseLocalStateCcr.this.getLicenseState(); } }); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java index dd1376a4d7a73..2ef841292322a 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java +++ 
b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java @@ -66,9 +66,9 @@ public void testAutoFollower() { invoked[0] = true; assertThat(e, nullValue()); }; - AutoFollower autoFollower = new AutoFollower(client, handler, currentState) { + AutoFollower autoFollower = new AutoFollower(handler, currentState) { @Override - void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + void getLeaderClusterState(String leaderClusterAlias, BiConsumer handler) { handler.accept(leaderState, null); } @@ -113,9 +113,9 @@ public void testAutoFollowerClusterStateApiFailure() { invoked[0] = true; assertThat(e, sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override - void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + void getLeaderClusterState(String leaderClusterAlias, BiConsumer handler) { handler.accept(null, failure); } @@ -161,9 +161,9 @@ public void testAutoFollowerUpdateClusterStateFailure() { invoked[0] = true; assertThat(e, sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override - void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + void getLeaderClusterState(String leaderClusterAlias, BiConsumer handler) { handler.accept(leaderState, null); } @@ -211,9 +211,9 @@ public void testAutoFollowerCreateAndFollowApiCallFailure() { invoked[0] = true; assertThat(e, sameInstance(failure)); }; - AutoFollower autoFollower = new AutoFollower(client, handler, followerState) { + AutoFollower autoFollower = new AutoFollower(handler, followerState) { @Override - void getLeaderClusterState(Client leaderClient, BiConsumer handler) { + void getLeaderClusterState(String leaderClusterAlias, BiConsumer handler) { handler.accept(leaderState, null); } From c67b0ba33edd361b49e2915d04ecf74e7e81f4d5 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 9 Sep 2018 07:16:56 -0400 Subject: [PATCH 56/91] Create temporary directory if needed in CCR test In the multi-cluster-with-non-compliant-license tests, we try to write out a java.policy to a temporary directory. However, if this temporary directory does not already exist then writing the java.policy file will fail. This commit ensures that the temporary directory exists before we attempt to write the java.policy file. --- .../multi-cluster-with-non-compliant-license/build.gradle | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle index c599903ced12e..845c9df533dba 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/build.gradle @@ -22,7 +22,11 @@ leaderClusterTestRunner { task writeJavaPolicy { doLast { - final File javaPolicy = file("${buildDir}/tmp/java.policy") + final File tmp = file("${buildDir}/tmp") + if (tmp.exists() == false && tmp.mkdirs() == false) { + throw new GradleException("failed to create temporary directory [${tmp}]") + } + final File javaPolicy = file("${tmp}/java.policy") javaPolicy.write( [ "grant {", From 9073dbefd6d5a1441ac11e7990e181fa74f3ce15 Mon Sep 17 00:00:00 2001 From: "S.Y. 
Wang" Date: Sun, 9 Sep 2018 20:47:47 +0900 Subject: [PATCH 57/91] HLRC: Add put stored script support to high-level rest client (#31323) Relates to #27205 --- .../client/RequestConverters.java | 14 ++ .../client/RestHighLevelClient.java | 64 +++++--- .../client/RequestConvertersTests.java | 37 +++++ .../client/RestHighLevelClientTests.java | 1 - .../elasticsearch/client/StoredScriptsIT.java | 58 ++++---- .../StoredScriptsDocumentationIT.java | 139 ++++++++++++++++-- .../high-level/script/put_script.asciidoc | 106 +++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../storedscripts/PutStoredScriptRequest.java | 12 +- .../PutStoredScriptRequestTests.java | 29 ++++ 10 files changed, 403 insertions(+), 59 deletions(-) create mode 100644 docs/java-rest/high-level/script/put_script.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 7fe3e08f3afb0..89f81512bc9d2 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; @@ -887,6 +888,19 @@ static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) t return request; } + static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws IOException { + String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(putStoredScriptRequest.id()).build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withTimeout(putStoredScriptRequest.timeout()); + params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout()); + if (Strings.hasText(putStoredScriptRequest.context())) { + params.putParam("context", putStoredScriptRequest.context()); + } + request.setEntity(createEntity(putStoredScriptRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request analyze(AnalyzeRequest request) throws IOException { EndpointBuilder builder = new EndpointBuilder(); String index = request.index(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 17f8f65943012..687290abe8866 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import 
org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -121,36 +122,36 @@ import org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.metrics.AvgAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedAvg; import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.GeoBoundsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; import org.elasticsearch.search.aggregations.metrics.GeoCentroidAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; -import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMax; -import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedMin; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentileRanks; import org.elasticsearch.search.aggregations.metrics.InternalHDRPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; -import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedAvg; +import org.elasticsearch.search.aggregations.metrics.ParsedCardinality; +import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoBounds; +import org.elasticsearch.search.aggregations.metrics.ParsedGeoCentroid; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentileRanks; +import org.elasticsearch.search.aggregations.metrics.ParsedHDRPercentiles; +import org.elasticsearch.search.aggregations.metrics.ParsedMax; +import org.elasticsearch.search.aggregations.metrics.ParsedMin; +import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ParsedStats; +import org.elasticsearch.search.aggregations.metrics.ParsedSum; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentileRanks; import org.elasticsearch.search.aggregations.metrics.ParsedTDigestPercentiles; -import org.elasticsearch.search.aggregations.metrics.ParsedScriptedMetric; +import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; +import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedStats; import org.elasticsearch.search.aggregations.metrics.StatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ExtendedStatsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedExtendedStats; 
-import org.elasticsearch.search.aggregations.metrics.ParsedSum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.ParsedValueCount; import org.elasticsearch.search.aggregations.metrics.ValueCountAggregationBuilder; import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue; import org.elasticsearch.search.aggregations.pipeline.ParsedSimpleValue; @@ -1050,6 +1051,35 @@ public void deleteScriptAsync(DeleteStoredScriptRequest request, RequestOptions AcknowledgedResponse::fromXContent, listener, emptySet()); } + /** + * Puts a stored script using the Scripting API. + * See Scripting API + * on elastic.co + * @param putStoredScriptRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public AcknowledgedResponse putScript(PutStoredScriptRequest putStoredScriptRequest, + RequestOptions options) throws IOException { + return performRequestAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options, + AcknowledgedResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously puts a stored script using the Scripting API. + * See Scripting API + * on elastic.co + * @param putStoredScriptRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void putScriptAsync(PutStoredScriptRequest putStoredScriptRequest, RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(putStoredScriptRequest, RequestConverters::putScript, options, + AcknowledgedResponse::fromXContent, listener, emptySet()); + } + + /** * Asynchronously executes a request using the Field Capabilities API. 
* See Field Capabilities API diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 840df49b47811..6f48d305a7799 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -1991,6 +1992,42 @@ public void testGetTemplateRequest() throws Exception { assertThat(request.getEntity(), nullValue()); } + public void testPutScript() throws Exception { + PutStoredScriptRequest putStoredScriptRequest = new PutStoredScriptRequest(); + + String id = randomAlphaOfLengthBetween(5, 10); + putStoredScriptRequest.id(id); + + XContentType xContentType = randomFrom(XContentType.values()); + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + builder.startObject("script") + .field("lang", "painless") + .field("source", "Math.log(_score * 2) + params.multiplier") + .endObject(); + builder.endObject(); + + putStoredScriptRequest.content(BytesReference.bytes(builder), xContentType); + } + + Map expectedParams = new HashMap<>(); + setRandomMasterTimeout(putStoredScriptRequest, expectedParams); + setRandomTimeout(putStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + + if (randomBoolean()) { + String context = randomAlphaOfLengthBetween(5, 10); + putStoredScriptRequest.context(context); + expectedParams.put("context", context); + } + + Request request = RequestConverters.putScript(putStoredScriptRequest); + + assertThat(request.getEndpoint(), equalTo("/_scripts/" + id)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertNotNull(request.getEntity()); + assertToXContentBody(putStoredScriptRequest, request.getEntity()); + } + public void testAnalyzeRequest() throws Exception { AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest() .text("Here is some text") diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index b6562cd44cd55..3bd47306e5e10 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -658,7 +658,6 @@ public void testApiNamingConventions() throws Exception { "indices.get_upgrade", "indices.put_alias", "mtermvectors", - "put_script", "reindex_rethrottle", "render_search_template", "scripts_painless_execute", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java index 1d693eee8396e..b15467d24ba2b 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java @@ -1,4 +1,5 @@ -package org.elasticsearch.client;/* +package org.elasticsearch.client; +/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright @@ -17,27 +18,27 @@ * under the License. */ -import org.apache.http.util.EntityUtils; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.Script; import org.elasticsearch.script.StoredScriptSource; import java.util.Collections; +import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; public class StoredScriptsIT extends ESRestHighLevelClientTestCase { - final String id = "calculate-score"; + private static final String id = "calculate-score"; public void testGetStoredScript() throws Exception { final StoredScriptSource scriptSource = @@ -45,13 +46,9 @@ public void testGetStoredScript() throws Exception { "Math.log(_score * 2) + params.my_modifier", Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); - final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); - // TODO: change to HighLevel PutStoredScriptRequest when it will be ready - // so far - using low-level REST API - Request putRequest = new Request("PUT", "/_scripts/calculate-score"); - putRequest.setJsonEntity("{\"script\":" + script + "}"); - Response putResponse = adminClient().performRequest(putRequest); - assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + PutStoredScriptRequest request = + new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource); + assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync)); GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score"); getRequest.masterNodeTimeout("50s"); @@ -68,22 +65,14 @@ public void testDeleteStoredScript() throws Exception { "Math.log(_score * 2) + params.my_modifier", Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); - final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); - // TODO: change to HighLevel PutStoredScriptRequest when it will be ready - // so far - using low-level REST API - Request putRequest = new Request("PUT", "/_scripts/" + id); - putRequest.setJsonEntity("{\"script\":" + script + "}"); - 
Response putResponse = adminClient().performRequest(putRequest); - assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + PutStoredScriptRequest request = + new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource); + assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync)); DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id); deleteRequest.masterNodeTimeout("50s"); deleteRequest.timeout("50s"); - - AcknowledgedResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript, - highLevelClient()::deleteScriptAsync); - - assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + assertAcked(execute(deleteRequest, highLevelClient()::deleteScript, highLevelClient()::deleteScriptAsync)); GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); @@ -92,4 +81,21 @@ public void testDeleteStoredScript() throws Exception { highLevelClient()::getScriptAsync)); assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND)); } + + public void testPutScript() throws Exception { + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + PutStoredScriptRequest request = + new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource); + assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync)); + + Map script = getAsMap("/_scripts/" + id); + assertThat(extractValue("_id", script), equalTo(id)); + assertThat(extractValue("found", script), equalTo(true)); + assertThat(extractValue("script.lang", script), equalTo("painless")); + assertThat(extractValue("script.source", script), equalTo("Math.log(_score * 2) + params.my_modifier")); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java index fc38090ef5b5b..c5d53abd978e1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java @@ -17,21 +17,21 @@ * under the License. 
*/ -import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; -import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; -import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.Script; import org.elasticsearch.script.StoredScriptSource; @@ -42,7 +42,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.extractValue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; /** @@ -187,14 +188,124 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + public void testPutScript() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + // tag::put-stored-script-request + PutStoredScriptRequest request = new PutStoredScriptRequest(); + request.id("id"); // <1> + request.content(new BytesArray( + "{\n" + + "\"script\": {\n" + + "\"lang\": \"painless\",\n" + + "\"source\": \"Math.log(_score * 2) + params.multiplier\"" + + "}\n" + + "}\n" + ), XContentType.JSON); // <2> + // end::put-stored-script-request + + // tag::put-stored-script-context + request.context("context"); // <1> + // end::put-stored-script-context + + // tag::put-stored-script-timeout + request.timeout(TimeValue.timeValueMinutes(2)); // <1> + request.timeout("2m"); // <2> + // end::put-stored-script-timeout + + // tag::put-stored-script-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::put-stored-script-masterTimeout + } + + { + PutStoredScriptRequest request = new PutStoredScriptRequest(); + request.id("id"); + + // tag::put-stored-script-content-painless + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + { + builder.startObject("script"); + { + builder.field("lang", "painless"); + builder.field("source", "Math.log(_score * 2) + params.multiplier"); + } + builder.endObject(); + } + builder.endObject(); + request.content(BytesReference.bytes(builder), XContentType.JSON); // <1> + // end::put-stored-script-content-painless + + + // tag::put-stored-script-execute + AcknowledgedResponse putStoredScriptResponse = client.putScript(request, RequestOptions.DEFAULT); + // end::put-stored-script-execute + + // tag::put-stored-script-response + 
boolean acknowledged = putStoredScriptResponse.isAcknowledged(); // <1> + // end::put-stored-script-response + + assertTrue(acknowledged); + + // tag::put-stored-script-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(AcknowledgedResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::put-stored-script-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::put-stored-script-execute-async + client.putScriptAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::put-stored-script-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + { + PutStoredScriptRequest request = new PutStoredScriptRequest(); + request.id("id"); + + // tag::put-stored-script-content-mustache + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + { + builder.startObject("script"); + { + builder.field("lang", "mustache"); + builder.field("source", "{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}"); + } + builder.endObject(); + } + builder.endObject(); + request.content(BytesReference.bytes(builder), XContentType.JSON); // <1> + // end::put-stored-script-content-mustache + + client.putScript(request, RequestOptions.DEFAULT); + + Map script = getAsMap("/_scripts/id"); + assertThat(extractValue("script.lang", script), equalTo("mustache")); + assertThat(extractValue("script.source", script), equalTo("{\"query\":{\"match\":{\"title\":\"{{query_string}}\"}}}")); + } + } + private void putStoredScript(String id, StoredScriptSource scriptSource) throws IOException { - final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); - // TODO: change to HighLevel PutStoredScriptRequest when it will be ready - // so far - using low-level REST API - Request request = new Request("PUT", "/_scripts/" + id); - request.setJsonEntity("{\"script\":" + script + "}"); - Response putResponse = adminClient().performRequest(request); - assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); - assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + PutStoredScriptRequest request = + new PutStoredScriptRequest(id, "search", new BytesArray("{}"), XContentType.JSON, scriptSource); + assertAcked(execute(request, highLevelClient()::putScript, highLevelClient()::putScriptAsync)); } } diff --git a/docs/java-rest/high-level/script/put_script.asciidoc b/docs/java-rest/high-level/script/put_script.asciidoc new file mode 100644 index 0000000000000..acc80e82d11e6 --- /dev/null +++ b/docs/java-rest/high-level/script/put_script.asciidoc @@ -0,0 +1,106 @@ +[[java-rest-high-put-stored-script]] +=== Put Stored Script API + +[[java-rest-high-put-stored-script-request]] +==== Put Stored Script Request + +A `PutStoredScriptRequest` requires an `id` and `content`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-request] +-------------------------------------------------- +<1> The id of the script +<2> The content of the script + +[[java-rest-high-put-stored-script-content]] +==== Content +The content of a script can be written in different languages and provided in 
+different ways: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-content-painless] +-------------------------------------------------- +<1> Specify a painless script provided as an `XContentBuilder` object. +Note that the builder needs to be passed as a `BytesReference` object + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-content-mustache] +-------------------------------------------------- +<1> Specify a mustache script provided as an `XContentBuilder` object. +Note that the value of `source` can be provided directly as a JSON string + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-context] +-------------------------------------------------- +<1> The context the script should be executed in. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-timeout] +-------------------------------------------------- +<1> Timeout to wait for all the nodes to acknowledge the script creation as a `TimeValue` +<2> Timeout to wait for all the nodes to acknowledge the script creation as a `String` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-put-stored-script-sync]] +==== Synchronous Execution +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute] +-------------------------------------------------- + +[[java-rest-high-put-stored-script-async]] +==== Asynchronous Execution + +The asynchronous execution of a put stored script request requires both the `PutStoredScriptRequest` +instance and an `ActionListener` instance to be passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute-async] +-------------------------------------------------- +<1> The `PutStoredScriptRequest` to execute and the `ActionListener` to use when +the execution completes + +[[java-rest-high-put-stored-script-listener]] +===== Action Listener + +The asynchronous method does not block and returns immediately. Once it is +completed, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. 
+ +A typical listener for `AcknowledgedResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-put-stored-script-response]] +==== Put Stored Script Response + +The returned `AcknowledgedResponse` allows you to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[put-stored-script-response] +-------------------------------------------------- +<1> Indicates whether all of the nodes have acknowledged the request \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 8d49724353e6f..8d92653ce5702 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -189,9 +189,11 @@ include::tasks/cancel_tasks.asciidoc[] The Java High Level REST Client supports the following Scripts APIs: * <> +* <> * <> include::script/get_script.asciidoc[] +include::script/put_script.asciidoc[] include::script/delete_script.asciidoc[] == Licensing APIs diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java index d02d6272c9514..e7c5a07f56874 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.StoredScriptSource; @@ -34,7 +36,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class PutStoredScriptRequest extends AcknowledgedRequest { +public class PutStoredScriptRequest extends AcknowledgedRequest implements ToXContent { private String id; private String context; @@ -160,4 +162,12 @@ public String toString() { (context != null ? 
", context [" + context + "]" : "") + ", content [" + source + "]}"; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("script"); + source.toXContent(builder, params); + + return builder; + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java index 2ca71fabbc7dc..821c75c2ed7d3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequestTests.java @@ -20,8 +20,11 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.script.StoredScriptSource; import org.elasticsearch.test.ESTestCase; @@ -48,4 +51,30 @@ public void testSerialization() throws IOException { } } } + + public void testToXContent() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()); + builder.startObject(); + builder.startObject("script") + .field("lang", "painless") + .field("source", "Math.log(_score * 2) + params.multiplier") + .endObject(); + builder.endObject(); + + BytesReference expectedRequestBody = BytesReference.bytes(builder); + + PutStoredScriptRequest request = new PutStoredScriptRequest(); + request.id("test1"); + request.content(expectedRequestBody, xContentType); + + XContentBuilder requestBuilder = XContentBuilder.builder(xContentType.xContent()); + requestBuilder.startObject(); + request.toXContent(requestBuilder, ToXContent.EMPTY_PARAMS); + requestBuilder.endObject(); + + BytesReference actualRequestBody = BytesReference.bytes(requestBuilder); + + assertEquals(expectedRequestBody, actualRequestBody); + } } From edc492419be3a30550f4b5816d438867d7a7c3c0 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 9 Sep 2018 09:52:40 -0400 Subject: [PATCH 58/91] Add latch countdown on failure in CCR license tests (#33548) We have some listeners in the CCR license tests that invoke Assert#fail if the onSuccess method for the listener is unexpectedly invoked. This can leave the main test thread hanging until the test suite times out rather than failing quickly. This commit adds some latch countdowns so that we fail quickly if these cases are hit. 
--- .../test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 05383b280e6ce..06cafc4777a49 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -47,6 +47,7 @@ public void testThatFollowingIndexIsUnavailableWithNonCompliantLicense() throws new ActionListener() { @Override public void onResponse(final AcknowledgedResponse response) { + latch.countDown(); fail(); } @@ -69,6 +70,7 @@ public void testThatCreateAndFollowingIndexIsUnavailableWithNonCompliantLicense( new ActionListener() { @Override public void onResponse(final CreateAndFollowIndexAction.Response response) { + latch.countDown(); fail(); } @@ -86,6 +88,7 @@ public void testThatCcrStatsAreUnavailableWithNonCompliantLicense() throws Inter client().execute(CcrStatsAction.INSTANCE, new CcrStatsAction.TasksRequest(), new ActionListener() { @Override public void onResponse(final CcrStatsAction.TasksResponse tasksResponse) { + latch.countDown(); fail(); } From 6eca6274090f8a460d9381ef27d6f335bfdd738b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 9 Sep 2018 10:22:22 -0400 Subject: [PATCH 59/91] Reverse logic for CCR license checks (#33549) This commit reverses the logic for CCR license checks in a few actions. This is done so that the successful case, which tends to be a larger block of code, does not require indentation. --- .../action/CreateAndFollowIndexAction.java | 28 +++++++++---------- .../xpack/ccr/action/FollowIndexAction.java | 28 +++++++++---------- .../ccr/action/TransportCcrStatsAction.java | 6 ++-- 3 files changed, 31 insertions(+), 31 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java index cf77bf8112f91..1e14eb8979fb7 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java @@ -221,21 +221,21 @@ protected Response newResponse() { @Override protected void masterOperation( final Request request, final ClusterState state, final ActionListener listener) throws Exception { - if (ccrLicenseChecker.isCcrAllowed()) { - final String[] indices = new String[]{request.getFollowRequest().getLeaderIndex()}; - final Map> remoteClusterIndices = remoteClusterService.groupClusterIndices(indices, s -> false); - if (remoteClusterIndices.containsKey(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) { - createFollowerIndexAndFollowLocalIndex(request, state, listener); - } else { - assert remoteClusterIndices.size() == 1; - final Map.Entry> entry = remoteClusterIndices.entrySet().iterator().next(); - assert entry.getValue().size() == 1; - final String clusterAlias = entry.getKey(); - final String leaderIndex = entry.getValue().get(0); - createFollowerIndexAndFollowRemoteIndex(request, clusterAlias, leaderIndex, listener); - } - } else { + if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); + return; + } + final String[] indices = new String[]{request.getFollowRequest().getLeaderIndex()}; + final Map> 
remoteClusterIndices = remoteClusterService.groupClusterIndices(indices, s -> false); + if (remoteClusterIndices.containsKey(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) { + createFollowerIndexAndFollowLocalIndex(request, state, listener); + } else { + assert remoteClusterIndices.size() == 1; + final Map.Entry> entry = remoteClusterIndices.entrySet().iterator().next(); + assert entry.getValue().size() == 1; + final String clusterAlias = entry.getKey(); + final String leaderIndex = entry.getValue().get(0); + createFollowerIndexAndFollowRemoteIndex(request, clusterAlias, leaderIndex, listener); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java index 2a14c4e9a50bb..498224551106d 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/FollowIndexAction.java @@ -328,21 +328,21 @@ public TransportAction( protected void doExecute(final Task task, final Request request, final ActionListener listener) { - if (ccrLicenseChecker.isCcrAllowed()) { - final String[] indices = new String[]{request.leaderIndex}; - final Map> remoteClusterIndices = remoteClusterService.groupClusterIndices(indices, s -> false); - if (remoteClusterIndices.containsKey(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) { - followLocalIndex(request, listener); - } else { - assert remoteClusterIndices.size() == 1; - final Map.Entry> entry = remoteClusterIndices.entrySet().iterator().next(); - assert entry.getValue().size() == 1; - final String clusterAlias = entry.getKey(); - final String leaderIndex = entry.getValue().get(0); - followRemoteIndex(request, clusterAlias, leaderIndex, listener); - } - } else { + if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); + return; + } + final String[] indices = new String[]{request.leaderIndex}; + final Map> remoteClusterIndices = remoteClusterService.groupClusterIndices(indices, s -> false); + if (remoteClusterIndices.containsKey(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY)) { + followLocalIndex(request, listener); + } else { + assert remoteClusterIndices.size() == 1; + final Map.Entry> entry = remoteClusterIndices.entrySet().iterator().next(); + assert entry.getValue().size() == 1; + final String clusterAlias = entry.getKey(); + final String leaderIndex = entry.getValue().get(0); + followRemoteIndex(request, clusterAlias, leaderIndex, listener); } } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java index 33873201f5fb3..3b5d0ac53cf81 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportCcrStatsAction.java @@ -65,11 +65,11 @@ protected void doExecute( final Task task, final CcrStatsAction.TasksRequest request, final ActionListener listener) { - if (ccrLicenseChecker.isCcrAllowed()) { - super.doExecute(task, request, listener); - } else { + if (ccrLicenseChecker.isCcrAllowed() == false) { listener.onFailure(LicenseUtils.newComplianceException("ccr")); + return; } + super.doExecute(task, request, listener); } @Override From d4b212c4c9f8f153522ef4c1d8f0c588bb5fc3d6 Mon Sep 17 00:00:00 
2001 From: Armin Braun Date: Sun, 9 Sep 2018 17:31:02 +0200 Subject: [PATCH 60/91] CORE: Make Pattern Exclusion Work with Aliases (#33518) * CORE: Make Pattern Exclusion Work with Aliases * Adds the pattern exclusion logic to finding aliases * Closes #33395 --- .../cluster/metadata/MetaData.java | 37 +++++++++++++------ .../cluster/metadata/MetaDataTests.java | 32 ++++++++++++++++ 2 files changed, 58 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index c3da63886140a..75869b54850d4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -296,14 +296,38 @@ private ImmutableOpenMap> findAliases(String[] origi return ImmutableOpenMap.of(); } - boolean matchAllAliases = matchAllAliases(aliases); + String[] patterns = new String[aliases.length]; + boolean[] include = new boolean[aliases.length]; + for (int i = 0; i < aliases.length; i++) { + String alias = aliases[i]; + if (alias.charAt(0) == '-') { + patterns[i] = alias.substring(1); + include[i] = false; + } else { + patterns[i] = alias; + include[i] = true; + } + } + boolean matchAllAliases = patterns.length == 0; ImmutableOpenMap.Builder> mapBuilder = ImmutableOpenMap.builder(); for (String index : concreteIndices) { IndexMetaData indexMetaData = indices.get(index); List filteredValues = new ArrayList<>(); for (ObjectCursor cursor : indexMetaData.getAliases().values()) { AliasMetaData value = cursor.value; - if (matchAllAliases || Regex.simpleMatch(aliases, value.alias())) { + boolean matched = matchAllAliases; + String alias = value.alias(); + for (int i = 0; i < patterns.length; i++) { + if (include[i]) { + if (matched == false) { + String pattern = patterns[i]; + matched = ALL.equals(pattern) || Regex.simpleMatch(pattern, alias); + } + } else if (matched) { + matched = Regex.simpleMatch(patterns[i], alias) == false; + } + } + if (matched) { filteredValues.add(value); } } @@ -317,15 +341,6 @@ private ImmutableOpenMap> findAliases(String[] origi return mapBuilder.build(); } - private static boolean matchAllAliases(final String[] aliases) { - for (String alias : aliases) { - if (alias.equals(ALL)) { - return true; - } - } - return aliases.length == 0; - } - /** * Checks if at least one of the specified aliases exists in the specified concrete indices. Wildcards are supported in the * alias names for partial matches. 
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 9d82e9e1cdca5..da50e99705dfb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -109,6 +109,38 @@ public void testFindAliases() { } } + public void testFindAliasWithExclusion() { + MetaData metaData = MetaData.builder().put( + IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("alias1").build()) + .putAlias(AliasMetaData.builder("alias2").build()) + ).build(); + List aliases = + metaData.findAliases(new GetAliasesRequest().aliases("*", "-alias1"), new String[] {"index"}).get("index"); + assertThat(aliases.size(), equalTo(1)); + assertThat(aliases.get(0).alias(), equalTo("alias2")); + } + + public void testFindAliasWithExclusionAndOverride() { + MetaData metaData = MetaData.builder().put( + IndexMetaData.builder("index") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(AliasMetaData.builder("aa").build()) + .putAlias(AliasMetaData.builder("ab").build()) + .putAlias(AliasMetaData.builder("bb").build()) + ).build(); + List aliases = + metaData.findAliases(new GetAliasesRequest().aliases("a*", "-*b", "b*"), new String[] {"index"}).get("index"); + assertThat(aliases.size(), equalTo(2)); + assertThat(aliases.get(0).alias(), equalTo("aa")); + assertThat(aliases.get(1).alias(), equalTo("bb")); + } + public void testIndexAndAliasWithSameName() { IndexMetaData.Builder builder = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) From 902d20cbbe7fa8a8fc9fe2408ad8d5711a4fc78e Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sun, 9 Sep 2018 13:18:30 -0400 Subject: [PATCH 61/91] CCR: Use single global checkpoint to normalize range (#33545) We may use different global checkpoints to validate/normalize the range of a change request if the global checkpoint is advanced between these calls. If this is the case, then we generate an invalid request range. 
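The race is easiest to see with concrete numbers. In the pre-fix code sketched below, the early-return check read the shard's current global checkpoint while the range normalization used the value captured earlier by the caller; the sequence numbers here are hypothetical.

["source","java"]
--------------------------------------------------
// Caller captured: globalCheckpoint == 100; it then advances on the shard to 105.

// Pre-fix check used a fresh read: fromSeqNo = 103 is not > 105, so no early return.
if (fromSeqNo > indexShard.getGlobalCheckpoint()) {
    return EMPTY_OPERATIONS_ARRAY;
}

// Normalization used the stale capture: toSeqNo = min(100, ...) can be 100,
// yielding fromSeqNo (103) > toSeqNo (100), an inverted range.
long toSeqNo = Math.min(globalCheckpoint, (fromSeqNo + maxOperationCount) - 1);

// The one-line fix below checks fromSeqNo against the same captured
// globalCheckpoint, and an assertion guards the invariant fromSeqNo <= toSeqNo.
--------------------------------------------------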
--- .../org/elasticsearch/xpack/ccr/action/ShardChangesAction.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java index b505ee015bab6..d102c6b5b7af8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardChangesAction.java @@ -297,12 +297,13 @@ static Translog.Operation[] getOperations(IndexShard indexShard, long globalChec if (indexShard.state() != IndexShardState.STARTED) { throw new IndexShardNotStartedException(indexShard.shardId(), indexShard.state()); } - if (fromSeqNo > indexShard.getGlobalCheckpoint()) { + if (fromSeqNo > globalCheckpoint) { return EMPTY_OPERATIONS_ARRAY; } int seenBytes = 0; // - 1 is needed, because toSeqNo is inclusive long toSeqNo = Math.min(globalCheckpoint, (fromSeqNo + maxOperationCount) - 1); + assert fromSeqNo <= toSeqNo : "invalid range from_seqno[" + fromSeqNo + "] > to_seqno[" + toSeqNo + "]"; final List operations = new ArrayList<>(); try (Translog.Snapshot snapshot = indexShard.newChangesSnapshot("ccr", fromSeqNo, toSeqNo, true)) { Translog.Operation op; From d1b99877fad4af7473e18e46cc461d3901b39921 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Sun, 9 Sep 2018 14:42:49 -0400 Subject: [PATCH 62/91] Remove underscore from auto-follow API (#33550) This commit removes the leading underscore from _auto_follow in the auto-follow API endpoints. --- .../org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java | 2 +- .../test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java | 2 +- .../xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java | 2 +- .../xpack/ccr/rest/RestPutAutoFollowPatternAction.java | 2 +- .../rest-api-spec/api/ccr.delete_auto_follow_pattern.json | 4 ++-- .../rest-api-spec/api/ccr.put_auto_follow_pattern.json | 4 ++-- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index c52a4a9b59d78..0562a88957ccb 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -48,7 +48,7 @@ public void testCreateAndFollowIndex() { public void testAutoFollow() throws Exception { if (runningAgainstLeaderCluster == false) { - final Request request = new Request("PUT", "/_ccr/_auto_follow/leader_cluster"); + final Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); request.setJsonEntity("{\"leader_index_patterns\":[\"*\"]}"); client().performRequest(request); diff --git a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java index 17a6db286f283..76d0e43813594 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexIT.java @@ 
-81,7 +81,7 @@ public void testFollowIndex() throws Exception { public void testAutoFollowPatterns() throws Exception { assumeFalse("Test should only run when both clusters are running", runningAgainstLeaderCluster); - Request request = new Request("PUT", "/_ccr/_auto_follow/leader_cluster"); + Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); request.setJsonEntity("{\"leader_index_patterns\": [\"logs-*\"]}"); assertOK(client().performRequest(request)); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java index bd3585c7982c1..d25e9bf65fdee 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestDeleteAutoFollowPatternAction.java @@ -21,7 +21,7 @@ public class RestDeleteAutoFollowPatternAction extends BaseRestHandler { public RestDeleteAutoFollowPatternAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/_auto_follow/{leader_cluster_alias}", this); + controller.registerHandler(RestRequest.Method.DELETE, "/_ccr/auto_follow/{leader_cluster_alias}", this); } @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java index d92ebb7b0bbe5..9b3aac3bbb5da 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/rest/RestPutAutoFollowPatternAction.java @@ -22,7 +22,7 @@ public class RestPutAutoFollowPatternAction extends BaseRestHandler { public RestPutAutoFollowPatternAction(Settings settings, RestController controller) { super(settings); - controller.registerHandler(RestRequest.Method.PUT, "/_ccr/_auto_follow/{leader_cluster_alias}", this); + controller.registerHandler(RestRequest.Method.PUT, "/_ccr/auto_follow/{leader_cluster_alias}", this); } @Override diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json index b14effd5f3f73..c958c842b54bb 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.delete_auto_follow_pattern.json @@ -3,8 +3,8 @@ "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "DELETE" ], "url": { - "path": "/_ccr/_auto_follow/{leader_cluster_alias}", - "paths": [ "/_ccr/_auto_follow/{leader_cluster_alias}" ], + "path": "/_ccr/auto_follow/{leader_cluster_alias}", + "paths": [ "/_ccr/auto_follow/{leader_cluster_alias}" ], "parts": { "leader_cluster_alias": { "type": "string", diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json index 28e7299713da5..ca9c255097f01 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/ccr.put_auto_follow_pattern.json @@ -3,8 +3,8 @@ "documentation": 
"http://www.elastic.co/guide/en/elasticsearch/reference/current", "methods": [ "PUT" ], "url": { - "path": "/_ccr/_auto_follow/{leader_cluster_alias}", - "paths": [ "/_ccr/_auto_follow/{leader_cluster_alias}" ], + "path": "/_ccr/auto_follow/{leader_cluster_alias}", + "paths": [ "/_ccr/auto_follow/{leader_cluster_alias}" ], "parts": { "leader_cluster_alias": { "type": "string", From fcb15b0ce30e1a46d42f65874c1e4a91e2c3794c Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Sun, 9 Sep 2018 22:53:03 +0100 Subject: [PATCH 63/91] [ML] Get job stats request should filter non-ML job tasks (#33516) When requesting job stats for `_all`, all ES tasks are accepted resulting to loads of cluster traffic and a memory overhead. This commit correctly filters out non ML job tasks. Closes #33515 --- .../core/ml/action/GetJobsStatsAction.java | 3 +-- .../xpack/core/ml/action/OpenJobAction.java | 11 ++++++++-- .../action/GetJobStatsActionRequestTests.java | 9 +++++++++ .../action/TransportOpenJobActionTests.java | 20 +++++++++++++++++++ 4 files changed, 39 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index 807c09363759b..d2d5d09090e76 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.BaseTasksResponse; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -95,7 +94,7 @@ public boolean allowNoJobs() { @Override public boolean match(Task task) { - return jobId.equals(MetaData.ALL) || OpenJobAction.JobTaskMatcher.match(task, jobId); + return OpenJobAction.JobTaskMatcher.match(task, jobId); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index fc38d974defff..bbc39c7d73118 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -239,8 +240,14 @@ public Version getMinimalSupportedVersion() { public interface JobTaskMatcher { static boolean match(Task task, String expectedJobId) { - String expectedDescription = "job-" + expectedJobId; - return task instanceof JobTaskMatcher && expectedDescription.equals(task.getDescription()); + if (task instanceof JobTaskMatcher) { + if (MetaData.ALL.equals(expectedJobId)) { + return true; + } + String expectedDescription = "job-" + expectedJobId; + return expectedDescription.equals(task.getDescription()); + } + return 
false; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionRequestTests.java index 913618de38b58..edf3f73a8afc8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/GetJobStatsActionRequestTests.java @@ -6,9 +6,13 @@ package org.elasticsearch.xpack.core.ml.action; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.AbstractStreamableTestCase; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction.Request; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + public class GetJobStatsActionRequestTests extends AbstractStreamableTestCase { @Override @@ -23,4 +27,9 @@ protected Request createBlankInstance() { return new Request(); } + public void testMatch_GivenAll_FailsForNonJobTasks() { + Task nonJobTask = mock(Task.class); + + assertThat(new Request("_all").match(nonJobTask), is(false)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index bef7705e83533..58b60273b0e6d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.persistent.PersistentTasksCustomMetaData.Assignment; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.ml.MlMetaIndex; @@ -66,6 +67,7 @@ import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -626,6 +628,24 @@ public void testNodeNameAndMlAttributes() { assertEquals("{_node_name1}{ml.machine_memory=5}{node.ml=true}", TransportOpenJobAction.nodeNameAndMlAttributes(node)); } + public void testJobTaskMatcherMatch() { + Task nonJobTask1 = mock(Task.class); + Task nonJobTask2 = mock(Task.class); + TransportOpenJobAction.JobTask jobTask1 = new TransportOpenJobAction.JobTask("ml-1", + 0, "persistent", "", null, null); + TransportOpenJobAction.JobTask jobTask2 = new TransportOpenJobAction.JobTask("ml-2", + 1, "persistent", "", null, null); + + assertThat(OpenJobAction.JobTaskMatcher.match(nonJobTask1, "_all"), is(false)); + assertThat(OpenJobAction.JobTaskMatcher.match(nonJobTask2, "_all"), is(false)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask1, "_all"), is(true)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask2, "_all"), is(true)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask1, "ml-1"), is(true)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask2, "ml-1"), is(false)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask1, "ml-2"), is(false)); + assertThat(OpenJobAction.JobTaskMatcher.match(jobTask2, "ml-2"), is(true)); + } + public static void 
addJobTask(String jobId, String nodeId, JobState jobState, PersistentTasksCustomMetaData.Builder builder) {
         builder.addTask(MlTasks.jobTaskId(jobId), OpenJobAction.TASK_NAME, new OpenJobAction.JobParams(jobId),
                 new Assignment(nodeId, "test assignment"));
     }

From 6bb817004b9d3f4cc9e723f0df50b5a2f1cdc064 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Sun, 9 Sep 2018 20:49:19 -0400
Subject: [PATCH 64/91] Add infrastructure to upgrade settings (#33536)

In some cases we want to deprecate a setting, and then automatically
upgrade uses of that setting to a replacement setting. This commit adds
infrastructure for this so that we can upgrade settings when recovering
the cluster state, as well as when such settings are dynamically applied
on cluster update settings requests. This commit only focuses on cluster
settings; index settings can build on this infrastructure in a
follow-up.
---
 .../TransportClusterUpdateSettingsAction.java |   8 +-
 .../client/transport/TransportClient.java     |   5 +-
 .../settings/AbstractScopedSettings.java      |  47 ++++++-
 .../common/settings/ClusterSettings.java      |  13 +-
 .../common/settings/IndexScopedSettings.java  |   2 +-
 .../common/settings/SettingUpgrader.java      |  54 ++++++++
 .../common/settings/SettingsModule.java       |  23 +++-
 .../org/elasticsearch/gateway/Gateway.java    |  11 +-
 .../java/org/elasticsearch/node/Node.java     |  12 +-
 .../org/elasticsearch/plugins/Plugin.java     |  10 ++
 .../indices/get/GetIndexActionTests.java      |  10 +-
 .../settings/get/GetSettingsActionTests.java  |   4 +-
 .../common/settings/ScopedSettingsTests.java  | 126 ++++++++++++++++++
 .../common/settings/SettingsModuleTests.java  |   5 +-
 .../common/settings/UpgradeSettingsIT.java    | 125 +++++++++++++++++
 .../elasticsearch/gateway/GatewayTests.java   |  90 +++++++++++++
 .../test/AbstractBuilderTestCase.java         |   3 +-
 .../core/security/authc/RealmSettings.java    |   4 +-
 .../test/SettingsFilterTests.java             |   4 +-
 19 files changed, 530 insertions(+), 26 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java
 create mode 100644 server/src/test/java/org/elasticsearch/common/settings/UpgradeSettingsIT.java
 create mode 100644 server/src/test/java/org/elasticsearch/gateway/GatewayTests.java

diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
index 4cf74fbf865cc..8360797d66021 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/TransportClusterUpdateSettingsAction.java
@@ -179,8 +179,12 @@ public void onFailure(String source, Exception e) {
 
             @Override
             public ClusterState execute(final ClusterState currentState) {
-                ClusterState clusterState =
-                    updater.updateSettings(currentState, request.transientSettings(), request.persistentSettings(), logger);
+                final ClusterState clusterState =
+                    updater.updateSettings(
+                        currentState,
+                        clusterSettings.upgradeSettings(request.transientSettings()),
+                        clusterSettings.upgradeSettings(request.persistentSettings()),
+                        logger);
                 changed = clusterState != currentState;
                 return clusterState;
             }
diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java
index ba18105e3f1ca..39829615fb3fe 100644
---
a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -19,7 +19,6 @@ package org.elasticsearch.client.transport; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; @@ -44,6 +43,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.node.InternalSettingsPreparer; @@ -146,7 +146,8 @@ private static ClientTemplate buildTemplate(Settings providedSettings, Settings for (final ExecutorBuilder builder : threadPool.builders()) { additionalSettings.addAll(builder.getRegisteredSettings()); } - SettingsModule settingsModule = new SettingsModule(settings, additionalSettings, additionalSettingsFilter); + SettingsModule settingsModule = + new SettingsModule(settings, additionalSettings, additionalSettingsFilter, Collections.emptySet()); SearchModule searchModule = new SearchModule(settings, true, pluginsService.filterPlugins(SearchPlugin.class)); IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index a77d739ffe0b4..e25d954aa4f1c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; +import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -38,6 +39,7 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Function; import java.util.function.Predicate; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -52,14 +54,29 @@ public abstract class AbstractScopedSettings extends AbstractComponent { private final List> settingUpdaters = new CopyOnWriteArrayList<>(); private final Map> complexMatchers; private final Map> keySettings; + private final Map, Function, Map.Entry>> settingUpgraders; private final Setting.Property scope; private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$"); private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$"); private static final Pattern AFFIX_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+[*](?:[.][-\\w]+)+$"); - protected AbstractScopedSettings(Settings settings, Set> settingsSet, Setting.Property scope) { + protected AbstractScopedSettings( + final Settings settings, + final Set> settingsSet, + final Set> settingUpgraders, + final Setting.Property scope) { super(settings); this.lastSettingsApplied = Settings.EMPTY; + + this.settingUpgraders = + Collections.unmodifiableMap( + settingUpgraders + .stream() + .collect( + Collectors.toMap( + SettingUpgrader::getSetting, + u -> e -> new AbstractMap.SimpleEntry<>(u.getKey(e.getKey()), 
u.getValue(e.getValue()))))); + this.scope = scope; Map> complexMatchers = new HashMap<>(); Map> keySettings = new HashMap<>(); @@ -97,6 +114,7 @@ protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, this.scope = other.scope; complexMatchers = other.complexMatchers; keySettings = other.keySettings; + settingUpgraders = Collections.unmodifiableMap(new HashMap<>(other.settingUpgraders)); settingUpdaters.addAll(other.settingUpdaters); } @@ -757,6 +775,32 @@ private static Setting findOverlappingSetting(Setting newSetting, Map setting = getRaw(key); + final Function, Map.Entry> upgrader = settingUpgraders.get(setting); + if (upgrader == null) { + // the setting does not have an upgrader, copy the setting + builder.copy(key, settings); + } else { + // the setting has an upgrader, so mark that we have changed a setting and apply the upgrade logic + changed = true; + final Map.Entry upgrade = upgrader.apply(new Entry(key, settings)); + builder.put(upgrade.getKey(), upgrade.getValue()); + } + } + // we only return a new instance if there was an upgrade + return changed ? builder.build() : settings; + } + /** * Archives invalid or unknown settings. Any setting that is not recognized or fails validation * will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX} @@ -847,4 +891,5 @@ public String setValue(String value) { public boolean isPrivateSetting(String key) { return false; } + } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 10787140bdec8..cb369d6cfda02 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -100,6 +100,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; import java.util.function.Predicate; @@ -107,8 +108,13 @@ * Encapsulates all valid cluster level settings. 
*/ public final class ClusterSettings extends AbstractScopedSettings { - public ClusterSettings(Settings nodeSettings, Set> settingsSet) { - super(nodeSettings, settingsSet, Property.NodeScope); + public ClusterSettings(final Settings nodeSettings, final Set> settingsSet) { + this(nodeSettings, settingsSet, Collections.emptySet()); + } + + public ClusterSettings( + final Settings nodeSettings, final Set> settingsSet, final Set> settingUpgraders) { + super(nodeSettings, settingsSet, settingUpgraders, Property.NodeScope); addSettingsUpdater(new LoggingSettingUpdater(nodeSettings)); } @@ -436,4 +442,7 @@ public void apply(Settings value, Settings current, Settings previous) { IndexGraveyard.SETTING_MAX_TOMBSTONES, EnableAssignmentDecider.CLUSTER_TASKS_ALLOCATION_ENABLE_SETTING ))); + + public static List> BUILT_IN_SETTING_UPGRADERS = Collections.emptyList(); + } diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 2116d1eff7510..ae8529af5b53e 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -178,7 +178,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, BUILT_IN_INDEX_SETTINGS); public IndexScopedSettings(Settings settings, Set> settingsSet) { - super(settings, settingsSet, Property.IndexScope); + super(settings, settingsSet, Collections.emptySet(), Property.IndexScope); } private IndexScopedSettings(Settings settings, IndexScopedSettings other, IndexMetaData metaData) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java b/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java new file mode 100644 index 0000000000000..91f2bead300d3 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.settings; + +/** + * Represents the logic to upgrade a setting. + * + * @param the type of the underlying setting + */ +public interface SettingUpgrader { + + /** + * The setting upgraded by this upgrader. + * + * @return the setting + */ + Setting getSetting(); + + /** + * The logic to upgrade the setting key, for example by mapping the old setting key to the new setting key. + * + * @param key the setting key to upgrade + * @return the upgraded setting key + */ + String getKey(String key); + + /** + * The logic to upgrade the setting value. 
+ * + * @param value the setting value to upgrade + * @return the upgraded setting value + */ + default String getValue(final String value) { + return value; + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 67037b3708bee..1eca3eb415f12 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -54,10 +54,14 @@ public class SettingsModule implements Module { private final SettingsFilter settingsFilter; public SettingsModule(Settings settings, Setting... additionalSettings) { - this(settings, Arrays.asList(additionalSettings), Collections.emptyList()); + this(settings, Arrays.asList(additionalSettings), Collections.emptyList(), Collections.emptySet()); } - public SettingsModule(Settings settings, List> additionalSettings, List settingsFilter) { + public SettingsModule( + Settings settings, + List> additionalSettings, + List settingsFilter, + Set> settingUpgraders) { logger = Loggers.getLogger(getClass(), settings); this.settings = settings; for (Setting setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) { @@ -70,12 +74,22 @@ public SettingsModule(Settings settings, List> additionalSettings, Li for (Setting setting : additionalSettings) { registerSetting(setting); } - for (String filter : settingsFilter) { registerSettingsFilter(filter); } + final Set> clusterSettingUpgraders = new HashSet<>(); + for (final SettingUpgrader settingUpgrader : ClusterSettings.BUILT_IN_SETTING_UPGRADERS) { + assert settingUpgrader.getSetting().hasNodeScope() : settingUpgrader.getSetting().getKey(); + final boolean added = clusterSettingUpgraders.add(settingUpgrader); + assert added : settingUpgrader.getSetting().getKey(); + } + for (final SettingUpgrader settingUpgrader : settingUpgraders) { + assert settingUpgrader.getSetting().hasNodeScope() : settingUpgrader.getSetting().getKey(); + final boolean added = clusterSettingUpgraders.add(settingUpgrader); + assert added : settingUpgrader.getSetting().getKey(); + } this.indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values())); - this.clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values())); + this.clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()), clusterSettingUpgraders); Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") && // special case - we want to get Did you mean indices.query.bool.max_clause_count // which means we need to by-pass this check for this setting @@ -205,4 +219,5 @@ public ClusterSettings getClusterSettings() { public SettingsFilter getSettingsFilter() { return settingsFilter; } + } diff --git a/server/src/main/java/org/elasticsearch/gateway/Gateway.java b/server/src/main/java/org/elasticsearch/gateway/Gateway.java index d2261e5d1b421..77d2c553c2c51 100644 --- a/server/src/main/java/org/elasticsearch/gateway/Gateway.java +++ b/server/src/main/java/org/elasticsearch/gateway/Gateway.java @@ -137,20 +137,25 @@ public void performStateRecovery(final GatewayStateRecoveredListener listener) t } } } + final ClusterState.Builder builder = upgradeAndArchiveUnknownOrInvalidSettings(metaDataBuilder); + listener.onSuccess(builder.build()); + } + + ClusterState.Builder upgradeAndArchiveUnknownOrInvalidSettings(MetaData.Builder metaDataBuilder) { final ClusterSettings clusterSettings 
= clusterService.getClusterSettings(); metaDataBuilder.persistentSettings( clusterSettings.archiveUnknownOrInvalidSettings( - metaDataBuilder.persistentSettings(), + clusterSettings.upgradeSettings(metaDataBuilder.persistentSettings()), e -> logUnknownSetting("persistent", e), (e, ex) -> logInvalidSetting("persistent", e, ex))); metaDataBuilder.transientSettings( clusterSettings.archiveUnknownOrInvalidSettings( - metaDataBuilder.transientSettings(), + clusterSettings.upgradeSettings(metaDataBuilder.transientSettings()), e -> logUnknownSetting("transient", e), (e, ex) -> logInvalidSetting("transient", e, ex))); ClusterState.Builder builder = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.get(settings)); builder.metaData(metaDataBuilder); - listener.onSuccess(builder.build()); + return builder; } private void logUnknownSetting(String settingType, Map.Entry e) { diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index c2ef6d12331fe..67c3894ddf40a 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -73,6 +73,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.SettingUpgrader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.BoundTransportAddress; @@ -151,6 +152,7 @@ import org.elasticsearch.watcher.ResourceWatcherService; import javax.net.ssl.SNIHostName; + import java.io.BufferedWriter; import java.io.Closeable; import java.io.IOException; @@ -360,7 +362,15 @@ protected Node( AnalysisModule analysisModule = new AnalysisModule(this.environment, pluginsService.filterPlugins(AnalysisPlugin.class)); // this is as early as we can validate settings at this point. 
we already pass them to ScriptModule as well as ThreadPool // so we might be late here already - final SettingsModule settingsModule = new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter); + + final Set> settingsUpgraders = pluginsService.filterPlugins(Plugin.class) + .stream() + .map(Plugin::getSettingUpgraders) + .flatMap(List::stream) + .collect(Collectors.toSet()); + + final SettingsModule settingsModule = + new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter, settingsUpgraders); scriptModule.registerClusterSettingsListeners(settingsModule.getClusterSettings()); resourcesToClose.add(resourceWatcherService); final NetworkService networkService = new NetworkService( diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 65d47682a95c0..faef27207e13a 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.SettingUpgrader; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -172,6 +173,15 @@ public void onIndexModule(IndexModule indexModule) {} */ public List getSettingsFilter() { return Collections.emptyList(); } + /** + * Get the setting upgraders provided by this plugin. + * + * @return the settings upgraders + */ + public List> getSettingUpgraders() { + return Collections.emptyList(); + } + /** * Provides a function to modify global custom meta data on startup. *

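To show how the new hook is meant to be consumed, here is a sketch of a plugin that migrates a deprecated setting to its replacement. The setting names are hypothetical; the shape mirrors the `UpgradeSettingsIT` plugin added later in this patch.

["source","java"]
--------------------------------------------------
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.SettingUpgrader;
import org.elasticsearch.plugins.Plugin;

public class MySettingsPlugin extends Plugin {

    // hypothetical deprecated setting and its replacement
    static final Setting<String> OLD_SETTING =
        Setting.simpleString("my.old_setting", Setting.Property.NodeScope);
    static final Setting<String> NEW_SETTING =
        Setting.simpleString("my.new_setting", Setting.Property.NodeScope);

    @Override
    public List<Setting<?>> getSettings() {
        return Arrays.asList(OLD_SETTING, NEW_SETTING);
    }

    @Override
    public List<SettingUpgrader<?>> getSettingUpgraders() {
        return Collections.singletonList(new SettingUpgrader<String>() {

            @Override
            public Setting<String> getSetting() {
                return OLD_SETTING; // the setting to upgrade away from
            }

            @Override
            public String getKey(final String key) {
                return "my.new_setting"; // rewrite the key on upgrade
            }

            // getValue is not overridden: the default carries the value over unchanged
        });
    }
}
--------------------------------------------------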
diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java index b67c2e2954d04..9bf4d9d32f622 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/get/GetIndexActionTests.java @@ -40,9 +40,11 @@ import org.junit.After; import org.junit.Before; -import java.util.Collections; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptySet; + public class GetIndexActionTests extends ESSingleNodeTestCase { private TransportService transportService; @@ -58,14 +60,14 @@ public class GetIndexActionTests extends ESSingleNodeTestCase { public void setUp() throws Exception { super.setUp(); - settingsFilter = new SettingsModule(Settings.EMPTY, Collections.emptyList(), Collections.emptyList()).getSettingsFilter(); + settingsFilter = new SettingsModule(Settings.EMPTY, emptyList(), emptyList(), emptySet()).getSettingsFilter(); threadPool = new TestThreadPool("GetIndexActionTests"); clusterService = getInstanceFromNode(ClusterService.class); indicesService = getInstanceFromNode(IndicesService.class); CapturingTransport capturingTransport = new CapturingTransport(); transportService = capturingTransport.createCapturingTransportService(clusterService.getSettings(), threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, - boundAddress -> clusterService.localNode(), null, Collections.emptySet()); + boundAddress -> clusterService.localNode(), null, emptySet()); transportService.start(); transportService.acceptIncomingRequests(); getIndexAction = new GetIndexActionTests.TestTransportGetIndexAction(); @@ -106,7 +108,7 @@ class TestTransportGetIndexAction extends TransportGetIndexAction { TestTransportGetIndexAction() { super(Settings.EMPTY, GetIndexActionTests.this.transportService, GetIndexActionTests.this.clusterService, - GetIndexActionTests.this.threadPool, settingsFilter, new ActionFilters(Collections.emptySet()), + GetIndexActionTests.this.threadPool, settingsFilter, new ActionFilters(emptySet()), new GetIndexActionTests.Resolver(Settings.EMPTY), indicesService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java index 03ccebba10dbd..85b85cf9e1469 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsActionTests.java @@ -42,6 +42,8 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptySet; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; public class GetSettingsActionTests extends ESTestCase { @@ -71,7 +73,7 @@ protected void masterOperation(GetSettingsRequest request, ClusterState state, A public void setUp() throws Exception { super.setUp(); - settingsFilter = new SettingsModule(Settings.EMPTY, Collections.emptyList(), Collections.emptyList()).getSettingsFilter(); + settingsFilter = new SettingsModule(Settings.EMPTY, emptyList(), emptyList(), emptySet()).getSettingsFilter(); threadPool = new 
TestThreadPool("GetSettingsActionTests"); clusterService = createClusterService(threadPool); CapturingTransport capturingTransport = new CapturingTransport(); diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index f0f8b6c417f2f..0ee1d2e9c4a80 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -53,6 +54,7 @@ import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.sameInstance; public class ScopedSettingsTests extends ESTestCase { @@ -1045,4 +1047,128 @@ public void testPrivateIndexSettingsSkipValidation() { indexScopedSettings.validate(settings, false, /* validateInternalOrPrivateIndex */ false); } + public void testUpgradeSetting() { + final Setting oldSetting = Setting.simpleString("foo.old", Property.NodeScope); + final Setting newSetting = Setting.simpleString("foo.new", Property.NodeScope); + final Setting remainingSetting = Setting.simpleString("foo.remaining", Property.NodeScope); + + final AbstractScopedSettings service = + new ClusterSettings( + Settings.EMPTY, + new HashSet<>(Arrays.asList(oldSetting, newSetting, remainingSetting)), + Collections.singleton(new SettingUpgrader() { + + @Override + public Setting getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return "foo.new"; + } + + @Override + public String getValue(final String value) { + return "new." + value; + } + + })); + + final Settings settings = + Settings.builder() + .put("foo.old", randomAlphaOfLength(8)) + .put("foo.remaining", randomAlphaOfLength(8)) + .build(); + final Settings upgradedSettings = service.upgradeSettings(settings); + assertFalse(oldSetting.exists(upgradedSettings)); + assertTrue(newSetting.exists(upgradedSettings)); + assertThat(newSetting.get(upgradedSettings), equalTo("new." 
+ oldSetting.get(settings))); + assertTrue(remainingSetting.exists(upgradedSettings)); + assertThat(remainingSetting.get(upgradedSettings), equalTo(remainingSetting.get(settings))); + } + + public void testUpgradeSettingsNoChangesPreservesInstance() { + final Setting oldSetting = Setting.simpleString("foo.old", Property.NodeScope); + final Setting newSetting = Setting.simpleString("foo.new", Property.NodeScope); + final Setting remainingSetting = Setting.simpleString("foo.remaining", Property.NodeScope); + + final AbstractScopedSettings service = + new ClusterSettings( + Settings.EMPTY, + new HashSet<>(Arrays.asList(oldSetting, newSetting, remainingSetting)), + Collections.singleton(new SettingUpgrader() { + + @Override + public Setting getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return "foo.new"; + } + + })); + + final Settings settings = Settings.builder().put("foo.remaining", randomAlphaOfLength(8)).build(); + final Settings upgradedSettings = service.upgradeSettings(settings); + assertThat(upgradedSettings, sameInstance(settings)); + } + + public void testUpgradeComplexSetting() { + final Setting.AffixSetting oldSetting = + Setting.affixKeySetting("foo.old.", "suffix", key -> Setting.simpleString(key, Property.NodeScope)); + final Setting.AffixSetting newSetting = + Setting.affixKeySetting("foo.new.", "suffix", key -> Setting.simpleString(key, Property.NodeScope)); + final Setting.AffixSetting remainingSetting = + Setting.affixKeySetting("foo.remaining.", "suffix", key -> Setting.simpleString(key, Property.NodeScope)); + + final AbstractScopedSettings service = + new ClusterSettings( + Settings.EMPTY, + new HashSet<>(Arrays.asList(oldSetting, newSetting, remainingSetting)), + Collections.singleton(new SettingUpgrader() { + + @Override + public Setting getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return key.replaceFirst("^foo\\.old", "foo\\.new"); + } + + @Override + public String getValue(final String value) { + return "new." + value; + } + + })); + + final int count = randomIntBetween(1, 8); + final List concretes = new ArrayList<>(count); + final Settings.Builder builder = Settings.builder(); + for (int i = 0; i < count; i++) { + final String concrete = randomAlphaOfLength(8); + concretes.add(concrete); + builder.put("foo.old." + concrete + ".suffix", randomAlphaOfLength(8)); + builder.put("foo.remaining." + concrete + ".suffix", randomAlphaOfLength(8)); + } + final Settings settings = builder.build(); + final Settings upgradedSettings = service.upgradeSettings(settings); + for (final String concrete : concretes) { + assertFalse(oldSetting.getConcreteSettingForNamespace(concrete).exists(upgradedSettings)); + assertTrue(newSetting.getConcreteSettingForNamespace(concrete).exists(upgradedSettings)); + assertThat( + newSetting.getConcreteSettingForNamespace(concrete).get(upgradedSettings), + equalTo("new." 
+ oldSetting.getConcreteSettingForNamespace(concrete).get(settings))); + assertTrue(remainingSetting.getConcreteSettingForNamespace(concrete).exists(upgradedSettings)); + assertThat( + remainingSetting.getConcreteSettingForNamespace(concrete).get(upgradedSettings), + equalTo(remainingSetting.getConcreteSettingForNamespace(concrete).get(settings))); + } + } + } diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java index 6a2be8217a661..c6182eac8f680 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsModuleTests.java @@ -24,6 +24,7 @@ import java.util.Arrays; +import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; public class SettingsModuleTests extends ModuleTestCase { @@ -103,14 +104,14 @@ public void testRegisterSettingsFilter() { try { new SettingsModule(settings, Arrays.asList(Setting.boolSetting("foo.bar", true, Property.NodeScope), Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered), - Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*", "bar.foo")); + Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*", "bar.foo"), emptySet()); fail(); } catch (IllegalArgumentException ex) { assertEquals("filter [bar.foo] has already been registered", ex.getMessage()); } SettingsModule module = new SettingsModule(settings, Arrays.asList(Setting.boolSetting("foo.bar", true, Property.NodeScope), Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered), - Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*")); + Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*"), emptySet()); assertInstanceBinding(module, Settings.class, (s) -> s == settings); assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).size() == 1); assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).keySet().contains("bar.baz")); diff --git a/server/src/test/java/org/elasticsearch/common/settings/UpgradeSettingsIT.java b/server/src/test/java/org/elasticsearch/common/settings/UpgradeSettingsIT.java new file mode 100644 index 0000000000000..839b96e641870 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/settings/UpgradeSettingsIT.java @@ -0,0 +1,125 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.settings; + +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequestBuilder; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.After; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.function.BiConsumer; +import java.util.function.Function; + +import static org.hamcrest.Matchers.equalTo; + +public class UpgradeSettingsIT extends ESSingleNodeTestCase { + + @After + public void cleanup() throws Exception { + client() + .admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull("*")) + .setTransientSettings(Settings.builder().putNull("*")) + .get(); + } + + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singletonList(UpgradeSettingsPlugin.class); + } + + public static class UpgradeSettingsPlugin extends Plugin { + + static final Setting<String> oldSetting = Setting.simpleString("foo.old", Setting.Property.Dynamic, Setting.Property.NodeScope); + static final Setting<String> newSetting = Setting.simpleString("foo.new", Setting.Property.Dynamic, Setting.Property.NodeScope); + + public UpgradeSettingsPlugin(){ + + } + + @Override + public List<Setting<?>> getSettings() { + return Arrays.asList(oldSetting, newSetting); + } + + @Override + public List<SettingUpgrader<?>> getSettingUpgraders() { + return Collections.singletonList(new SettingUpgrader<String>() { + + @Override + public Setting<String> getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return "foo.new"; + } + + @Override + public String getValue(final String value) { + return "new." + value; + } + }); + } + } + + public void testUpgradePersistentSettingsOnUpdate() { + runUpgradeSettingsOnUpdateTest((settings, builder) -> builder.setPersistentSettings(settings), MetaData::persistentSettings); + } + + public void testUpgradeTransientSettingsOnUpdate() { + runUpgradeSettingsOnUpdateTest((settings, builder) -> builder.setTransientSettings(settings), MetaData::transientSettings); + } + + private void runUpgradeSettingsOnUpdateTest( + final BiConsumer<Settings, ClusterUpdateSettingsRequestBuilder> consumer, + final Function<MetaData, Settings> settingsFunction) { + final String value = randomAlphaOfLength(8); + final ClusterUpdateSettingsRequestBuilder builder = + client() + .admin() + .cluster() + .prepareUpdateSettings(); + consumer.accept(Settings.builder().put("foo.old", value).build(), builder); + builder.get(); + + final ClusterStateResponse response = client() + .admin() + .cluster() + .prepareState() + .clear() + .setMetaData(true) + .get(); + + assertFalse(UpgradeSettingsPlugin.oldSetting.exists(settingsFunction.apply(response.getState().metaData()))); + assertTrue(UpgradeSettingsPlugin.newSetting.exists(settingsFunction.apply(response.getState().metaData()))); + assertThat(UpgradeSettingsPlugin.newSetting.get(settingsFunction.apply(response.getState().metaData())), equalTo("new." + value)); + } + +} diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayTests.java new file mode 100644 index 0000000000000..457b3a14ebf4a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayTests.java @@ -0,0 +1,90 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gateway; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.SettingUpgrader; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.Set; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class GatewayTests extends ESTestCase { + + public void testUpgradePersistentSettings() { + runUpgradeSettings(MetaData.Builder::persistentSettings, MetaData::persistentSettings); + } + + public void testUpgradeTransientSettings() { + runUpgradeSettings(MetaData.Builder::transientSettings, MetaData::transientSettings); + } + + private void runUpgradeSettings( + final BiConsumer<MetaData.Builder, Settings> applySettingsToBuilder, final Function<MetaData, Settings> metaDataSettings) { + final Setting<String> oldSetting = Setting.simpleString("foo.old", Setting.Property.Dynamic, Setting.Property.NodeScope); + final Setting<String> newSetting = Setting.simpleString("foo.new", Setting.Property.Dynamic, Setting.Property.NodeScope); + final Set<Setting<?>> settingsSet = + Stream.concat( + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(), + Stream.of(oldSetting, newSetting)).collect(Collectors.toSet()); + final ClusterSettings clusterSettings = new ClusterSettings( + Settings.EMPTY, + settingsSet, + Collections.singleton(new SettingUpgrader<String>() { + + @Override + public Setting<String> getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return "foo.new"; + } + + @Override + public String getValue(final String value) { + return "new." + value; + } + + })); + final ClusterService clusterService = new ClusterService(Settings.EMPTY, clusterSettings, null); + final Gateway gateway = new Gateway(Settings.EMPTY, clusterService, null, null); + final MetaData.Builder builder = MetaData.builder(); + final Settings settings = Settings.builder().put("foo.old", randomAlphaOfLength(8)).build(); + applySettingsToBuilder.accept(builder, settings); + final ClusterState state = gateway.upgradeAndArchiveUnknownOrInvalidSettings(builder).build(); + assertFalse(oldSetting.exists(metaDataSettings.apply(state.metaData()))); + assertTrue(newSetting.exists(metaDataSettings.apply(state.metaData()))); + assertThat(newSetting.get(metaDataSettings.apply(state.metaData())), equalTo("new."
+ oldSetting.get(settings))); + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java index cc3902893411a..60f93f8ea30fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractBuilderTestCase.java @@ -340,7 +340,8 @@ private static class ServiceHolder implements Closeable { clientInvocationHandler); ScriptModule scriptModule = createScriptModule(pluginsService.filterPlugins(ScriptPlugin.class)); List<Setting<?>> additionalSettings = pluginsService.getPluginSettings(); - SettingsModule settingsModule = new SettingsModule(nodeSettings, additionalSettings, pluginsService.getPluginSettingsFilter()); + SettingsModule settingsModule = + new SettingsModule(nodeSettings, additionalSettings, pluginsService.getPluginSettingsFilter(), Collections.emptySet()); searchModule = new SearchModule(nodeSettings, false, pluginsService.filterPlugins(SearchPlugin.class)); IndicesModule indicesModule = new IndicesModule(pluginsService.filterPlugins(MapperPlugin.class)); List<NamedWriteableRegistry.Entry> entries = new ArrayList<>(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java index f7fabab2799af..daf1775a80a52 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmSettings.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.core.security.SecurityExtension; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -181,7 +182,8 @@ private static void validateRealm(String name, String type, Settings settings, S settingSet.add(TYPE_SETTING); settingSet.add(ENABLED_SETTING); settingSet.add(ORDER_SETTING); - final AbstractScopedSettings validator = new AbstractScopedSettings(settings, settingSet, Setting.Property.NodeScope) { }; + final AbstractScopedSettings validator = + new AbstractScopedSettings(settings, settingSet, Collections.emptySet(), Setting.Property.NodeScope) { }; try { validator.validate(settings, false); } catch (RuntimeException e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java index 1886dd4249b14..3bf3bb4dc8641 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java @@ -20,7 +20,9 @@ import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; + import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -121,7 +123,7 @@ public void testFiltering() throws Exception { List<String> settingsFilterList = new ArrayList<>(); settingsFilterList.addAll(securityPlugin.getSettingsFilter()); // custom settings, potentially added by a plugin - SettingsModule settingsModule = new SettingsModule(settings, settingList, settingsFilterList); + SettingsModule settingsModule = new SettingsModule(settings, settingList, settingsFilterList, Collections.emptySet()); Injector injector =
Guice.createInjector(settingsModule); SettingsFilter settingsFilter = injector.getInstance(SettingsFilter.class); From e6ca55bca6942ca4bc0c892a81fec01071b13796 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Sun, 9 Sep 2018 21:34:11 -0400 Subject: [PATCH 65/91] Adjust bwc for stale primary recovery source (#33432) Relates #33432 --- .../org/elasticsearch/cluster/routing/RecoverySource.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java index 2502fb0f3cc62..b7cc95298c49e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RecoverySource.java @@ -142,7 +142,7 @@ private ExistingStoreRecoverySource(boolean bootstrapNewHistoryUUID) { } private ExistingStoreRecoverySource(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_5_0)) { bootstrapNewHistoryUUID = in.readBoolean(); } else { bootstrapNewHistoryUUID = false; @@ -156,7 +156,7 @@ public void addAdditionalFields(XContentBuilder builder, Params params) throws I @Override protected void writeAdditionalFields(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_5_0)) { out.writeBoolean(bootstrapNewHistoryUUID); } } From 95100e05f97cc56e4fa30d1a7db3f9054885a4c0 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Sun, 9 Sep 2018 20:02:35 -0700 Subject: [PATCH 66/91] Remove some duplicate request conversion methods. (#33538) --- .../client/RequestConverters.java | 71 ------------------- .../client/RequestConvertersTests.java | 44 ------------ .../client/WatcherRequestConvertersTests.java | 4 +- 3 files changed, 2 insertions(+), 117 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 89f81512bc9d2..840bc4f0c4d9f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -95,11 +95,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.reindex.UpdateByQueryRequest; -import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; -import org.elasticsearch.protocol.xpack.license.GetLicenseRequest; -import org.elasticsearch.protocol.xpack.license.PutLicenseRequest; -import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; -import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -930,72 +925,6 @@ static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) return request; } - static Request xPackWatcherPutWatch(PutWatchRequest putWatchRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("watcher") - .addPathPartAsIs("watch") - .addPathPart(putWatchRequest.getId()) - .build(); - - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - 
Params params = new Params(request).withVersion(putWatchRequest.getVersion()); - if (putWatchRequest.isActive() == false) { - params.putParam("active", "false"); - } - ContentType contentType = createContentType(putWatchRequest.xContentType()); - BytesReference source = putWatchRequest.getSource(); - request.setEntity(new ByteArrayEntity(source.toBytesRef().bytes, 0, source.length(), contentType)); - return request; - } - - static Request xPackWatcherDeleteWatch(DeleteWatchRequest deleteWatchRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("watcher") - .addPathPartAsIs("watch") - .addPathPart(deleteWatchRequest.getId()) - .build(); - - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request putLicense(PutLicenseRequest putLicenseRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("license") - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - Params parameters = new Params(request); - parameters.withTimeout(putLicenseRequest.timeout()); - parameters.withMasterTimeout(putLicenseRequest.masterNodeTimeout()); - if (putLicenseRequest.isAcknowledge()) { - parameters.putParam("acknowledge", "true"); - } - request.setJsonEntity(putLicenseRequest.getLicenseDefinition()); - return request; - } - - static Request getLicense(GetLicenseRequest getLicenseRequest) { - String endpoint = new EndpointBuilder() - .addPathPartAsIs("_xpack") - .addPathPartAsIs("license") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - Params parameters = new Params(request); - parameters.withLocal(getLicenseRequest.local()); - return request; - } - - static Request deleteLicense(DeleteLicenseRequest deleteLicenseRequest) { - Request request = new Request(HttpDelete.METHOD_NAME, "/_xpack/license"); - Params parameters = new Params(request); - parameters.withTimeout(deleteLicenseRequest.timeout()); - parameters.withMasterTimeout(deleteLicenseRequest.masterNodeTimeout()); - return request; - } - static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 6f48d305a7799..4ef8e8542c95e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -109,8 +109,6 @@ import org.elasticsearch.index.reindex.ReindexRequest; import org.elasticsearch.index.reindex.RemoteInfo; import org.elasticsearch.index.reindex.UpdateByQueryRequest; -import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest; -import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -129,7 +127,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.RandomObjects; -import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; @@ -160,7 +157,6 @@ import static 
org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; public class RequestConvertersTests extends ESTestCase { @@ -2212,46 +2208,6 @@ public void testEnforceSameContentType() { + "previous requests have content-type [" + xContentType + "]", exception.getMessage()); } - public void testXPackPutWatch() throws Exception { - PutWatchRequest putWatchRequest = new PutWatchRequest(); - String watchId = randomAlphaOfLength(10); - putWatchRequest.setId(watchId); - String body = randomAlphaOfLength(20); - putWatchRequest.setSource(new BytesArray(body), XContentType.JSON); - - Map expectedParams = new HashMap<>(); - if (randomBoolean()) { - putWatchRequest.setActive(false); - expectedParams.put("active", "false"); - } - - if (randomBoolean()) { - long version = randomLongBetween(10, 100); - putWatchRequest.setVersion(version); - expectedParams.put("version", String.valueOf(version)); - } - - Request request = RequestConverters.xPackWatcherPutWatch(putWatchRequest); - assertEquals(HttpPut.METHOD_NAME, request.getMethod()); - assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); - assertEquals(expectedParams, request.getParameters()); - assertThat(request.getEntity().getContentType().getValue(), is(XContentType.JSON.mediaTypeWithoutParameters())); - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - request.getEntity().writeTo(bos); - assertThat(bos.toString("UTF-8"), is(body)); - } - - public void testXPackDeleteWatch() { - DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); - String watchId = randomAlphaOfLength(10); - deleteWatchRequest.setId(watchId); - - Request request = RequestConverters.xPackWatcherDeleteWatch(deleteWatchRequest); - assertEquals(HttpDelete.METHOD_NAME, request.getMethod()); - assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint()); - assertThat(request.getEntity(), nullValue()); - } - /** * Randomize the {@link FetchSourceContext} request parameters. 
*/ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java index 203d0826c6d96..cf5af1dd5949f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java @@ -36,7 +36,7 @@ public class WatcherRequestConvertersTests extends ESTestCase { - public void testXPackPutWatch() throws Exception { + public void testPutWatch() throws Exception { PutWatchRequest putWatchRequest = new PutWatchRequest(); String watchId = randomAlphaOfLength(10); putWatchRequest.setId(watchId); @@ -65,7 +65,7 @@ public void testXPackPutWatch() throws Exception { assertThat(bos.toString("UTF-8"), is(body)); } - public void testXPackDeleteWatch() { + public void testDeleteWatch() { DeleteWatchRequest deleteWatchRequest = new DeleteWatchRequest(); String watchId = randomAlphaOfLength(10); deleteWatchRequest.setId(watchId); From 77aeeda27530cb43206f8a6ce631ee71fa702883 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Mon, 10 Sep 2018 11:18:44 +0300 Subject: [PATCH 67/91] Correctly handle PKCS#11 tokens for system keystore (#33460) * Correctly handle NONE keyword for system keystore As defined in the PKCS#11 reference guide https://docs.oracle.com/javase/8/docs/technotes/guides/security/p11guide.html PKCS#11 tokens can be used as the JSSE keystore and truststore and the way to indicate this is to set `javax.net.ssl.keyStore` and `javax.net.ssl.trustStore` to `NONE` (case sensitive). This commits ensures that we honor this convention and do not attempt to load the keystore or truststore if the system property is set to NONE. * Handle password protected system truststore When a PKCS#11 token is used as the system truststore, we need to pass a password when loading it, even if only for reading certificate entries. This commit ensures that if `javax.net.ssl.trustStoreType` is set to `PKCS#11` (as it would when a PKCS#11 token is in use) the password specified in `javax.net.ssl.trustStorePassword` is passed when attempting to load the truststore. 
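To make the two JSSE conventions concrete, here is a minimal standalone sketch (simplified names, not the patched Elasticsearch code, which follows below) of the behaviors this commit honors: treating the literal, case-sensitive value `NONE` as "no file-backed store", and supplying the token PIN when loading a PKCS#11 truststore:

import java.security.KeyStore;

// A simplified illustration of the two JSSE conventions handled by this commit.
class Pkcs11ConventionsSketch {

    // "NONE" (case sensitive) signals that a PKCS#11 token backs the store,
    // so there is no keystore file on disk to load.
    static boolean keyStoreIsFileBacked() {
        final String path = System.getProperty("javax.net.ssl.keyStore");
        return path != null && path.equals("NONE") == false;
    }

    // A PKCS#11 truststore needs its password (the token PIN) at load time,
    // even when we only want to read certificate entries from it.
    static KeyStore loadPkcs11TrustStoreIfConfigured() throws Exception {
        if (System.getProperty("javax.net.ssl.trustStoreType", "").equalsIgnoreCase("PKCS11")) {
            final KeyStore keyStore = KeyStore.getInstance("PKCS11");
            final String pin = System.getProperty("javax.net.ssl.trustStorePassword", "");
            keyStore.load(null, pin.toCharArray()); // null stream: entries come from the token
            return keyStore;
        }
        return null; // no PKCS#11 token; the caller falls back to the JDK default trust store
    }
}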
Relates #33459 --- .../xpack/core/ssl/DefaultJDKTrustConfig.java | 22 ++++++++++++++++++- .../xpack/core/ssl/SSLConfiguration.java | 5 +++-- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java index ff818bb09f534..0a4c0552f6920 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/DefaultJDKTrustConfig.java @@ -16,6 +16,10 @@ import java.io.IOException; import java.nio.file.Path; import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; import java.util.Arrays; import java.util.Collection; import java.util.Collections; @@ -34,7 +38,7 @@ private DefaultJDKTrustConfig() { @Override X509ExtendedTrustManager createTrustManager(@Nullable Environment environment) { try { - return CertParsingUtils.trustManager(null, TrustManagerFactory.getDefaultAlgorithm()); + return CertParsingUtils.trustManager(getSystemTrustStore(), TrustManagerFactory.getDefaultAlgorithm()); } catch (Exception e) { throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); } @@ -81,4 +85,20 @@ static TrustConfig merge(TrustConfig trustConfig) { return new CombiningTrustConfig(Arrays.asList(INSTANCE, trustConfig)); } } + + /** + * When a PKCS#11 token is used as the system default keystore/truststore, we need to pass the keystore + * password when loading, even for reading certificates only ( as opposed to i.e. JKS keystores where + * we only need to pass the password for reading Private Key entries ). + * + * @return the KeyStore used as truststore for PKCS#11 initialized with the password, null otherwise + */ + private KeyStore getSystemTrustStore() throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException { + if (System.getProperty("javax.net.ssl.trustStoreType", "").equalsIgnoreCase("PKCS11")) { + KeyStore keyStore = KeyStore.getInstance("PKCS11"); + keyStore.load(null, System.getProperty("javax.net.ssl.trustStorePassword", "").toCharArray()); + return keyStore; + } + return null; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java index 731d59a3ac078..48dba65a3a6b7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/SSLConfiguration.java @@ -192,7 +192,7 @@ private static KeyConfig createKeyConfig(Settings settings, SSLConfiguration glo if (global != null) { return global.keyConfig(); } - if (System.getProperty("javax.net.ssl.keyStore") != null) { + if (System.getProperty("javax.net.ssl.keyStore") != null && System.getProperty("javax.net.ssl.keyStore").equals("NONE") == false) { // TODO: we should not support loading a keystore from sysprops... 
try (SecureString keystorePassword = new SecureString(System.getProperty("javax.net.ssl.keyStorePassword", ""))) { return new StoreKeyConfig(System.getProperty("javax.net.ssl.keyStore"), KeyStore.getDefaultType(), keystorePassword, @@ -233,7 +233,8 @@ private static TrustConfig createCertChainTrustConfig(Settings settings, KeyConf String trustStoreAlgorithm = SETTINGS_PARSER.truststoreAlgorithm.get(settings); String trustStoreType = getKeyStoreType(SETTINGS_PARSER.truststoreType, settings, trustStorePath); return new StoreTrustConfig(trustStorePath, trustStoreType, trustStorePassword, trustStoreAlgorithm); - } else if (global == null && System.getProperty("javax.net.ssl.trustStore") != null) { + } else if (global == null && System.getProperty("javax.net.ssl.trustStore") != null + && System.getProperty("javax.net.ssl.trustStore").equals("NONE") == false) { try (SecureString truststorePassword = new SecureString(System.getProperty("javax.net.ssl.trustStorePassword", ""))) { return new StoreTrustConfig(System.getProperty("javax.net.ssl.trustStore"), KeyStore.getDefaultType(), truststorePassword, System.getProperty("ssl.TrustManagerFactory.algorithm", TrustManagerFactory.getDefaultAlgorithm())); From 284c45a6ffb1a4eb2ce7498db25d362cae2c0c02 Mon Sep 17 00:00:00 2001 From: David Turner Date: Mon, 10 Sep 2018 11:23:05 +0200 Subject: [PATCH 68/91] Strengthen FilterRoutingTests (#33149) Today the FilterRoutingTests take the belt-and-braces approach of excluding some node attribute values and including some others. This means that we don't really test that both inclusion and exclusion work correctly: as long as one of them works as expected then the test will pass. This change improves these tests by using only one approach at a time, demonstrating that both do indeed work, and adds tests for various other scenarios too.
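For context, a node is eligible for a shard only when it passes every configured filter kind at once: it must match all `require` attributes, match at least one `include` value, and match no `exclude` value. A hypothetical combination of the cluster-level settings used by the new tests (constants as imported there) would look like:

// Hypothetical settings combining all three filter kinds; a node may host the
// shard only if tag1 is req1, tag2 is value1 or value2, and tag3 does not
// match *excluded*.
final Settings allocationFilters = Settings.builder()
    .put(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag1", "req1")
    .put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag2", "value1,value2")
    .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag3", "*excluded*")
    .build();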
--- .../resources/checkstyle_suppressions.xml | 1 - .../decider/FilterAllocationDecider.java | 8 +- .../allocation/FilterRoutingTests.java | 304 ++++++++++++++---- 3 files changed, 245 insertions(+), 68 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index a83aed602e17d..94bea76fe4ba1 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -462,7 +462,6 @@ - diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index f3146f6f771e8..df623aa8a5e07 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -158,13 +158,13 @@ private Decision shouldFilter(IndexMetaData indexMd, RoutingNode node, RoutingAl private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { if (indexMd.requireFilters() != null) { - if (!indexMd.requireFilters().match(node.node())) { + if (indexMd.requireFilters().match(node.node()) == false) { return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]", IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_PREFIX, indexMd.requireFilters()); } } if (indexMd.includeFilters() != null) { - if (!indexMd.includeFilters().match(node.node())) { + if (indexMd.includeFilters().match(node.node()) == false) { return allocation.decision(Decision.NO, NAME, "node does not match index setting [%s] filters [%s]", IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, indexMd.includeFilters()); } @@ -180,13 +180,13 @@ private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, Rout private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) { if (clusterRequireFilters != null) { - if (!clusterRequireFilters.match(node.node())) { + if (clusterRequireFilters.match(node.node()) == false) { return allocation.decision(Decision.NO, NAME, "node does not match cluster setting [%s] filters [%s]", CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX, clusterRequireFilters); } } if (clusterIncludeFilters != null) { - if (!clusterIncludeFilters.match(node.node())) { + if (clusterIncludeFilters.match(node.node()) == false) { return allocation.decision(Decision.NO, NAME, "node does not cluster setting [%s] filters [%s]", CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX, clusterIncludeFilters); } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java index 79473759f8f72..86e8887688ff2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterRoutingTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation; -import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -27,48 +26,170 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.node.DiscoveryNodes.Builder; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingState; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.hamcrest.Matchers; +import java.util.HashMap; import java.util.List; +import java.util.Map; +import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; +import static org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_SETTING; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider.CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider.CLUSTER_ROUTING_INCLUDE_GROUP_SETTING; +import static org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider.CLUSTER_ROUTING_REQUIRE_GROUP_SETTING; +import static org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING; import static org.hamcrest.Matchers.equalTo; public class FilterRoutingTests extends ESAllocationTestCase { - private final Logger logger = Loggers.getLogger(FilterRoutingTests.class); - public void testClusterFilters() { - AllocationService strategy = createAllocationService(Settings.builder() - .put("cluster.routing.allocation.include.tag1", "value1,value2") - .put("cluster.routing.allocation.exclude.tag1", "value3,value4") - .build()); + public void testClusterIncludeFiltersSingleAttribute() { + testClusterFilters(Settings.builder().put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value2"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4")))); + } + + public void testClusterIncludeFiltersMultipleAttributes() { + testClusterFilters(Settings.builder() + .put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1") + .put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag2", "value2"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag2", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag2", "value4")))); + } + + public void testClusterIncludeFiltersOptionalAttribute() { + testClusterFilters(Settings.builder().put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value2"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap())) + .add(newNode("node4", attrMap()))); + } + + public void testClusterIncludeFiltersWildcards() { + testClusterFilters(Settings.builder() + 
.put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "*incl*") + .put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag2", "*incl*"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "do_include_this"))) + .add(newNode("node2", attrMap("tag2", "also_include_this"))) + .add(newNode("node3", attrMap("tag1", "exclude_this"))) + .add(newNode("node4", attrMap("tag2", "also_exclude_this")))); + } + + public void testClusterExcludeFiltersSingleAttribute() { + testClusterFilters(Settings.builder().put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value3,value4"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4")))); + } + + public void testClusterExcludeFiltersMultipleAttributes() { + testClusterFilters(Settings.builder() + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value3") + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag2", "value4"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag2", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag2", "value4")))); + } + + public void testClusterExcludeFiltersOptionalAttribute() { + testClusterFilters(Settings.builder().put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value3,value4"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap())) + .add(newNode("node2", attrMap())) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4")))); + } + + public void testClusterExcludeFiltersWildcards() { + testClusterFilters(Settings.builder() + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "*excl*") + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag2", "*excl*"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "do_include_this"))) + .add(newNode("node2", attrMap("tag2", "also_include_this"))) + .add(newNode("node3", attrMap("tag1", "exclude_this"))) + .add(newNode("node4", attrMap("tag2", "also_exclude_this")))); + } + + public void testClusterIncludeAndExcludeFilters() { + testClusterFilters(Settings.builder() + .put(CLUSTER_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "*incl*") + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag2", "*excl*"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "do_include_this"))) + .add(newNode("node2", attrMap("tag1", "also_include_this", "tag2", "ok_by_tag2"))) + .add(newNode("node3", attrMap("tag1", "included_by_tag1", "tag2", "excluded_by_tag2"))) + .add(newNode("node4", attrMap("tag1", "excluded_by_tag1", "tag2", "included_by_tag2")))); + } + + public void testClusterRequireFilters() { + testClusterFilters(Settings.builder() + .put(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag1", "req1") + .put(CLUSTER_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag2", "req2"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "req1", "tag2", "req2"))) + .add(newNode("node2", attrMap("tag1", "req1", "tag2", "req2"))) + .add(newNode("node3", attrMap("tag1", "req1"))) + .add(newNode("node4", attrMap("tag1", "other", "tag2", "req2")))); + } + + private static Map<String, String> attrMap(String... keysValues) { + if (keysValues.length == 0) { + return emptyMap(); + } + if (keysValues.length == 2) { + return singletonMap(keysValues[0], keysValues[1]); + } + Map<String, String> result = new HashMap<>(); + for (int i = 0; i < keysValues.length; i += 2) { + result.put(keysValues[i], keysValues[i + 1]); + } + return result; + } + + /** + * A test that creates a 2p1r index and which expects the given allocation service's settings only to allocate the shards of this index + * to `node1` and `node2`. + */ + private void testClusterFilters(Settings.Builder allocationServiceSettings, DiscoveryNodes.Builder nodes) { + final AllocationService strategy = createAllocationService(allocationServiceSettings.build()); logger.info("Building initial routing table"); - MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(1)) - .build(); + final MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(1)) + .build(); - RoutingTable initialRoutingTable = RoutingTable.builder() - .addAsNew(metaData.index("test")) - .build(); - - ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); - - logger.info("--> adding four nodes and performing rerouting"); - clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() - .add(newNode("node1", singletonMap("tag1", "value1"))) - .add(newNode("node2", singletonMap("tag1", "value2"))) - .add(newNode("node3", singletonMap("tag1", "value3"))) - .add(newNode("node4", singletonMap("tag1", "value4"))) - ).build(); + final RoutingTable initialRoutingTable = RoutingTable.builder() + .addAsNew(metaData.index("test")) + .build(); + + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData).routingTable(initialRoutingTable).nodes(nodes).build(); + + logger.info("--> rerouting"); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(2)); @@ -79,41 +200,99 @@ public void testClusterFilters() { clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING)); logger.info("--> make sure shards are only allocated on tag1 with value1 and value2"); - List<ShardRouting> startedShards = clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); + final List<ShardRouting> startedShards = clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED); assertThat(startedShards.size(), equalTo(4)); for (ShardRouting startedShard : startedShards) { assertThat(startedShard.currentNodeId(), Matchers.anyOf(equalTo("node1"), equalTo("node2"))); } } - public void testIndexFilters() { + public void testIndexIncludeFilters() { + testIndexFilters( + Settings.builder().put(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value2"), + Settings.builder().put(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value4"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4"))) + .add(newNode("node5", attrMap())) + ); + } + + public void testIndexExcludeFilters() {
testIndexFilters( + Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value3,value4"), + Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value2,value3"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap())) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4")))); + } + + public void testIndexIncludeThenExcludeFilters() { + testIndexFilters( + Settings.builder().put(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value2"), + Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value2,value3") + .putNull(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap("tag1", "value2"))) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap()))); + } + + public void testIndexExcludeThenIncludeFilters() { + testIndexFilters( + Settings.builder().put(INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value3,value4"), + Settings.builder().put(INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + "tag1", "value1,value4") + .putNull(INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1"))) + .add(newNode("node2", attrMap())) + .add(newNode("node3", attrMap("tag1", "value3"))) + .add(newNode("node4", attrMap("tag1", "value4")))); + } + + public void testIndexRequireFilters() { + testIndexFilters( + Settings.builder() + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag1", "value1") + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag2", "value2"), + Settings.builder() + .putNull(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag2") + .put(INDEX_ROUTING_REQUIRE_GROUP_SETTING.getKey() + "tag3", "value3"), + DiscoveryNodes.builder() + .add(newNode("node1", attrMap("tag1", "value1", "tag2", "value2", "tag3", "value3"))) + .add(newNode("node2", attrMap("tag1", "value1", "tag2", "value2", "tag3", "other"))) + .add(newNode("node3", attrMap("tag1", "other", "tag2", "value2", "tag3", "other"))) + .add(newNode("node4", attrMap("tag1", "value1", "tag2", "other", "tag3", "value3"))) + .add(newNode("node5", attrMap("tag2", "value2", "tag3", "value3"))) + .add(newNode("node6", attrMap()))); + } + + /** + * A test that creates a 2p1r index and expects the given index allocation settings only to allocate the shards to `node1` and `node2`; + * on updating the index allocation settings the shards should be relocated to nodes `node1` and `node4`. 
+ */ + private void testIndexFilters(Settings.Builder initialIndexSettings, Settings.Builder updatedIndexSettings, Builder nodesBuilder) { AllocationService strategy = createAllocationService(Settings.builder() - .build()); + .build()); logger.info("Building initial routing table"); - MetaData initialMetaData = MetaData.builder() - .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) - .put("index.number_of_shards", 2) - .put("index.number_of_replicas", 1) - .put("index.routing.allocation.include.tag1", "value1,value2") - .put("index.routing.allocation.exclude.tag1", "value3,value4") - .build())) - .build(); + final MetaData initialMetaData = MetaData.builder().put(IndexMetaData.builder("test").settings(settings(Version.CURRENT) + .put("index.number_of_shards", 2).put("index.number_of_replicas", 1).put(initialIndexSettings.build()))).build(); - RoutingTable initialRoutingTable = RoutingTable.builder() - .addAsNew(initialMetaData.index("test")) - .build(); + final RoutingTable initialRoutingTable = RoutingTable.builder() + .addAsNew(initialMetaData.index("test")) + .build(); - ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(initialMetaData).routingTable(initialRoutingTable).build(); + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(initialMetaData).routingTable(initialRoutingTable).nodes(nodesBuilder).build(); - logger.info("--> adding two nodes and performing rerouting"); - clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder() - .add(newNode("node1", singletonMap("tag1", "value1"))) - .add(newNode("node2", singletonMap("tag1", "value2"))) - .add(newNode("node3", singletonMap("tag1", "value3"))) - .add(newNode("node4", singletonMap("tag1", "value4"))) - ).build(); + logger.info("--> rerouting"); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(2)); @@ -132,13 +311,11 @@ public void testIndexFilters() { logger.info("--> switch between value2 and value4, shards should be relocating"); - IndexMetaData existingMetaData = clusterState.metaData().index("test"); - MetaData updatedMetaData = MetaData.builder() - .put(IndexMetaData.builder(existingMetaData).settings(Settings.builder().put(existingMetaData.getSettings()) - .put("index.routing.allocation.include.tag1", "value1,value4") - .put("index.routing.allocation.exclude.tag1", "value2,value3") - .build())) - .build(); + final IndexMetaData existingMetaData = clusterState.metaData().index("test"); + final MetaData updatedMetaData + = MetaData.builder().put(IndexMetaData.builder(existingMetaData).settings(Settings.builder() + .put(existingMetaData.getSettings()).put(updatedIndexSettings.build()).build())).build(); + clusterState = ClusterState.builder(clusterState).metaData(updatedMetaData).build(); clusterState = strategy.reroute(clusterState, "reroute"); assertThat(clusterState.getRoutingNodes().shardsWithState(ShardRoutingState.STARTED).size(), equalTo(2)); @@ -160,16 +337,17 @@ public void testConcurrentRecoveriesAfterShardsCannotRemainOnNode() { logger.info("Building initial routing table"); MetaData metaData = MetaData.builder() - .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) - 
.put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) - .build(); + .put(IndexMetaData.builder("test1").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) + .put(IndexMetaData.builder("test2").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(0)) + .build(); RoutingTable initialRoutingTable = RoutingTable.builder() - .addAsNew(metaData.index("test1")) - .addAsNew(metaData.index("test2")) - .build(); + .addAsNew(metaData.index("test1")) + .addAsNew(metaData.index("test2")) + .build(); - ClusterState clusterState = ClusterState.builder(org.elasticsearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(initialRoutingTable).build(); + ClusterState clusterState = ClusterState.builder(CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData).routingTable(initialRoutingTable).build(); logger.info("--> adding two nodes and performing rerouting"); DiscoveryNode node1 = newNode("node1", singletonMap("tag1", "value1")); @@ -187,9 +365,9 @@ public void testConcurrentRecoveriesAfterShardsCannotRemainOnNode() { logger.info("--> disable allocation for node1 and reroute"); strategy = createAllocationService(Settings.builder() - .put("cluster.routing.allocation.node_concurrent_recoveries", "1") - .put("cluster.routing.allocation.exclude.tag1", "value1") - .build()); + .put(CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey(), "1") + .put(CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING.getKey() + "tag1", "value1") + .build()); logger.info("--> move shards from node1 to node2"); clusterState = strategy.reroute(clusterState, "reroute"); From c4adcee3ea3e224bf690cc9e9cbfc764b430c41c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 10 Sep 2018 13:08:20 +0200 Subject: [PATCH 69/91] [CCR] Add create_follow_index privilege (#33559) This is a new index privilege that the user needs to have in the follow cluster. This privilege is required in addition to the `manage_ccr` cluster privilege in order to execute the create and follow api. 
Closes #33555 --- .../qa/multi-cluster-with-security/roles.yml | 1 + .../xpack/ccr/FollowIndexSecurityIT.java | 26 ++++++++++++++----- .../action/CreateAndFollowIndexAction.java | 16 ++++++++++-- .../authz/privilege/IndexPrivilege.java | 3 +++ 4 files changed, 37 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml b/x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml index 700a2416c6609..7916bc6eee2cc 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/roles.yml @@ -7,3 +7,4 @@ ccruser: - monitor - read - write + - create_follow_index diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java index 7d658550d92b9..d8357a74e8ebc 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-security/src/test/java/org/elasticsearch/xpack/ccr/FollowIndexSecurityIT.java @@ -8,6 +8,7 @@ import org.apache.http.util.EntityUtils; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; @@ -18,6 +19,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; @@ -26,7 +28,9 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class FollowIndexSecurityIT extends ESRestTestCase { @@ -96,16 +100,19 @@ public void testFollowIndex() throws Exception { assertThat(countCcrNodeTasks(), equalTo(0)); }); - createAndFollowIndex("leader_cluster:" + unallowedIndex, unallowedIndex); - // Verify that nothing has been replicated and no node tasks are running - // These node tasks should have been failed due to the fact that the user - // has no sufficient priviledges. 
+ Exception e = expectThrows(ResponseException.class, + () -> createAndFollowIndex("leader_cluster:" + unallowedIndex, unallowedIndex)); + assertThat(e.getMessage(), + containsString("action [indices:admin/xpack/ccr/create_and_follow_index] is unauthorized for user [test_ccr]")); + // Verify that the follow index has not been created and no node tasks are running + assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); - verifyDocuments(adminClient(), unallowedIndex, 0); - followIndex("leader_cluster:" + unallowedIndex, unallowedIndex); + e = expectThrows(ResponseException.class, + () -> followIndex("leader_cluster:" + unallowedIndex, unallowedIndex)); + assertThat(e.getMessage(), containsString("follow index [" + unallowedIndex + "] does not exist")); + assertThat(indexExists(adminClient(), unallowedIndex), is(false)); assertBusy(() -> assertThat(countCcrNodeTasks(), equalTo(0))); - verifyDocuments(adminClient(), unallowedIndex, 0); } } @@ -191,4 +198,9 @@ protected static void createIndex(String name, Settings settings, String mapping assertOK(adminClient().performRequest(request)); } + private static boolean indexExists(RestClient client, String index) throws IOException { + Response response = client.performRequest(new Request("HEAD", "/" + index)); + return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); + } + } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java index 1e14eb8979fb7..223f6ed8e6d25 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/CreateAndFollowIndexAction.java @@ -12,9 +12,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.ActiveShardsObserver; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; @@ -52,7 +54,7 @@ public class CreateAndFollowIndexAction extends Action<CreateAndFollowIndexAction.Response> { public static final CreateAndFollowIndexAction INSTANCE = new CreateAndFollowIndexAction(); - public static final String NAME = "cluster:admin/xpack/ccr/create_and_follow_index"; + public static final String NAME = "indices:admin/xpack/ccr/create_and_follow_index"; private CreateAndFollowIndexAction() { super(NAME); @@ -63,7 +65,7 @@ public Response newResponse() { return new Response(); } - public static class Request extends AcknowledgedRequest<Request> { + public static class Request extends AcknowledgedRequest<Request> implements IndicesRequest { private FollowIndexAction.Request followRequest; @@ -83,6 +85,16 @@ public ActionRequestValidationException validate() { return followRequest.validate(); } + @Override + public String[] indices() { + return new String[]{followRequest.getFollowerIndex()}; + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + } +
   @Override
   public void readFrom(StreamInput in) throws IOException {
       super.readFrom(in);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java
index 723dff61679f8..779f2765f4803 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java
@@ -55,6 +55,7 @@ public final class IndexPrivilege extends Privilege {
     private static final Automaton VIEW_METADATA_AUTOMATON = patterns(GetAliasesAction.NAME, AliasesExistAction.NAME,
         GetIndexAction.NAME, IndicesExistsAction.NAME, GetFieldMappingsAction.NAME + "*", GetMappingsAction.NAME,
         ClusterSearchShardsAction.NAME, TypesExistsAction.NAME, ValidateQueryAction.NAME + "*", GetSettingsAction.NAME);
+    private static final Automaton CREATE_FOLLOW_INDEX_AUTOMATON = patterns("indices:admin/xpack/ccr/create_and_follow_index");
 
     public static final IndexPrivilege NONE = new IndexPrivilege("none", Automatons.EMPTY);
     public static final IndexPrivilege ALL = new IndexPrivilege("all", ALL_AUTOMATON);
@@ -69,6 +70,7 @@ public final class IndexPrivilege extends Privilege {
     public static final IndexPrivilege DELETE_INDEX = new IndexPrivilege("delete_index", DELETE_INDEX_AUTOMATON);
     public static final IndexPrivilege CREATE_INDEX = new IndexPrivilege("create_index", CREATE_INDEX_AUTOMATON);
     public static final IndexPrivilege VIEW_METADATA = new IndexPrivilege("view_index_metadata", VIEW_METADATA_AUTOMATON);
+    public static final IndexPrivilege CREATE_FOLLOW_INDEX = new IndexPrivilege("create_follow_index", CREATE_FOLLOW_INDEX_AUTOMATON);
 
     private static final Map VALUES = MapBuilder.newMapBuilder()
         .put("none", NONE)
@@ -84,6 +86,7 @@ public final class IndexPrivilege extends Privilege {
         .put("delete_index", DELETE_INDEX)
         .put("view_index_metadata", VIEW_METADATA)
         .put("read_cross_cluster", READ_CROSS_CLUSTER)
+        .put("create_follow_index", CREATE_FOLLOW_INDEX)
         .immutableMap();
 
     public static final Predicate ACTION_MATCHER = ALL.predicate();

From 8eebca32d2b3882d9f6c7bced4320f84b88f6d57 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Mon, 10 Sep 2018 13:23:02 +0200
Subject: [PATCH 70/91] [CCR] Delay auto follow license check (#33557)

* [CCR] Delay auto follow license check so that we're sure that there are auto follow patterns configured.

Otherwise we would log a warning when someone is running with a basic or gold license and has not used the CCR feature.
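In effect, doAutoFollow() now runs its cheap cluster-state guards (local node is the elected master, auto-follow metadata exists and has patterns) before the license check, so the warning can only fire when there is actually something to coordinate. A minimal, self-contained sketch of that guard ordering; the class and members below are illustrative stand-ins, not the real AutoFollowCoordinator internals:

    import java.util.Collections;
    import java.util.Map;
    import java.util.function.BooleanSupplier;

    // Sketch of the reordered guards; all names here are hypothetical stand-ins.
    final class AutoFollowGuardOrder {

        private final boolean localNodeMaster;
        private final Map<String, String> autoFollowPatterns; // stands in for AutoFollowMetadata
        private final BooleanSupplier ccrAllowed;             // stands in for CcrLicenseChecker#isCcrAllowed

        AutoFollowGuardOrder(boolean localNodeMaster, Map<String, String> patterns, BooleanSupplier ccrAllowed) {
            this.localNodeMaster = localNodeMaster;
            this.autoFollowPatterns = patterns;
            this.ccrAllowed = ccrAllowed;
        }

        void doAutoFollow() {
            if (localNodeMaster == false) {
                return; // only the elected master coordinates auto-follow
            }
            if (autoFollowPatterns.isEmpty()) {
                return; // nothing configured: bail out before the license check, so no warning is logged
            }
            if (ccrAllowed.getAsBoolean() == false) {
                // the real code logs a compliance warning and reschedules itself on the thread pool
                System.err.println("skipping auto-follower coordination: license does not allow ccr");
                return;
            }
            // ... auto-follow coordination proper would run from here
        }

        public static void main(String[] args) {
            // Non-compliant license but nothing configured: silent return, no warning.
            new AutoFollowGuardOrder(true, Collections.emptyMap(), () -> false).doAutoFollow();
            // Non-compliant license with a configured pattern: the warning now fires.
            new AutoFollowGuardOrder(true, Collections.singletonMap("test_alias", "logs-*"), () -> false).doAutoFollow();
        }
    }

Compare with the hunk below: the isCcrAllowed() block is moved, unchanged, beneath the master and auto-follow-metadata checks.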
--- .../ccr/action/AutoFollowCoordinator.java | 13 +++--- .../elasticsearch/xpack/ccr/CcrLicenseIT.java | 40 +++++++++++++++++++ 2 files changed, 47 insertions(+), 6 deletions(-) diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 639cd4d5782ab..e28214341a927 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -70,12 +70,6 @@ public AutoFollowCoordinator( } private void doAutoFollow() { - if (ccrLicenseChecker.isCcrAllowed() == false) { - // TODO: set non-compliant status on auto-follow coordination that can be viewed via a stats API - LOGGER.warn("skipping auto-follower coordination", LicenseUtils.newComplianceException("ccr")); - threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); - return; - } if (localNodeMaster == false) { return; } @@ -91,6 +85,13 @@ private void doAutoFollow() { return; } + if (ccrLicenseChecker.isCcrAllowed() == false) { + // TODO: set non-compliant status on auto-follow coordination that can be viewed via a stats API + LOGGER.warn("skipping auto-follower coordination", LicenseUtils.newComplianceException("ccr")); + threadPool.schedule(pollInterval, ThreadPool.Names.SAME, this::doAutoFollow); + return; + } + Consumer handler = e -> { if (e != null) { LOGGER.warn("failure occurred during auto-follower coordination", e); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java index 06cafc4777a49..2d58358d11f88 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/CcrLicenseIT.java @@ -12,6 +12,10 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; @@ -23,6 +27,8 @@ import org.elasticsearch.xpack.ccr.action.FollowIndexAction; import org.elasticsearch.xpack.ccr.action.PutAutoFollowPatternAction; import org.elasticsearch.xpack.ccr.action.ShardFollowNodeTask; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; +import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata.AutoFollowPattern; import java.util.Collection; import java.util.Collections; @@ -127,6 +133,40 @@ public void onFailure(final Exception e) { } public void testAutoFollowCoordinatorLogsSkippingAutoFollowCoordinationWithNonCompliantLicense() throws Exception { + // Update the cluster state so that we have auto follow patterns and verify that we log a warning in case of incompatible license: + CountDownLatch latch = new CountDownLatch(1); + ClusterService clusterService = getInstanceFromNode(ClusterService.class); + clusterService.submitStateUpdateTask("test-add-auto-follow-pattern", new ClusterStateUpdateTask() { + + @Override + public ClusterState execute(ClusterState 
currentState) throws Exception { + AutoFollowPattern autoFollowPattern = + new AutoFollowPattern(Collections.singletonList("logs-*"), null, null, null, null, null, null, null, null); + AutoFollowMetadata autoFollowMetadata = new AutoFollowMetadata( + Collections.singletonMap("test_alias", autoFollowPattern), + Collections.emptyMap() + ); + + ClusterState.Builder newState = ClusterState.builder(currentState); + newState.metaData(MetaData.builder(currentState.getMetaData()) + .putCustom(AutoFollowMetadata.TYPE, autoFollowMetadata) + .build()); + return newState.build(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + latch.countDown(); + } + + @Override + public void onFailure(String source, Exception e) { + latch.countDown(); + fail("unexpected error [" + e.getMessage() + "]"); + } + }); + latch.await(); + final Logger logger = LogManager.getLogger(AutoFollowCoordinator.class); final MockLogAppender appender = new MockLogAppender(); appender.start(); From 80c4661d0c90fec91b072799c8d9dd34700cb8bb Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 10 Sep 2018 14:20:04 +0200 Subject: [PATCH 71/91] Fix typos (#33499) --- .../src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy | 2 +- .../org/elasticsearch/gradle/precommit/PrecommitTasks.groovy | 2 +- modules/reindex/build.gradle | 2 +- x-pack/docs/en/watcher/customizing-watches.asciidoc | 2 +- .../src/main/resources/meta-plugin-descriptor.properties | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 110982e31e661..39f985c2eb765 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -66,7 +66,7 @@ class BuildPlugin implements Plugin { void apply(Project project) { if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) { throw new InvalidUserDataException('elasticsearch.standalone-test, ' - + 'elasticearch.standalone-rest-test, and elasticsearch.build ' + + 'elasticsearch.standalone-rest-test, and elasticsearch.build ' + 'are mutually exclusive') } final String minimumGradleVersion diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index 06557d4ccfdb7..e8ea1ab35d343 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -63,7 +63,7 @@ class PrecommitTasks { * (which provides NamingConventionsCheck) and :test:logger-usage * which provides the logger usage check. Since the build tools * don't use the logger usage check because they don't have any - * of Elaticsearch's loggers and :test:logger-usage actually does + * of Elasticsearch's loggers and :test:logger-usage actually does * use the NamingConventionsCheck we break the circular dependency * here. 
*/ diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 8870e21858d18..e1b352e1a22e6 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -71,7 +71,7 @@ thirdPartyAudit.excludes = [ 'org.apache.log.Logger', ] -// Support for testing reindex-from-remote against old Elaticsearch versions +// Support for testing reindex-from-remote against old Elasticsearch versions configurations { oldesFixture es2 diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index fc45bc636bfc5..b250ea89d6fa0 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -49,7 +49,7 @@ initial payload. A <> input contains a `request` object that specifies the indices you want to search, the {ref}/search-request-search-type.html[search type], and the search request body. The `body` field of a search input is the same as -the body of an Elasticsearch `_search` request, making the full Elaticsearch +the body of an Elasticsearch `_search` request, making the full Elasticsearch Query DSL available for you to use. For example, the following `search` input loads the latest VIX quote: diff --git a/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties b/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties index 2e878c207acfc..e80494fc1cb2c 100644 --- a/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties +++ b/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties @@ -5,7 +5,7 @@ # # meta-foo.zip <-- zip file for the meta plugin, with this structure: #|____elasticsearch/ -#| |____ <-- The plugin files for bundled_plugin_1 (the content of the elastisearch directory) +#| |____ <-- The plugin files for bundled_plugin_1 (the content of the elasticsearch directory) #| |____ <-- The plugin files for bundled_plugin_2 #| |____ meta-plugin-descriptor.properties <-- example contents below: # From 369db8a9d66950cd20ad634d9bff24f6578a6b16 Mon Sep 17 00:00:00 2001 From: Chris Roberson Date: Mon, 10 Sep 2018 08:50:07 -0500 Subject: [PATCH 72/91] Update beats template to include apm-server metrics (#33286) --- .../src/main/resources/monitoring-beats.json | 268 ++++++++++++++++++ 1 file changed, 268 insertions(+) diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index 07756ba2602f0..d23db9a11a4aa 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -224,6 +224,274 @@ } } }, + "apm-server": { + "properties": { + "server": { + "properties": { + "request": { + "properties": { + "count": { + "type": "long" + } + } + }, + "concurrent": { + "properties": { + "wait": { + "properties": { + "ms": { + "type": "long" + } + } + } + } + }, + "response": { + "properties": { + "count": { + "type": "long" + }, + "errors": { + "properties": { + "count": { + "type": "long" + }, + "toolarge": { + "type": "long" + }, + "validate": { + "type": "long" + }, + "ratelimit": { + "type": "long" + }, + "queue": { + "type": "long" + }, + "closed": { + "type": "long" + }, + "forbidden": { + "type": "long" + }, + "concurrency": { + "type": "long" + }, + "unauthorized": { + "type": "long" + }, + "decode": { + "type": "long" + }, + "method": { + "type": "long" + } + } + }, + "valid": { + "properties": { + "ok": { + "type": "long" + }, + 
"accepted": { + "type": "long" + }, + "count": { + "type": "long" + } + } + } + } + } + } + }, + "decoder": { + "properties": { + "deflate": { + "properties": { + "content-length": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "gzip": { + "properties": { + "content-length": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "uncompressed": { + "properties": { + "content-length": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "reader": { + "properties": { + "size": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "missing-content-length": { + "properties": { + "count": { + "type": "long" + } + } + } + } + + }, + "processor": { + "properties": { + "metric": { + "properties": { + "decoding": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "validation": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "transformations": { + "type": "long" + } + } + }, + "sourcemap": { + "properties": { + "counter": { + "type": "long" + }, + "decoding": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "validation": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + } + } + }, + "transaction": { + "properties": { + "decoding": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "validation": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "transformations": { + "type": "long" + }, + "transactions": { + "type": "long" + }, + "spans": { + "type": "long" + }, + "stacktraces": { + "type": "long" + }, + "frames": { + "type": "long" + } + } + }, + "error": { + "properties": { + "decoding": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "validation": { + "properties": { + "errors": { + "type": "long" + }, + "count": { + "type": "long" + } + } + }, + "transformations": { + "type": "long" + }, + "errors": { + "type": "long" + }, + "stacktraces": { + "type": "long" + }, + "frames": { + "type": "long" + } + } + } + } + } + } + }, "libbeat": { "properties": { "config": { From 74d41857c67a9dac0c94adeb0f61b609f63ed1ee Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 10 Sep 2018 16:46:15 +0200 Subject: [PATCH 73/91] mute test on windows Relates #33570 --- .../org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java index 0562a88957ccb..7bc952a3ea8e8 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java +++ b/x-pack/plugin/ccr/qa/multi-cluster-with-non-compliant-license/src/test/java/org/elasticsearch/xpack/ccr/CcrMultiClusterLicenseIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ccr; +import org.apache.lucene.util.Constants; import org.elasticsearch.client.Request; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Booleans; @@ -47,6 +48,7 @@ public void testCreateAndFollowIndex() { } public void testAutoFollow() throws Exception { 
+ assumeFalse("windows is the worst", Constants.WINDOWS); if (runningAgainstLeaderCluster == false) { final Request request = new Request("PUT", "/_ccr/auto_follow/leader_cluster"); request.setJsonEntity("{\"leader_index_patterns\":[\"*\"]}"); From 079d130d8c4827426ee15e7d7964b1c993a6c491 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 10 Sep 2018 18:29:00 +0200 Subject: [PATCH 74/91] [Test] Remove duplicate method in TestShardRouting (#32815) --- .../org/elasticsearch/gradle/BuildPlugin.groovy | 2 +- .../gradle/precommit/PrecommitTasks.groovy | 2 +- docs/reference/setup/setup-xclient.asciidoc | 2 +- docs/reference/upgrade/set-paths-tip.asciidoc | 2 +- modules/reindex/build.gradle | 2 +- .../allocation/ResizeAllocationDeciderTests.java | 12 ++++++------ .../elasticsearch/index/shard/IndexShardTests.java | 3 ++- .../cluster/routing/TestShardRouting.java | 4 ---- .../index/shard/IndexShardTestCase.java | 2 +- x-pack/docs/en/watcher/customizing-watches.asciidoc | 2 +- .../main/resources/meta-plugin-descriptor.properties | 2 +- 11 files changed, 16 insertions(+), 19 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 39f985c2eb765..110982e31e661 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -66,7 +66,7 @@ class BuildPlugin implements Plugin { void apply(Project project) { if (project.pluginManager.hasPlugin('elasticsearch.standalone-rest-test')) { throw new InvalidUserDataException('elasticsearch.standalone-test, ' - + 'elasticsearch.standalone-rest-test, and elasticsearch.build ' + + 'elasticearch.standalone-rest-test, and elasticsearch.build ' + 'are mutually exclusive') } final String minimumGradleVersion diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy index e8ea1ab35d343..06557d4ccfdb7 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/precommit/PrecommitTasks.groovy @@ -63,7 +63,7 @@ class PrecommitTasks { * (which provides NamingConventionsCheck) and :test:logger-usage * which provides the logger usage check. Since the build tools * don't use the logger usage check because they don't have any - * of Elasticsearch's loggers and :test:logger-usage actually does + * of Elaticsearch's loggers and :test:logger-usage actually does * use the NamingConventionsCheck we break the circular dependency * here. */ diff --git a/docs/reference/setup/setup-xclient.asciidoc b/docs/reference/setup/setup-xclient.asciidoc index 4b38e869e25fc..4282264e39524 100644 --- a/docs/reference/setup/setup-xclient.asciidoc +++ b/docs/reference/setup/setup-xclient.asciidoc @@ -11,7 +11,7 @@ cluster where {xpack} is installed, then you must download and configure the . Add the {xpack} transport JAR file to your *CLASSPATH*. You can download the {xpack} distribution and extract the JAR file manually or you can get it from the -https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearch Maven repository]. +https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.jar[Elasticsearc Maven repository]. 
As with any dependency, you will also need its transitive dependencies. Refer to the https://artifacts.elastic.co/maven/org/elasticsearch/client/x-pack-transport/{version}/x-pack-transport-{version}.pom[X-Pack POM file for your version] when downloading for offline usage. diff --git a/docs/reference/upgrade/set-paths-tip.asciidoc b/docs/reference/upgrade/set-paths-tip.asciidoc index adfe3e29dac3a..2dd120767c268 100644 --- a/docs/reference/upgrade/set-paths-tip.asciidoc +++ b/docs/reference/upgrade/set-paths-tip.asciidoc @@ -2,7 +2,7 @@ ================================================ When you extract the zip or tarball packages, the `elasticsearch-n.n.n` -directory contains the Elasticsearch `config`, `data`, `logs` and +directory contains the Elasticsearh `config`, `data`, `logs` and `plugins` directories. We recommend moving these directories out of the Elasticsearch directory diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index e1b352e1a22e6..8870e21858d18 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -71,7 +71,7 @@ thirdPartyAudit.excludes = [ 'org.apache.log.Logger', ] -// Support for testing reindex-from-remote against old Elasticsearch versions +// Support for testing reindex-from-remote against old Elaticsearch versions configurations { oldesFixture es2 diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java index 536e3cbb7e08d..eeec65f0e2e29 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java @@ -134,8 +134,8 @@ public void testShrink() { // we don't handle shrink yet ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); RoutingAllocation routingAllocation = new RoutingAllocation(null, null, clusterState, null, 0); - ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, 0), null, true, RecoverySource - .LocalShardsRecoverySource.INSTANCE, ShardRoutingState.UNASSIGNED); + ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, 0), null, true, ShardRoutingState.UNASSIGNED, + RecoverySource.LocalShardsRecoverySource.INSTANCE); assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation)); assertEquals(Decision.ALWAYS, resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)); @@ -164,8 +164,8 @@ public void testSourceNotActive() { RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0); int shardId = randomIntBetween(0, 3); int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id(); - ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, RecoverySource - .LocalShardsRecoverySource.INSTANCE, ShardRoutingState.UNASSIGNED); + ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED, + RecoverySource.LocalShardsRecoverySource.INSTANCE); assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation)); assertEquals(Decision.NO, 
resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), routingAllocation)); @@ -204,8 +204,8 @@ public void testSourcePrimaryActive() { RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0); int shardId = randomIntBetween(0, 3); int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id(); - ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, RecoverySource - .LocalShardsRecoverySource.INSTANCE, ShardRoutingState.UNASSIGNED); + ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, ShardRoutingState.UNASSIGNED, + RecoverySource.LocalShardsRecoverySource.INSTANCE); assertEquals(Decision.YES, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation)); String allowedNode = clusterState.getRoutingTable().index("source").shard(sourceShardId).primaryShard().currentNodeId(); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 4ed74388f0e1e..7f37846d3f045 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1200,7 +1200,8 @@ public void testShardStats() throws IOException { public void testShardStatsWithFailures() throws IOException { allowShardFailures(); final ShardId shardId = new ShardId("index", "_na_", 0); - final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, ShardRoutingState.INITIALIZING); + final ShardRouting shardRouting = + newShardRouting(shardId, "node", true, ShardRoutingState.INITIALIZING, RecoverySource.EmptyStoreRecoverySource.INSTANCE); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index c91c04884c5a7..ee270ee6e4803 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -39,10 +39,6 @@ public static ShardRouting newShardRouting(String index, int shardId, String cur return newShardRouting(new ShardId(index, IndexMetaData.INDEX_UUID_NA_VALUE, shardId), currentNodeId, primary, state); } - public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, RecoverySource recoverySource, ShardRoutingState state) { - return new ShardRouting(shardId, currentNodeId, null, primary, state, recoverySource, buildUnassignedInfo(state), buildAllocationId(state), -1); - } - public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { return new ShardRouting(shardId, currentNodeId, null, primary, state, buildRecoveryTarget(primary, state), buildUnassignedInfo(state), buildAllocationId(state), -1); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 9082b4153b0bf..ca2156144b3be 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -726,7 +726,7 @@ protected void recoverShardFromSnapshot(final IndexShard shard, final IndexId indexId = new IndexId(shardId.getIndex().getName(), shardId.getIndex().getUUID()); final DiscoveryNode node = getFakeDiscoNode(shard.routingEntry().currentNodeId()); final RecoverySource.SnapshotRecoverySource recoverySource = new RecoverySource.SnapshotRecoverySource(snapshot, version, index); - final ShardRouting shardRouting = newShardRouting(shardId, node.getId(), true, recoverySource, ShardRoutingState.INITIALIZING); + final ShardRouting shardRouting = newShardRouting(shardId, node.getId(), true, ShardRoutingState.INITIALIZING, recoverySource); shard.markAsRecovering("from snapshot", new RecoveryState(shardRouting, node, null)); repository.restoreShard(shard, snapshot.getSnapshotId(), version, indexId, shard.shardId(), shard.recoveryState()); diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index b250ea89d6fa0..fc45bc636bfc5 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -49,7 +49,7 @@ initial payload. A <> input contains a `request` object that specifies the indices you want to search, the {ref}/search-request-search-type.html[search type], and the search request body. The `body` field of a search input is the same as -the body of an Elasticsearch `_search` request, making the full Elasticsearch +the body of an Elasticsearch `_search` request, making the full Elaticsearch Query DSL available for you to use. For example, the following `search` input loads the latest VIX quote: diff --git a/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties b/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties index e80494fc1cb2c..2e878c207acfc 100644 --- a/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties +++ b/x-pack/plugin/security/src/main/resources/meta-plugin-descriptor.properties @@ -5,7 +5,7 @@ # # meta-foo.zip <-- zip file for the meta plugin, with this structure: #|____elasticsearch/ -#| |____ <-- The plugin files for bundled_plugin_1 (the content of the elasticsearch directory) +#| |____ <-- The plugin files for bundled_plugin_1 (the content of the elastisearch directory) #| |____ <-- The plugin files for bundled_plugin_2 #| |____ meta-plugin-descriptor.properties <-- example contents below: # From 9a2c77d1c363b83bf423fc567bcac4cd72812f0f Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Mon, 10 Sep 2018 18:56:21 +0200 Subject: [PATCH 75/91] MINOR: Remove Dead Code in SearchScript (#33569) * `lookup` is not used anywhere * `getLeafContext` is not used anywhere --- .../java/org/elasticsearch/script/SearchScript.java | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java index 43ea020aa6e24..7bef78f9930c4 100644 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java @@ -46,12 +46,6 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript { /** The generic runtime parameters for the script. 
     */
     private final Map params;
 
-    /** A lookup for the index this script will operate on. */
-    private final SearchLookup lookup;
-
-    /** A leaf lookup for the bound segment this script will operate on. */
-    private final LeafReaderContext leafContext;
-
     /** A leaf lookup for the bound segment this script will operate on. */
     private final LeafSearchLookup leafLookup;
@@ -60,8 +54,6 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript {
 
     public SearchScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) {
         this.params = params;
-        this.lookup = lookup;
-        this.leafContext = leafContext;
         // TODO: remove leniency when painless does not implement SearchScript for executable script cases
         this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext);
     }
@@ -76,11 +68,6 @@ protected final LeafSearchLookup getLeafLookup() {
         return leafLookup;
     }
 
-    /** The leaf context for the Lucene segment this script was created for. */
-    protected final LeafReaderContext getLeafContext() {
-        return leafContext;
-    }
-
     /** The doc lookup for the Lucene segment this script was created for. */
     public final LeafDocLookup getDoc() {
         // TODO: remove leniency when painless does not implement SearchScript for executable script cases

From 5f4244755e9670d3ffb660827597e2deac76c2e2 Mon Sep 17 00:00:00 2001
From: Jason Tedor
Date: Mon, 10 Sep 2018 14:25:30 -0400
Subject: [PATCH 76/91] Enable not wiping cluster settings after REST test (#33575)

In some cases we want to skip wiping cluster settings after a REST test. For example, one use-case would be in the full cluster restart tests where we want to test cluster settings before and after a full cluster restart. If we wipe the cluster settings before the restart, then it would not be possible to assert on them after the restart.
---
 .../elasticsearch/test/rest/ESRestTestCase.java | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index ecb965040f87b..9d47c4e24a90b 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -236,6 +236,16 @@ protected boolean preserveTemplatesUponCompletion() {
         return false;
     }
 
+    /**
+     * Controls whether or not to preserve cluster settings upon completion of the test. The default implementation is to remove all cluster
+     * settings.
+     *
+     * @return true if cluster settings should be preserved and otherwise false
+     */
+    protected boolean preserveClusterSettings() {
+        return false;
+    }
+
     /**
      * Returns whether to preserve the repositories on completion of this test.
      * Defaults to not preserving repos.
See also @@ -295,7 +305,11 @@ private void wipeCluster() throws IOException { } wipeSnapshots(); - wipeClusterSettings(); + + // wipe cluster settings + if (preserveClusterSettings() == false) { + wipeClusterSettings(); + } } /** From 39c3234c2f3f1331a1a8cffc547c0998590e1598 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 10 Sep 2018 20:51:55 +0100 Subject: [PATCH 77/91] Upgrade to latest Lucene snapshot (#33505) * LeafCollector.setScorer() now takes a Scorable * Scorers may not have null Weights * IndexWriter.getFlushingBytes() reports how much memory is being used by IW threads writing to disk --- buildSrc/version.properties | 2 +- ...essions-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...essions-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + .../expression/ExpressionScriptEngine.java | 4 +- .../elasticsearch/painless/ScoreTests.java | 34 ++---------- .../painless/ScriptTestCase.java | 4 +- .../ScriptedMetricAggContextsTests.java | 14 +---- .../ParentToChildrenAggregator.java | 14 ++++- ...ers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...ers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...uromoji-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...uromoji-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...rs-nori-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...rs-nori-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...honetic-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...honetic-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...stempel-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...stempel-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...fologik-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...fologik-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...-common-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...-common-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...ne-core-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...ne-core-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...rouping-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...rouping-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...lighter-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...lighter-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...ne-join-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...ne-join-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...-memory-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...-memory-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...ne-misc-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...ne-misc-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...queries-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...queries-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...yparser-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...yparser-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...spatial-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...spatial-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...-extras-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...-extras-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...atial3d-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...atial3d-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + ...suggest-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...suggest-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + .../grouping/CollapsingTopDocsCollector.java | 6 +-- .../action/search/MaxScoreCollector.java | 6 +-- .../elasticsearch/common/lucene/Lucene.java | 25 --------- .../common/lucene/MinimumScoreCollector.java | 6 +-- .../common/lucene/ScorerAware.java | 4 +- 
.../search/function/MinScoreScorer.java | 19 +++---- .../search/function/ScriptScoreFunction.java | 21 ++------ .../BytesRefFieldComparatorSource.java | 8 +-- .../DoubleValuesComparatorSource.java | 6 +-- .../elasticsearch/script/ScoreAccessor.java | 6 +-- .../org/elasticsearch/script/ScoreScript.java | 40 +++++++------- .../script/ScriptedMetricAggContexts.java | 6 +-- .../elasticsearch/script/SearchScript.java | 6 +-- .../aggregations/AggregatorFactory.java | 6 +-- .../aggregations/LeafBucketCollector.java | 8 +-- .../aggregations/LeafBucketCollectorBase.java | 4 +- .../aggregations/MultiBucketCollector.java | 4 +- .../sampler/BestDocsDeferringCollector.java | 25 +++------ .../metrics/ScriptedMetricAggregator.java | 6 +-- .../metrics/TopHitsAggregator.java | 7 ++- .../aggregations/support/ValuesSource.java | 8 +-- .../support/values/ScriptBytesValues.java | 4 +- .../support/values/ScriptDoubleValues.java | 4 +- .../support/values/ScriptLongValues.java | 4 +- .../ProfilingLeafBucketCollector.java | 4 +- .../profile/query/ProfileCollector.java | 4 +- .../search/profile/query/ProfileScorer.java | 2 +- .../search/sort/ScriptSortBuilder.java | 6 +-- .../search/function/MinScoreScorerTests.java | 52 +++++++++++++++---- .../MultiBucketCollectorTests.java | 35 ++----------- .../support/ScriptValuesTests.java | 4 +- .../slice/DocValuesSliceQueryTests.java | 6 +-- .../search/slice/TermsSliceQueryTests.java | 6 +-- .../script/MockScriptEngine.java | 8 +-- ...ne-core-8.0.0-snapshot-4d78db26be.jar.sha1 | 1 - ...ne-core-8.0.0-snapshot-66c671ea80.jar.sha1 | 1 + 88 files changed, 207 insertions(+), 279 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 
server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 delete mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 386457146685f..914bae4d2c871 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 8.0.0-snapshot-4d78db26be +lucene = 8.0.0-snapshot-66c671ea80 # optional dependencies spatial4j = 0.7 diff --git a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index bec50d36793d8..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5f469e925dde5dff81b9d56f465a8babb56cd26b \ No newline at end of file diff --git 
a/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..047bca7b614bf --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +58b9db095c569b4c4da491810f14e1429878b594 \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index 23dc0fd276cbe..55f8deb059293 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -26,7 +26,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.valuesource.DoubleConstValueSource; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.Nullable; @@ -336,7 +336,7 @@ public void setDocument(int docid) { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java index 76bb6d14dcf61..3d19dedd3b0a3 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScoreTests.java @@ -19,39 +19,25 @@ package org.elasticsearch.painless; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; -import java.io.IOException; import java.util.Collections; public class ScoreTests extends ScriptTestCase { /** Most of a dummy scorer impl that requires overriding just score(). 
*/ - abstract class MockScorer extends Scorer { - MockScorer() { - super(null); - } + abstract class MockScorer extends Scorable { @Override public int docID() { return 0; } - @Override - public DocIdSetIterator iterator() { - throw new UnsupportedOperationException(); - } } public void testScoreWorks() { assertEquals(2.5, exec("_score", Collections.emptyMap(), Collections.emptyMap(), new MockScorer() { @Override - public float score() throws IOException { - return 2.5f; - } - - @Override - public float getMaxScore(int upTo) throws IOException { + public float score() { return 2.5f; } }, @@ -62,14 +48,9 @@ public void testScoreNotUsed() { assertEquals(3.5, exec("3.5", Collections.emptyMap(), Collections.emptyMap(), new MockScorer() { @Override - public float score() throws IOException { + public float score() { throw new AssertionError("score() should not be called"); } - - @Override - public float getMaxScore(int upTo) throws IOException { - return Float.MAX_VALUE; - } }, true)); } @@ -79,17 +60,12 @@ public void testScoreCached() { new MockScorer() { private boolean used = false; @Override - public float score() throws IOException { + public float score() { if (used == false) { return 4.5f; } throw new AssertionError("score() should not be called twice"); } - - @Override - public float getMaxScore(int upTo) throws IOException { - return 4.5f; - } }, true)); } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 963a433f172e8..577b120fc9024 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -20,7 +20,7 @@ package org.elasticsearch.painless; import junit.framework.AssertionFailedError; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.antlr.Walker; @@ -91,7 +91,7 @@ public Object exec(String script, Map vars, boolean picky) { } /** Compiles and returns the result of {@code script} with access to {@code vars} and compile-time parameters */ - public Object exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) { + public Object exec(String script, Map vars, Map compileParams, Scorable scorer, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { ScriptClassInfo scriptClassInfo = new ScriptClassInfo(PAINLESS_LOOKUP, GenericElasticsearchScript.class); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java index 4820bc10cf24f..5c6fbc54667f2 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -19,13 +19,11 @@ package org.elasticsearch.painless; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptedMetricAggContexts; -import java.io.IOException; import 
java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -66,20 +64,12 @@ public void testMapBasic() { Map params = new HashMap<>(); Map state = new HashMap<>(); - Scorer scorer = new Scorer(null) { + Scorable scorer = new Scorable() { @Override public int docID() { return 0; } @Override public float score() { return 0.5f; } - - @Override - public DocIdSetIterator iterator() { return null; } - - @Override - public float getMaxScore(int upTo) throws IOException { - return 0.5f; - } }; ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java index 4469c9633dd87..064d1d1e5977c 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregator.java @@ -21,9 +21,9 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.search.ConstantScoreScorer; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; @@ -148,7 +148,17 @@ protected void doPostCollection() throws IOException { final SortedSetDocValues globalOrdinals = valuesSource.globalOrdinalsValues(ctx); // Set the scorer, since we now replay only the child docIds - sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter)); + sub.setScorer(new Scorable() { + @Override + public float score() { + return 1f; + } + + @Override + public int docID() { + return childDocsIter.docID(); + } + }); final Bits liveDocs = ctx.reader().getLiveDocs(); for (int docId = childDocsIter diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index be2e7ec355ac5..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -97a3758487272ba4d15720b0ca15b0f980310c89 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..7369f427ab208 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +f009ee188453aabae77fad55aea08bc60323bb3e \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index a7f63df28d7e5..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -12ed739794cd317754684308ddc5bdbdcc46cdde \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 
b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..16417bbebd1c2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +af3d2ae975e3560c1ea69222d6c46072857952ba \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 8fc57bbf7e46d..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4da6e5c17a17f0a9a99b518ea9985ea06996b63b \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..9c3524a6789f8 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +f17bc5e532d9dc2786a13bd577df64023d1baae1 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index d94b274bf13ff..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a36b2db18a2a22966ab0bf9fced775f22dd7029d \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..ac81fdd07c2e4 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +7ad89d33c1cd960c91afa05b22024137fe108567 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index f75d7abd6a36b..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5f1d360a47d2fd166e970d17c46b284830e64258 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..f00a29e781618 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +3f11fb254256d74e911b953994b47e7a95915954 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 2e3943cf79345..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b07883b5e988d1d991503aa49d9b59059518825d \ No newline at end of file diff 
--git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..76fa8e90eae98 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +b2348d140ef0c3e674cb81173f61c5e5f430facb \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 1d21c6e5b613c..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1b46b3ee62932de7ba7b670820a13eb973ec5777 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..0e2c4d34ef041 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +485a0c3be58a5942b4a28639f1019181ef4cd0e3 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 3a02e483d6808..0000000000000 --- a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fa8e0fbef3e3fcf49ace4a4153580070def770eb \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..72f7319e6af4a --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +a22f1c6749ca4a3fbc9b330161a8ea3301cac8de \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 8279b81d6cfc0..0000000000000 --- a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3d636541581e338a1be7e3e176aac73d7ae0b323 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..f4bf99b4a03a5 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +41ce415b93d75662cc2e790d09120bc0234d6b1b \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 683b585bb2f61..0000000000000 --- a/server/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -126faacb28d1b8cc1ab81d702973d057892120d1 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 
0000000000000..50a21f5c504a2 --- /dev/null +++ b/server/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +06c1e4fa838807059d27aaf5405cfdfe7303369c \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 483f470b5e015..0000000000000 --- a/server/licenses/lucene-grouping-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -abd514ec02837f48b8c478287fde7cc5d6439ada \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..76bdfa1c6c4bc --- /dev/null +++ b/server/licenses/lucene-grouping-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +5b0a019a938deb58160647e7640b348bb99c10a8 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 27dd042c06bf3..0000000000000 --- a/server/licenses/lucene-highlighter-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -778e87a263184b8ddcbb4ef9d244467933f32993 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..017225c0e467d --- /dev/null +++ b/server/licenses/lucene-highlighter-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +4d813f3ba0ddd56bac728edb88ed8875e6acfd18 \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 13d2db8d210dc..0000000000000 --- a/server/licenses/lucene-join-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -96aff29ad966204c73f8dd98d8116f09e34b6ebd \ No newline at end of file diff --git a/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..29cdbbfe69f3c --- /dev/null +++ b/server/licenses/lucene-join-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +00c7e20b6a35ebecc875dd52bfb324967c5555d6 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 6e014f20c97fd..0000000000000 --- a/server/licenses/lucene-memory-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e72e2accebb1277c57dfe21bc011195eed91dbfd \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..49087293afa7c --- /dev/null +++ b/server/licenses/lucene-memory-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +e4dbff54a0befdc7d67c0f39890586c220df718e \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 57081e7aa10ba..0000000000000 --- a/server/licenses/lucene-misc-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf25587ebf6823781f5d7acffd7d65c46c21cb27 \ No newline 
at end of file diff --git a/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..3c12235dff678 --- /dev/null +++ b/server/licenses/lucene-misc-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +74d17f6bdf1fa4d499f02904432aa3b1024bde88 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 6855364592ea5..0000000000000 --- a/server/licenses/lucene-queries-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6cad42923bcb6e1c6060ae1cbab574646e8c808e \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..a423deb397de6 --- /dev/null +++ b/server/licenses/lucene-queries-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +bec78be38f777765146c35f65e247909563d6814 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index f9d037120a342..0000000000000 --- a/server/licenses/lucene-queryparser-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e5841d7e877e51bbd2d325709353f5ab7e94b49a \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..79195ed1d5e1c --- /dev/null +++ b/server/licenses/lucene-queryparser-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +74b76f8fed44400bc2a5d938ca2611a97b4d7a7c \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 45c8934a8d41b..0000000000000 --- a/server/licenses/lucene-sandbox-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fefe17f6ac0c7d505c5051e96d0f4916fec2bf9e \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..d5cd94b7fe5d6 --- /dev/null +++ b/server/licenses/lucene-sandbox-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +2f65fa728b3bc924db6538f4c3caf2fcd25451cf \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index b02408a7683b3..0000000000000 --- a/server/licenses/lucene-spatial-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -22b0a9d9fb675f7c82a7a2b18f593f3278b40f11 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..76857b72f012b --- /dev/null +++ b/server/licenses/lucene-spatial-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +916a91f0cab2d3684707c59e9adca7b3030b2c66 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 
b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index d4e8b662ce465..0000000000000 --- a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd6449cc67a36891f6b3201489c5ed44d795fab0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..7ab84df992bc4 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +eb3e630d6013e41838fb277943ce921f256f1c61 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 9743868e5c748..0000000000000 --- a/server/licenses/lucene-spatial3d-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e2a8b3e9e19ad61fcbd27a138cf55f2d6cbfb2d \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..d793f4c54d9d1 --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +fa10ff14eab2f579cff2f0fa33c9c7f3b24daf12 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 8b722955278cf..0000000000000 --- a/server/licenses/lucene-suggest-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd5931d1d5ca3f84565534182881565a44aeb72a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 b/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..0ea0c2fb573fd --- /dev/null +++ b/server/licenses/lucene-suggest-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +3dd65ca6612b4f98530847b99ab348fd83055fdf \ No newline at end of file diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java index 7f36074d1459b..e28d8990c91e3 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java @@ -19,9 +19,9 @@ package org.apache.lucene.search.grouping; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TotalHits; @@ -44,7 +44,7 @@ public final class CollapsingTopDocsCollector extends FirstPassGroupingCollec protected final String collapseField; protected final Sort sort; - protected Scorer scorer; + protected Scorable scorer; private int totalHitCount; @@ -102,7 +102,7 @@ public ScoreMode scoreMode() { } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { super.setScorer(scorer); this.scorer = scorer; } diff --git 
a/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java b/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java index 071cd92330496..2959802f2e3f5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/MaxScoreCollector.java @@ -19,8 +19,8 @@ package org.elasticsearch.action.search; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; import java.io.IOException; @@ -30,12 +30,12 @@ */ public class MaxScoreCollector extends SimpleCollector { - private Scorer scorer; + private Scorable scorer; private float maxScore = Float.NEGATIVE_INFINITY; private boolean hasHits = false; @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { this.scorer = scorer; } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java index 47453aa8a41db..dc8628f184e43 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -745,31 +745,6 @@ public static Version parse(String toParse, Version defaultValue) { } } - /** - * Return a Scorer that throws an ElasticsearchIllegalStateException - * on all operations with the given message. - */ - public static Scorer illegalScorer(final String message) { - return new Scorer(null) { - @Override - public float score() throws IOException { - throw new IllegalStateException(message); - } - @Override - public int docID() { - throw new IllegalStateException(message); - } - @Override - public DocIdSetIterator iterator() { - throw new IllegalStateException(message); - } - @Override - public float getMaxScore(int upTo) throws IOException { - throw new IllegalStateException(message); - } - }; - } - private static final class CommitPoint extends IndexCommit { private String segmentsFileName; private final Collection files; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java b/server/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java index 76b59887fb946..f99d68952e557 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/MinimumScoreCollector.java @@ -22,9 +22,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; import java.io.IOException; @@ -34,7 +34,7 @@ public class MinimumScoreCollector extends SimpleCollector { private final Collector collector; private final float minimumScore; - private Scorer scorer; + private Scorable scorer; private LeafCollector leafCollector; public MinimumScoreCollector(Collector collector, float minimumScore) { @@ -43,7 +43,7 @@ public MinimumScoreCollector(Collector collector, float minimumScore) { } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { if (!(scorer instanceof ScoreCachingWrappingScorer)) { scorer = new 
ScoreCachingWrappingScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/ScorerAware.java b/server/src/main/java/org/elasticsearch/common/lucene/ScorerAware.java index df17f8d7757b3..13a2a23ec56a6 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/ScorerAware.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/ScorerAware.java @@ -18,10 +18,10 @@ */ package org.elasticsearch.common.lucene; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; public interface ScorerAware { - void setScorer(Scorer scorer); + void setScorer(Scorable scorer); } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 5296926e9869d..204f69f1e0af0 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -19,14 +19,13 @@ package org.elasticsearch.common.lucene.search.function; -import java.io.IOException; - import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; +import java.io.IOException; + /** A {@link Scorer} that filters out documents that have a score that is * lower than a configured constant. */ final class MinScoreScorer extends Scorer { @@ -34,13 +33,10 @@ final class MinScoreScorer extends Scorer { private final Scorer in; private final float minScore; + private float curScore; + MinScoreScorer(Weight weight, Scorer scorer, float minScore) { super(weight); - if (scorer instanceof ScoreCachingWrappingScorer == false) { - // when minScore is set, scores might be requested twice: once - // to verify the match, and once by the collector - scorer = new ScoreCachingWrappingScorer(scorer); - } this.in = scorer; this.minScore = minScore; } @@ -55,8 +51,8 @@ public int docID() { } @Override - public float score() throws IOException { - return in.score(); + public float score() { + return curScore; } @Override @@ -87,7 +83,8 @@ public boolean matches() throws IOException { if (inTwoPhase != null && inTwoPhase.matches() == false) { return false; } - return in.score() >= minScore; + curScore = in.score(); + return curScore >= minScore; } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index bf1ea637a9671..5edc1659f54f7 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -20,9 +20,8 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.script.ExplainableSearchScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; @@ -32,33 +31,19 @@ public class ScriptScoreFunction extends ScoreFunction { - static final class CannedScorer extends Scorer { + static final 
class CannedScorer extends Scorable { protected int docid; protected float score; - CannedScorer() { - super(null); - } - @Override public int docID() { return docid; } @Override - public float score() throws IOException { + public float score() { return score; } - - @Override - public DocIdSetIterator iterator() { - throw new UnsupportedOperationException(); - } - - @Override - public float getMaxScore(int upTo) throws IOException { - throw new UnsupportedOperationException(); - } } private final Script sScript; diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index eaa16e9f07db8..8e0a31859a132 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -25,7 +25,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; @@ -71,7 +71,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx return indexFieldData.load(context).getBytesValues(); } - protected void setScorer(Scorer scorer) {} + protected void setScorer(Scorable scorer) {} @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { @@ -101,7 +101,7 @@ protected SortedDocValues getSortedDocValues(LeafReaderContext context, String f } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); } @@ -125,7 +125,7 @@ protected BinaryDocValues getBinaryDocValues(LeafReaderContext context, String f } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 43bc19a12a384..1ae3fb692ec61 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -23,7 +23,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.elasticsearch.common.Nullable; @@ -57,7 +57,7 @@ protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws return indexFieldData.load(context).getDoubleValues(); } - protected void setScorer(Scorer scorer) {} + protected void setScorer(Scorable scorer) {} @Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) { @@ -81,7 +81,7 @@ protected NumericDocValues getNumericDocValues(LeafReaderContext 
context, String return selectedValues.getRawDoubleValues(); } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { DoubleValuesComparatorSource.this.setScorer(scorer); } }; diff --git a/server/src/main/java/org/elasticsearch/script/ScoreAccessor.java b/server/src/main/java/org/elasticsearch/script/ScoreAccessor.java index e8c433347b916..b3cdecb3e0485 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreAccessor.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreAccessor.java @@ -19,7 +19,7 @@ package org.elasticsearch.script; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.search.lookup.DocLookup; import java.io.IOException; @@ -32,9 +32,9 @@ */ public final class ScoreAccessor extends Number implements Comparable { - Scorer scorer; + Scorable scorer; - public ScoreAccessor(Scorer scorer) { + public ScoreAccessor(Scorable scorer) { this.scorer = scorer; } diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index d9e56d5573cae..11b135e9a65af 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -19,7 +19,7 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; @@ -33,40 +33,40 @@ * A script used for adjusting the score on a per document basis. */ public abstract class ScoreScript { - + public static final String[] PARAMETERS = new String[]{}; - + /** The generic runtime parameters for the script. */ private final Map params; - + /** A leaf lookup for the bound segment this script will operate on. */ private final LeafSearchLookup leafLookup; - + private DoubleSupplier scoreSupplier = () -> 0.0; - + public ScoreScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { this.params = params; this.leafLookup = lookup.getLeafSearchLookup(leafContext); } - + public abstract double execute(); - + /** Return the parameters for this script. */ public Map getParams() { return params; } - + /** The doc lookup for the Lucene segment this script was created for. */ public final Map> getDoc() { return leafLookup.doc(); } - + /** Set the current document to run the script on next. */ public void setDocument(int docid) { leafLookup.setDocument(docid); } - - public void setScorer(Scorer scorer) { + + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { return scorer.score(); @@ -75,28 +75,28 @@ public void setScorer(Scorer scorer) { } }; } - + public double get_score() { return scoreSupplier.getAsDouble(); } - + /** A factory to construct {@link ScoreScript} instances. */ public interface LeafFactory { - + /** * Return {@code true} if the script needs {@code _score} calculated, or {@code false} otherwise. */ boolean needs_score(); - + ScoreScript newInstance(LeafReaderContext ctx) throws IOException; } - + /** A factory to construct stateful {@link ScoreScript} factories for a specific index. 
*/ public interface Factory { - + ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup); - + } - + public static final ScriptContext CONTEXT = new ScriptContext<>("score", ScoreScript.Factory.class); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java index 9f6ea999a9306..e72d597a6afb4 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -20,7 +20,7 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.search.lookup.LeafSearchLookup; @@ -66,7 +66,7 @@ public interface Factory { public abstract static class MapScript extends ParamsAndStateBase { private final LeafSearchLookup leafLookup; - private Scorer scorer; + private Scorable scorer; public MapScript(Map params, Map state, SearchLookup lookup, LeafReaderContext leafContext) { super(params, state); @@ -86,7 +86,7 @@ public void setDocument(int docId) { } } - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { this.scorer = scorer; } diff --git a/server/src/main/java/org/elasticsearch/script/SearchScript.java b/server/src/main/java/org/elasticsearch/script/SearchScript.java index 7bef78f9930c4..fb5f950d61d7e 100644 --- a/server/src/main/java/org/elasticsearch/script/SearchScript.java +++ b/server/src/main/java/org/elasticsearch/script/SearchScript.java @@ -19,7 +19,7 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.search.lookup.LeafDocLookup; @@ -50,7 +50,7 @@ public abstract class SearchScript implements ScorerAware, ExecutableScript { private final LeafSearchLookup leafLookup; /** A scorer that will return the score for the current document when the script is run. 
*/ - private Scorer scorer; + private Scorable scorer; public SearchScript(Map params, SearchLookup lookup, LeafReaderContext leafContext) { this.params = params; @@ -83,7 +83,7 @@ public void setDocument(int docid) { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { this.scorer = scorer; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java index 59b63520a1bd3..d6e7aca46a63b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactory.java @@ -20,8 +20,8 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ObjectArray; @@ -110,10 +110,10 @@ public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx) { collectors.set(i, null); } return new LeafBucketCollector() { - Scorer scorer; + Scorable scorer; @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { this.scorer = scorer; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java index f5b7f15bb9403..367e1cce0608d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollector.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import java.io.IOException; import java.util.stream.Stream; @@ -33,7 +33,7 @@ public abstract class LeafBucketCollector implements LeafCollector { public static final LeafBucketCollector NO_OP_COLLECTOR = new LeafBucketCollector() { @Override - public void setScorer(Scorer arg0) throws IOException { + public void setScorer(Scorable arg0) throws IOException { // no-op } @Override @@ -55,7 +55,7 @@ public static LeafBucketCollector wrap(Iterable collectors) return new LeafBucketCollector() { @Override - public void setScorer(Scorer s) throws IOException { + public void setScorer(Scorable s) throws IOException { for (LeafBucketCollector c : colls) { c.setScorer(s); } @@ -83,7 +83,7 @@ public final void collect(int doc) throws IOException { } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { // no-op by default } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollectorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollectorBase.java index 45e7db08e2d27..529483107b192 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollectorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/LeafBucketCollectorBase.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; 
import org.elasticsearch.common.lucene.ScorerAware; import java.io.IOException; @@ -48,7 +48,7 @@ public LeafBucketCollectorBase(LeafBucketCollector sub, Object values) { } @Override - public void setScorer(Scorer s) throws IOException { + public void setScorer(Scorable s) throws IOException { sub.setScorer(s); if (values != null) { values.setScorer(s); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index 624c8d5409a56..552ad8c024ddc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -24,9 +24,9 @@ import org.apache.lucene.search.Collector; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import java.io.IOException; import java.util.ArrayList; @@ -174,7 +174,7 @@ private MultiLeafBucketCollector(List collectors, boolean c } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { scorer = new ScoreCachingWrappingScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 4e63d693d1875..9ef72ad17d8fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -19,11 +19,10 @@ package org.elasticsearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopScoreDocCollector; @@ -89,7 +88,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOExce // Deferring collector return new LeafBucketCollector() { @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { perSegCollector.setScorer(scorer); } @@ -156,7 +155,7 @@ class PerParentBucketSamples { private long parentBucket; private int matchedDocs; - PerParentBucketSamples(long parentBucket, Scorer scorer, LeafReaderContext readerContext) { + PerParentBucketSamples(long parentBucket, Scorable scorer, LeafReaderContext readerContext) { try { this.parentBucket = parentBucket; tdc = createTopDocsCollector(shardSize); @@ -185,7 +184,7 @@ public void collect(int doc) throws IOException { currentLeafCollector.collect(doc); } - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { currentLeafCollector.setScorer(scorer); } @@ -198,19 +197,18 @@ public int getDocCount() { } } - class PerSegmentCollects extends Scorer { + 
class PerSegmentCollects extends Scorable { private LeafReaderContext readerContext; int maxDocId = Integer.MIN_VALUE; private float currentScore; private int currentDocId = -1; - private Scorer currentScorer; + private Scorable currentScorer; PerSegmentCollects(LeafReaderContext readerContext) throws IOException { // The publisher behaviour for Reader/Scorer listeners triggers a // call to this constructor with a null scorer so we can't call // scorer.getWeight() and pass the Weight to our base class. // However, passing null seems to have no adverse effects here... - super(null); this.readerContext = readerContext; for (int i = 0; i < perBucketSamples.size(); i++) { PerParentBucketSamples perBucketSample = perBucketSamples.get(i); @@ -221,7 +219,7 @@ class PerSegmentCollects extends Scorer { } } - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { this.currentScorer = scorer; for (int i = 0; i < perBucketSamples.size(); i++) { PerParentBucketSamples perBucketSample = perBucketSamples.get(i); @@ -266,11 +264,6 @@ public int docID() { return currentDocId; } - @Override - public DocIdSetIterator iterator() { - throw new ElasticsearchException("This caching scorer implementation only implements score() and docID()"); - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); @@ -282,10 +275,6 @@ public void collect(int docId, long parentBucket) throws IOException { maxDocId = Math.max(maxDocId, docId); } - @Override - public float getMaxScore(int upTo) throws IOException { - return Float.MAX_VALUE; - } } public int getDocCount(long parentBucket) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java index a0c287f6eac51..345b21d03887e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregator.java @@ -20,11 +20,11 @@ package org.elasticsearch.search.aggregations.metrics; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -70,7 +70,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { leafMapScript.setScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index ddd62b82500ac..c017eb4a5e3bc 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -21,15 +21,14 @@ import com.carrotsearch.hppc.LongObjectHashMap; import com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; @@ -106,10 +105,10 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol final LongObjectHashMap leafCollectors = new LongObjectHashMap<>(1); return new LeafBucketCollectorBase(sub, null) { - Scorer scorer; + Scorable scorer; @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { this.scorer = scorer; super.setScorer(scorer); for (ObjectCursor cursor : leafCollectors.values()) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index 25e3d38af5baa..4e6760f44fe90 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -26,7 +26,7 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.util.CollectionUtils; @@ -295,7 +295,7 @@ static class LongValues extends AbstractSortingNumericDocValues implements Score } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } @@ -326,7 +326,7 @@ static class DoubleValues extends SortingNumericDoubleValues implements ScorerAw } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } @@ -445,7 +445,7 @@ static class BytesValues extends SortingBinaryDocValues implements ScorerAware { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java index 5ec1858487e09..144e08ce6f275 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.aggregations.support.values; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.util.CollectionUtils; 
import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -85,7 +85,7 @@ public boolean advanceExact(int doc) throws IOException { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java index 1227efb5ea0af..4bb531c0d40d7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.aggregations.support.values; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import org.elasticsearch.script.SearchScript; @@ -107,7 +107,7 @@ private static double toDoubleValue(Object o) { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java index cdc448bd04130..c57afa1960d97 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.search.aggregations.support.values; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.util.LongValues; import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; @@ -106,7 +106,7 @@ private static long toLongValue(Object o) { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { script.setScorer(scorer); } } diff --git a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java index 4db67967dcb2b..cc84b1cfb668f 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/profile/aggregation/ProfilingLeafBucketCollector.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.profile.aggregation; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.profile.Timer; @@ -46,7 +46,7 @@ public void collect(int doc, long bucket) throws IOException { } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { delegate.setScorer(scorer); } diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java index 940e3902954b5..b900cb04f79dd 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java +++ 
b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileCollector.java @@ -24,8 +24,8 @@ import org.apache.lucene.search.FilterCollector; import org.apache.lucene.search.FilterLeafCollector; import org.apache.lucene.search.LeafCollector; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import java.io.IOException; @@ -76,7 +76,7 @@ public void collect(int doc) throws IOException { } @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { final long start = System.nanoTime(); try { super.setScorer(scorer); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index 8913f484847e6..7899750461e52 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -71,7 +71,7 @@ public Weight getWeight() { } @Override - public Collection getChildren() throws IOException { + public Collection getChildren() throws IOException { return scorer.getChildren(); } diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index 4759027ee51b0..1b71c51d4162b 100644 --- a/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -21,7 +21,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; @@ -351,7 +351,7 @@ public BytesRef binaryValue() { return FieldData.singleton(values); } @Override - protected void setScorer(Scorer scorer) { + protected void setScorer(Scorable scorer) { leafScript.setScorer(scorer); } }; @@ -376,7 +376,7 @@ public double doubleValue() { return FieldData.singleton(values); } @Override - protected void setScorer(Scorer scorer) { + protected void setScorer(Scorable scorer) { leafScript.setScorer(scorer); } }; diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index d60458cf82642..e9685c03bc4b8 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -19,9 +19,14 @@ package org.elasticsearch.common.lucene.search.function; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; @@ -36,7 +41,7 @@ private static DocIdSetIterator iterator(final int... 
docs) { return new DocIdSetIterator() { int i = -1; - + @Override public int nextDoc() throws IOException { if (i + 1 == docs.length) { @@ -45,17 +50,17 @@ public int nextDoc() throws IOException { return docs[++i]; } } - + @Override public int docID() { return i < 0 ? -1 : i == docs.length ? NO_MORE_DOCS : docs[i]; } - + @Override public long cost() { return docs.length; } - + @Override public int advance(int target) throws IOException { return slowAdvance(target); @@ -63,9 +68,36 @@ public int advance(int target) throws IOException { }; } + private static Weight fakeWeight() { + return new Weight(new MatchAllDocsQuery()) { + @Override + public void extractTerms(Set terms) { + + } + + @Override + public Explanation explain(LeafReaderContext context, int doc) throws IOException { + return null; + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + return null; + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + return false; + } + }; + } + private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - return new Scorer(null) { + return new Scorer(fakeWeight()) { + + int lastScoredDoc = -1; + public DocIdSetIterator iterator() { if (twoPhase) { return TwoPhaseIterator.asDocIdSetIterator(twoPhaseIterator()); @@ -77,12 +109,12 @@ public DocIdSetIterator iterator() { public TwoPhaseIterator twoPhaseIterator() { if (twoPhase) { return new TwoPhaseIterator(iterator) { - + @Override public boolean matches() throws IOException { return Arrays.binarySearch(docs, iterator.docID()) >= 0; } - + @Override public float matchCost() { return 10; @@ -100,6 +132,8 @@ public int docID() { @Override public float score() throws IOException { + assertNotEquals("score() called twice on doc " + docID(), lastScoredDoc, docID()); + lastScoredDoc = docID(); final int idx = Arrays.binarySearch(docs, docID()); return scores[idx]; } @@ -130,7 +164,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(null, scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -152,7 +186,7 @@ public void doTestRandom(boolean twoPhase) throws IOException { assertEquals(DocIdSetIterator.NO_MORE_DOCS, doc); } else { assertEquals(docs[idx], doc); - assertEquals(scores[idx], scorer.score(), 0f); + assertEquals(scores[idx], minScoreScorer.score(), 0f); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index e3fe39db95246..bc8070d7ae40f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -24,19 +24,16 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.CollectionTerminatedException; -import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Scorable; import 
org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -44,14 +41,10 @@ import java.util.concurrent.atomic.AtomicBoolean; public class MultiBucketCollectorTests extends ESTestCase { - private static class FakeScorer extends Scorer { + private static class ScoreAndDoc extends Scorable { float score; int doc = -1; - FakeScorer() { - super(null); - } - @Override public int docID() { return doc; @@ -61,26 +54,6 @@ public int docID() { public float score() { return score; } - - @Override - public DocIdSetIterator iterator() { - throw new UnsupportedOperationException(); - } - - @Override - public float getMaxScore(int upTo) throws IOException { - return Float.MAX_VALUE; - } - - @Override - public Weight getWeight() { - throw new UnsupportedOperationException(); - } - - @Override - public Collection getChildren() { - throw new UnsupportedOperationException(); - } } private static class TerminateAfterBucketCollector extends BucketCollector { @@ -171,7 +144,7 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext context) throws IO final LeafBucketCollector leafCollector = in.getLeafCollector(context); return new LeafBucketCollectorBase(leafCollector, null) { @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { super.setScorer(scorer); setScorerCalled.set(true); } @@ -235,7 +208,7 @@ public void testSetScorerAfterCollectionTerminated() throws IOException { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new TerminateAfterBucketCollector(collector2, 2); - Scorer scorer = new FakeScorer(); + Scorable scorer = new ScoreAndDoc(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index 1915857302cd3..6f2bedbdd3712 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.apache.lucene.util.BytesRef; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues; @@ -59,7 +59,7 @@ public Object run() { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { } @Override diff --git a/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java b/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java index 70eb0266eea38..76807f4722afc 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/DocValuesSliceQueryTests.java @@ -27,11 +27,11 @@ import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; import org.apache.lucene.util.NumericUtils; @@ -99,7 +99,7 @@ public void testSearch() throws Exception { public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { return new LeafCollector() { @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { } @Override diff --git a/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java b/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java index 9ae4b9bc7daf5..881dc6f9587af 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/TermsSliceQueryTests.java @@ -26,11 +26,11 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -92,7 +92,7 @@ public void testSearch() throws Exception { public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { return new LeafCollector() { @Override - public void setScorer(Scorer scorer) throws IOException { + public void setScorer(Scorable scorer) throws IOException { } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 71d40a7b86ab6..be77846b2ba34 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -20,7 +20,7 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Scorable; import org.elasticsearch.index.similarity.ScriptedSimilarity.Doc; import org.elasticsearch.index.similarity.ScriptedSimilarity.Field; import org.elasticsearch.index.similarity.ScriptedSimilarity.Query; @@ -334,7 +334,7 @@ public void setNextVar(String name, Object value) { } @Override - public void setScorer(Scorer scorer) { + public void setScorer(Scorable scorer) { ctx.put("_score", new ScoreAccessor(scorer)); } @@ -553,7 +553,7 @@ public boolean needs_score() { @Override public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { - Scorer[] scorerHolder = new Scorer[1]; + Scorable[] scorerHolder = new Scorable[1]; return new ScoreScript(params, lookup, ctx) { @Override public double execute() { @@ -566,7 +566,7 @@ public double execute() { } @Override - public void setScorer(Scorer scorer) { + public 
void setScorer(Scorable scorer) { scorerHolder[0] = scorer; } }; diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 deleted file mode 100644 index 683b585bb2f61..0000000000000 --- a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-4d78db26be.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -126faacb28d1b8cc1ab81d702973d057892120d1 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 new file mode 100644 index 0000000000000..50a21f5c504a2 --- /dev/null +++ b/x-pack/plugin/sql/sql-action/licenses/lucene-core-8.0.0-snapshot-66c671ea80.jar.sha1 @@ -0,0 +1 @@ +06c1e4fa838807059d27aaf5405cfdfe7303369c \ No newline at end of file From f441bb87ad9d9089a9a826d0fa9c7eb64ce4faf6 Mon Sep 17 00:00:00 2001 From: Marios Trivyzas Date: Mon, 10 Sep 2018 22:02:11 +0200 Subject: [PATCH 78/91] SQL: Fix result column names for arithmetic functions (#33500) Previously, when an arithmetic function got applied on a table column in the `SELECT` clause, the name of the result column contained weird characters used internally when processing the SQL statement e.g.: SELECT CHAR(emp_no % 10000) FROM "test_emp" returned: CHAR((emp_no{f}#14) % 10000)) as the column name instead of: CHAR((emp_no) % 10000)) Also, fix an issue that causes a ClassCastException to be thrown when using functions where both arguments are literals. Closes #31869 Closes #33461 --- .../xpack/sql/expression/Expressions.java | 10 ++++++-- .../function/scalar/ScalarFunction.java | 10 ++++++++ .../scalar/arithmetic/ArithmeticFunction.java | 11 ++++----- .../function/NamedExpressionTests.java | 10 ++++++++ .../xpack/qa/sql/jdbc/CsvTestUtils.java | 10 ++++---- .../xpack/qa/sql/jdbc/JdbcAssert.java | 6 ++--- .../sql/src/main/resources/functions.csv-spec | 23 +++++++++++++++++++ .../qa/sql/src/main/resources/math.sql-spec | 4 +++- 8 files changed, 66 insertions(+), 18 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java index 5851e99131435..8ee34e32a552b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/Expressions.java @@ -82,7 +82,13 @@ public static AttributeSet references(List exps) { } public static String name(Expression e) { - return e instanceof NamedExpression ? ((NamedExpression) e).name() : e.nodeName(); + if (e instanceof NamedExpression) { + return ((NamedExpression) e).name(); + } else if (e instanceof Literal) { + return e.toString(); + } else { + return e.nodeName(); + } } public static List names(Collection e) { @@ -120,4 +126,4 @@ public static TypeResolution typeMustBeNumeric(Expression e) { return e.dataType().isNumeric()? 
TypeResolution.TYPE_RESOLVED : new TypeResolution( "Argument required to be numeric ('" + Expressions.name(e) + "' of type '" + e.dataType().esType + "')"); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java index 8462ee293cc48..309ee4e8e8638 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/ScalarFunction.java @@ -10,6 +10,7 @@ import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.LiteralAttribute; import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; @@ -68,6 +69,9 @@ protected ScriptTemplate asScript(Expression exp) { if (attr instanceof AggregateFunctionAttribute) { return asScriptFrom((AggregateFunctionAttribute) attr); } + if (attr instanceof LiteralAttribute) { + return asScriptFrom((LiteralAttribute) attr); + } // fall-back to return asScriptFrom((FieldAttribute) attr); } @@ -98,6 +102,12 @@ protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) { aggregate.dataType()); } + protected ScriptTemplate asScriptFrom(LiteralAttribute literal) { + return new ScriptTemplate(formatScript("{}"), + paramsBuilder().variable(literal.literal()).build(), + literal.dataType()); + } + protected String formatScript(String scriptTemplate) { return formatTemplate(scriptTemplate); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java index 5715e19963cbc..e95fec863971b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/arithmetic/ArithmeticFunction.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic; import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.Expressions; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor.BinaryArithmeticOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryNumericFunction; @@ -65,7 +66,7 @@ protected ProcessorDefinition makeProcessorDefinition() { public String name() { StringBuilder sb = new StringBuilder(); sb.append("("); - sb.append(left()); + sb.append(Expressions.name(left())); if (!(left() instanceof Literal)) { sb.insert(1, "("); sb.append(")"); @@ -74,7 +75,7 @@ public String name() { sb.append(operation); sb.append(" "); int pos = sb.length(); - sb.append(right()); + sb.append(Expressions.name(right())); if (!(right() instanceof Literal)) { sb.insert(pos, "("); sb.append(")"); @@ -87,8 +88,4 @@ public 
String name() { public String toString() { return name() + "#" + functionId(); } - - protected boolean useParanthesis() { - return !(left() instanceof Literal) || !(right() instanceof Literal); - } -} \ No newline at end of file +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java index 79f0e970b1eba..3692e5e4752af 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/NamedExpressionTests.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.sql.expression.function; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; import org.elasticsearch.xpack.sql.expression.Literal; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Div; @@ -13,7 +14,10 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mul; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Sub; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.type.EsField; +import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.sql.tree.Location.EMPTY; public class NamedExpressionTests extends ESTestCase { @@ -38,6 +42,12 @@ public void testArithmeticFunctionName() { assertEquals("-5", neg.name()); } + public void testNameForArithmeticFunctionAppliedOnTableColumn() { + FieldAttribute fa = new FieldAttribute(EMPTY, "myField", new EsField("myESField", DataType.INTEGER, emptyMap(), true)); + Add add = new Add(EMPTY, fa, l(10)); + assertEquals("((myField) + 10)", add.name()); + } + private static Literal l(Object value) { return Literal.of(EMPTY, value); } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java index a5e8b549bce8f..856629f8d9188 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvTestUtils.java @@ -113,18 +113,18 @@ private static Tuple extractColumnTypesAndStripCli(String expect } private static Tuple extractColumnTypesFromHeader(String header) { - String[] columnTypes = Strings.delimitedListToStringArray(header, "|", " \t"); + String[] columnTypes = Strings.tokenizeToStringArray(header, "|"); StringBuilder types = new StringBuilder(); StringBuilder columns = new StringBuilder(); for (String column : columnTypes) { - String[] nameType = Strings.delimitedListToStringArray(column, ":"); + String[] nameType = Strings.delimitedListToStringArray(column.trim(), ":"); assertThat("If at least one column has a type associated with it, all columns should have types", nameType, arrayWithSize(2)); if (types.length() > 0) { types.append(","); columns.append("|"); } - columns.append(nameType[0]); - types.append(resolveColumnType(nameType[1])); + columns.append(nameType[0].trim()); + types.append(resolveColumnType(nameType[1].trim())); } return new Tuple<>(columns.toString(), types.toString()); } @@ -206,4 +206,4 @@ public static class CsvTestCase { public 
String query; public String expectedResults; } -} \ No newline at end of file +} diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java index 47f531ebd1f9b..133006c66a820 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/JdbcAssert.java @@ -176,8 +176,8 @@ private static void doAssertResultSetData(ResultSet expected, ResultSet actual, Object expectedObject = expected.getObject(column); Object actualObject = lenient ? actual.getObject(column, expectedColumnClass) : actual.getObject(column); - String msg = format(Locale.ROOT, "Different result for column [" + metaData.getColumnName(column) + "], " - + "entry [" + (count + 1) + "]"); + String msg = format(Locale.ROOT, "Different result for column [%s], entry [%d]", + metaData.getColumnName(column), count + 1); // handle nulls first if (expectedObject == null || actualObject == null) { @@ -230,4 +230,4 @@ private static int typeOf(int columnType, boolean lenient) { return columnType; } -} \ No newline at end of file +} diff --git a/x-pack/qa/sql/src/main/resources/functions.csv-spec b/x-pack/qa/sql/src/main/resources/functions.csv-spec index 1a610aec04861..3622cfe043381 100644 --- a/x-pack/qa/sql/src/main/resources/functions.csv-spec +++ b/x-pack/qa/sql/src/main/resources/functions.csv-spec @@ -407,3 +407,26 @@ SELECT CONCAT(CONCAT(SUBSTRING("first_name",1,LENGTH("first_name")-2),UCASE(LEFT ---------------+--------------------------------------------- AlejandRo |2 ; + + +checkColumnNameWithNestedArithmeticFunctionCallsOnTableColumn +SELECT CHAR(emp_no % 10000) FROM "test_emp" WHERE emp_no > 10064 ORDER BY emp_no LIMIT 1; + +CHAR(((emp_no) % 10000)):s +A +; + +checkColumnNameWithComplexNestedArithmeticFunctionCallsOnTableColumn1 +SELECT CHAR(emp_no % (7000 + 3000)) FROM "test_emp" WHERE emp_no > 10065 ORDER BY emp_no LIMIT 1; + +CHAR(((emp_no) % ((7000 + 3000)))):s +B +; + + +checkColumnNameWithComplexNestedArithmeticFunctionCallsOnTableColumn2 +SELECT CHAR((emp_no % (emp_no - 1 + 1)) + 67) FROM "test_emp" WHERE emp_no > 10066 ORDER BY emp_no LIMIT 1; + +CHAR(((((emp_no) % (((((emp_no) - 1)) + 1)))) + 67)):s +C +; diff --git a/x-pack/qa/sql/src/main/resources/math.sql-spec b/x-pack/qa/sql/src/main/resources/math.sql-spec index e38de2aa6bcbf..6452d2a3ac0a6 100644 --- a/x-pack/qa/sql/src/main/resources/math.sql-spec +++ b/x-pack/qa/sql/src/main/resources/math.sql-spec @@ -128,7 +128,9 @@ mathATan2 // tag::atan2 SELECT ATAN2(emp_no, emp_no) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; // end::atan2 -mathPower // tag::power +mathPowerPositive SELECT POWER(emp_no, 2) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; +mathPowerNegative +SELECT POWER(salary, -1) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; // end::power From b6d68bd805f1858a0210e381402236dea1d42509 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 10 Sep 2018 17:03:52 -0400 Subject: [PATCH 79/91] Test: Fix package name I created a test a few days ago and declared a package that doesn't line up with the directory structure. Oops. I am a little surprised nothing complained. But this fixes it.
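To illustrate, here is a sketch of the offending header, reconstructed from the diff below rather than the file's actual contents (only the package line and the import are taken from the diff; the class body is hypothetical):

    // File: distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java
    package org.elasticsearch.unconfigurednodename; // does not match org/elasticsearch/test/rest on disk

    import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase;

    public class NodeNameInLogsIT extends NodeNameInLogsIntegTestCase {
        // ...
    }

javac does not require the declared package of an explicitly listed source file to match its directory layout, which is presumably why the build stayed quiet.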
--- .../test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java index 13128b9478e0c..2d57644f9a727 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeNameInLogsIT.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.unconfigurednodename; +package org.elasticsearch.test.rest; import org.elasticsearch.common.logging.NodeNameInLogsIntegTestCase; From 624b6bb487c6f5901aff8ee4213fea6f68bbd902 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 10 Sep 2018 17:38:58 -0400 Subject: [PATCH 80/91] Copy and validate soft-deletes setting on resize (#33517) This change copies and validates the soft-deletes setting during resize. If the source enables soft-deletes, the target must also enable it. Closes #33321 --- .../admin/indices/shrink/TransportResizeAction.java | 7 +++++++ .../cluster/metadata/MetaDataCreateIndexService.java | 3 ++- .../indices/shrink/TransportResizeActionTests.java | 11 +++++++++++ .../metadata/MetaDataCreateIndexServiceTests.java | 11 +++++++++++ 4 files changed, 31 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java index 5459805416e91..a9d83cfbce628 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeAction.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.threadpool.ThreadPool; @@ -171,6 +172,12 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest(final Resi throw new IllegalArgumentException("cannot provide index.number_of_routing_shards on resize"); } } + if (IndexSettings.INDEX_SOFT_DELETES_SETTING.exists(metaData.getSettings()) && + IndexSettings.INDEX_SOFT_DELETES_SETTING.get(metaData.getSettings()) && + IndexSettings.INDEX_SOFT_DELETES_SETTING.exists(targetIndexSettings) && + IndexSettings.INDEX_SOFT_DELETES_SETTING.get(targetIndexSettings) == false) { + throw new IllegalArgumentException("Can't disable [index.soft_deletes.enabled] setting on resize"); + } String cause = resizeRequest.getResizeType().name().toLowerCase(Locale.ROOT) + "_index"; targetIndex.cause(cause); Settings.Builder settingsBuilder = Settings.builder().put(targetIndexSettings); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index e41911d123e5c..9466b03c442a0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -749,7 +749,8 @@ static void 
prepareResizeIndexSettings( } } else { final Predicate sourceSettingsPredicate = - (s) -> (s.startsWith("index.similarity.") || s.startsWith("index.analysis.") || s.startsWith("index.sort.")) + (s) -> (s.startsWith("index.similarity.") || s.startsWith("index.analysis.") || + s.startsWith("index.sort.") || s.equals("index.soft_deletes.enabled")) && indexSettingsBuilder.keys().contains(s) == false; builder.put(sourceMetaData.getSettings().filter(sourceSettingsPredicate)); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java index bd43182f00756..ce60b14b3efc7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportResizeActionTests.java @@ -47,6 +47,7 @@ import java.util.HashSet; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.equalTo; public class TransportResizeActionTests extends ESTestCase { @@ -92,6 +93,16 @@ public void testErrorCondition() { ).getMessage().startsWith("Can't merge index with more than [2147483519] docs - too many documents in shards ")); + IllegalArgumentException softDeletesError = expectThrows(IllegalArgumentException.class, () -> { + ResizeRequest req = new ResizeRequest("target", "source"); + req.getTargetIndexRequest().settings(Settings.builder().put("index.soft_deletes.enabled", false)); + ClusterState clusterState = createClusterState("source", 8, 1, + Settings.builder().put("index.blocks.write", true).put("index.soft_deletes.enabled", true).build()); + TransportResizeAction.prepareCreateIndexRequest(req, clusterState, + (i) -> new DocsStats(between(10, 1000), between(1, 10), between(1, 10000)), "source", "target"); + }); + assertThat(softDeletesError.getMessage(), equalTo("Can't disable [index.soft_deletes.enabled] setting on resize")); + // create one that won't fail ClusterState clusterState = ClusterState.builder(createClusterState("source", randomIntBetween(2, 10), 0, Settings.builder().put("index.blocks.write", true).build())).nodes(DiscoveryNodes.builder().add(newNode("node1"))) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 24f5a69656114..abb34f80eac0e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -261,6 +261,7 @@ public void testPrepareResizeIndexSettings() { .put("index.version.upgraded", upgraded) .put("index.similarity.default.type", "BM25") .put("index.analysis.analyzer.default.tokenizer", "keyword") + .put("index.soft_deletes.enabled", "true") .build(); runPrepareResizeIndexSettingsTest( indexSettings, @@ -277,6 +278,7 @@ public void testPrepareResizeIndexSettings() { assertThat(settings.get("index.allocation.max_retries"), equalTo("1")); assertThat(settings.getAsVersion("index.version.created", null), equalTo(version)); assertThat(settings.getAsVersion("index.version.upgraded", null), equalTo(upgraded)); + assertThat(settings.get("index.soft_deletes.enabled"), equalTo("true")); }); } @@ -337,6 +339,15 @@ public void testPrepareResizeIndexSettingsSimilaritySettings() { } + public void 
void testDoNotOverrideSoftDeletesSettingOnResize() { + runPrepareResizeIndexSettingsTest( + Settings.builder().put("index.soft_deletes.enabled", "false").build(), + Settings.builder().put("index.soft_deletes.enabled", "true").build(), + Collections.emptyList(), + randomBoolean(), + settings -> assertThat(settings.get("index.soft_deletes.enabled"), equalTo("true"))); + } + private void runPrepareResizeIndexSettingsTest( final Settings sourceSettings, final Settings requestSettings, From 6075e159e5e47e7ed38073e417feb8a818dde259 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 11 Sep 2018 01:24:18 +0200 Subject: [PATCH 81/91] Validate list values for settings (#33503) When we see a settings value, it could be a list. Yet this should only happen if the underlying setting type is a list setting type. This commit adds validation that when we get a setting value that is a list, the setting that we are getting is a list setting. And similarly, if we get a value for a list setting, the underlying value should be a list. --- .../common/settings/Setting.java | 8 +++++-- .../common/settings/Settings.java | 24 +++++++++++++++++++ .../common/settings/SettingTests.java | 7 ++++++ 3 files changed, 37 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 89bbe752a1ffc..5244cdd726d05 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -345,6 +345,11 @@ boolean isGroupSetting() { return false; } + + final boolean isListSetting() { + return this instanceof ListSetting; + } + boolean hasComplexMatcher() { return isGroupSetting(); } @@ -453,7 +458,7 @@ public final String getRaw(final Settings settings) { * @return the raw string representation of the setting value */ String innerGetRaw(final Settings settings) { - return settings.get(getKey(), defaultValue.apply(settings)); + return settings.get(getKey(), defaultValue.apply(settings), isListSetting()); } /** Logs a deprecation warning if the setting is deprecated and used. */ @@ -1305,7 +1310,6 @@ public void diff(Settings.Builder builder, Settings source, Settings defaultSett } } } - } static void logSettingUpdate(Setting setting, Settings current, Settings previous, Logger logger) { diff --git a/server/src/main/java/org/elasticsearch/common/settings/Settings.java b/server/src/main/java/org/elasticsearch/common/settings/Settings.java index 2eb14f7ac6592..1aeed2aee5115 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -245,6 +245,30 @@ public String get(String setting, String defaultValue) { return retVal == null ? defaultValue : retVal; } + /** + * Returns the setting value associated with the setting key. If it does not exist, + * returns the default value provided. + */ + String get(String setting, String defaultValue, boolean isList) { + Object value = settings.get(setting); + if (value != null) { + if (value instanceof List) { + if (isList == false) { + throw new IllegalArgumentException( + "Found list type value for setting [" + setting + "] but did not expect a list for it." 
+ ); + } + } else if (isList) { + throw new IllegalArgumentException( + "Expected list type value for setting [" + setting + "] but found [" + value.getClass() + ']' + ); + } + return toString(value); + } else { + return defaultValue; + } + } + /** * Returns the setting value (as float) associated with the setting key. If it does not exists, * returns the default value provided. diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java index b13988b705059..30cfee81ddd40 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -180,6 +180,13 @@ public void testSimpleUpdate() { } } + public void testValidateStringSetting() { + Settings settings = Settings.builder().putList("foo.bar", Arrays.asList("bla-a", "bla-b")).build(); + Setting stringSetting = Setting.simpleString("foo.bar", Property.NodeScope); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> stringSetting.get(settings)); + assertEquals("Found list type value for setting [foo.bar] but did not expect a list for it.", e.getMessage()); + } + private static final Setting FOO_BAR_SETTING = new Setting<>( "foo.bar", "foobar", From ea3fdc90c69fdef18521bfdd522574bab4dea0cb Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Mon, 10 Sep 2018 20:06:42 -0400 Subject: [PATCH 82/91] Add full cluster restart base class (#33577) This commit adds a base class for full cluster restart tests. --- .../upgrades/FullClusterRestartIT.java | 82 +++++++------------ .../upgrades/QueryBuilderBWCIT.java | 32 +------- .../AbstractFullClusterRestartTestCase.java | 60 ++++++++++++++ .../xpack/restart/FullClusterRestartIT.java | 58 ++++--------- 4 files changed, 111 insertions(+), 121 deletions(-) create mode 100644 test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 80bed9db5f3da..3ee5c07308f10 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -68,10 +68,8 @@ * version is started with the same data directories and then this is rerun * with {@code tests.is_old_cluster} set to {@code false}. 
*/ -public class FullClusterRestartIT extends ESRestTestCase { - private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); - private final Version oldClusterVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); - private final boolean supportsLenientBooleans = oldClusterVersion.before(Version.V_6_0_0_alpha1); +public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { + private final boolean supportsLenientBooleans = getOldClusterVersion().before(Version.V_6_0_0_alpha1); private static final Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); private String index; @@ -81,29 +79,9 @@ public void setIndex() { index = getTestName().toLowerCase(Locale.ROOT); } - @Override - protected boolean preserveIndicesUponCompletion() { - return true; - } - - @Override - protected boolean preserveSnapshotsUponCompletion() { - return true; - } - - @Override - protected boolean preserveReposUponCompletion() { - return true; - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - public void testSearch() throws Exception { int count; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -169,7 +147,7 @@ public void testSearch() throws Exception { } public void testNewReplicasWork() throws Exception { - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -237,10 +215,10 @@ public void testNewReplicasWork() throws Exception { */ public void testAliasWithBadName() throws Exception { assumeTrue("Can only test bad alias name if old cluster is on 5.1.0 or before", - oldClusterVersion.before(VERSION_5_1_0_UNRELEASED)); + getOldClusterVersion().before(VERSION_5_1_0_UNRELEASED)); int count; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -291,7 +269,7 @@ public void testAliasWithBadName() throws Exception { Map searchRsp = entityAsMap(client().performRequest(new Request("GET", "/" + aliasName + "/_search"))); int totalHits = (int) XContentMapValues.extractValue("hits.total", searchRsp); assertEquals(count, totalHits); - if (runningAgainstOldCluster == false) { + if (isRunningAgainstOldCluster() == false) { // We can remove the alias. Response response = client().performRequest(new Request("DELETE", "/" + index + "/_alias/" + aliasName)); assertEquals(200, response.getStatusLine().getStatusCode()); @@ -302,7 +280,7 @@ public void testAliasWithBadName() throws Exception { } public void testClusterState() throws Exception { - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); mappingsAndSettings.field("template", index); @@ -341,14 +319,14 @@ public void testClusterState() throws Exception { assertEquals("0", numberOfReplicas); Version version = Version.fromId(Integer.valueOf((String) XContentMapValues.extractValue("metadata.indices." 
+ index + ".settings.index.version.created", clusterState))); - assertEquals(oldClusterVersion, version); + assertEquals(getOldClusterVersion(), version); } public void testShrink() throws IOException { String shrunkenIndex = index + "_shrunk"; int numDocs; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -413,7 +391,7 @@ public void testShrink() throws IOException { public void testShrinkAfterUpgrade() throws IOException { String shrunkenIndex = index + "_shrunk"; int numDocs; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -465,7 +443,7 @@ public void testShrinkAfterUpgrade() throws IOException { int totalHits = (int) XContentMapValues.extractValue("hits.total", response); assertEquals(numDocs, totalHits); - if (runningAgainstOldCluster == false) { + if (isRunningAgainstOldCluster() == false) { response = entityAsMap(client().performRequest(new Request("GET", "/" + shrunkenIndex + "/_search"))); assertNoFailures(response); totalShards = (int) XContentMapValues.extractValue("_shards.total", response); @@ -490,7 +468,7 @@ public void testShrinkAfterUpgrade() throws IOException { * */ public void testRollover() throws IOException { - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { Request createIndex = new Request("PUT", "/" + index + "-000001"); createIndex.setJsonEntity("{" + " \"aliases\": {" @@ -511,7 +489,7 @@ public void testRollover() throws IOException { bulkRequest.addParameter("refresh", ""); assertThat(EntityUtils.toString(client().performRequest(bulkRequest).getEntity()), containsString("\"errors\":false")); - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { Request rolloverRequest = new Request("POST", "/" + index + "_write/_rollover"); rolloverRequest.setJsonEntity("{" + " \"conditions\": {" @@ -529,7 +507,7 @@ public void testRollover() throws IOException { Map count = entityAsMap(client().performRequest(countRequest)); assertNoFailures(count); - int expectedCount = bulkCount + (runningAgainstOldCluster ? 0 : bulkCount); + int expectedCount = bulkCount + (isRunningAgainstOldCluster() ? 
0 : bulkCount); assertEquals(expectedCount, (int) XContentMapValues.extractValue("hits.total", count)); } @@ -688,7 +666,7 @@ public void testSingleDoc() throws IOException { String docLocation = "/" + index + "/doc/1"; String doc = "{\"test\": \"test\"}"; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { Request createDoc = new Request("PUT", docLocation); createDoc.setJsonEntity(doc); client().performRequest(createDoc); @@ -703,7 +681,7 @@ public void testSingleDoc() throws IOException { public void testEmptyShard() throws IOException { final String index = "test_empty_shard"; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { Settings.Builder settings = Settings.builder() .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) @@ -726,7 +704,7 @@ public void testEmptyShard() throws IOException { public void testRecovery() throws Exception { int count; boolean shouldHaveTranslog; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { count = between(200, 300); /* We've had bugs in the past where we couldn't restore * an index without a translog so we randomize whether @@ -772,7 +750,7 @@ public void testRecovery() throws Exception { String countResponse = toStr(client().performRequest(countRequest)); assertThat(countResponse, containsString("\"total\":" + count)); - if (false == runningAgainstOldCluster) { + if (false == isRunningAgainstOldCluster()) { boolean restoredFromTranslog = false; boolean foundPrimary = false; Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index); @@ -800,7 +778,7 @@ public void testRecovery() throws Exception { assertEquals("mismatch while checking for translog recovery\n" + recoveryResponse, shouldHaveTranslog, restoredFromTranslog); String currentLuceneVersion = Version.CURRENT.luceneVersion.toString(); - String bwcLuceneVersion = oldClusterVersion.luceneVersion.toString(); + String bwcLuceneVersion = getOldClusterVersion().luceneVersion.toString(); if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) { int numCurrentVersion = 0; int numBwcVersion = 0; @@ -840,7 +818,7 @@ public void testRecovery() throws Exception { */ public void testSnapshotRestore() throws IOException { int count; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { // Create the index count = between(200, 300); indexRandomDocuments(count, true, true, i -> jsonBuilder().startObject().field("field", "value").endObject()); @@ -860,7 +838,7 @@ public void testSnapshotRestore() throws IOException { // Stick a routing attribute into to cluster settings so we can see it after the restore Request addRoutingSettings = new Request("PUT", "/_cluster/settings"); addRoutingSettings.setJsonEntity( - "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + oldClusterVersion + "\"}}"); + "{\"persistent\": {\"cluster.routing.allocation.exclude.test_attr\": \"" + getOldClusterVersion() + "\"}}"); client().performRequest(addRoutingSettings); // Stick a template into the cluster so we can see it after the restore @@ -885,7 +863,7 @@ public void testSnapshotRestore() throws IOException { templateBuilder.startObject("alias2"); { templateBuilder.startObject("filter"); { templateBuilder.startObject("term"); { - templateBuilder.field("version", runningAgainstOldCluster ? oldClusterVersion : Version.CURRENT); + templateBuilder.field("version", isRunningAgainstOldCluster() ? 
getOldClusterVersion() : Version.CURRENT); } templateBuilder.endObject(); } @@ -898,7 +876,7 @@ public void testSnapshotRestore() throws IOException { createTemplateRequest.setJsonEntity(Strings.toString(templateBuilder)); client().performRequest(createTemplateRequest); - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { // Create the repo XContentBuilder repoConfig = JsonXContent.contentBuilder().startObject(); { repoConfig.field("type", "fs"); @@ -914,19 +892,19 @@ public void testSnapshotRestore() throws IOException { client().performRequest(createRepoRequest); } - Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap")); + Request createSnapshot = new Request("PUT", "/_snapshot/repo/" + (isRunningAgainstOldCluster() ? "old_snap" : "new_snap")); createSnapshot.addParameter("wait_for_completion", "true"); createSnapshot.setJsonEntity("{\"indices\": \"" + index + "\"}"); client().performRequest(createSnapshot); - checkSnapshot("old_snap", count, oldClusterVersion); - if (false == runningAgainstOldCluster) { + checkSnapshot("old_snap", count, getOldClusterVersion()); + if (false == isRunningAgainstOldCluster()) { checkSnapshot("new_snap", count, Version.CURRENT); } } public void testHistoryUUIDIsAdded() throws Exception { - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -1022,7 +1000,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion Map expectedClusterSettings = new HashMap<>(); expectedClusterSettings.put("transient", emptyMap()); expectedClusterSettings.put("persistent", - singletonMap("cluster.routing.allocation.exclude.test_attr", oldClusterVersion.toString())); + singletonMap("cluster.routing.allocation.exclude.test_attr", getOldClusterVersion().toString())); if (expectedClusterSettings.equals(clusterSettingsResponse) == false) { NotEqualMessageBuilder builder = new NotEqualMessageBuilder(); builder.compareMaps(clusterSettingsResponse, expectedClusterSettings); @@ -1032,7 +1010,7 @@ private void checkSnapshot(String snapshotName, int count, Version tookOnVersion // Check that the template was restored successfully Map getTemplateResponse = entityAsMap(client().performRequest(new Request("GET", "/_template/test_template"))); Map expectedTemplate = new HashMap<>(); - if (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_0_0_beta1)) { + if (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_6_0_0_beta1)) { expectedTemplate.put("template", "evil_*"); } else { expectedTemplate.put("index_patterns", singletonList("evil_*")); diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 49a9dec870e75..2b7250f86b7cd 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -20,10 +20,8 @@ package org.elasticsearch.upgrades; import org.apache.http.util.EntityUtils; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import 
org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; @@ -48,7 +46,6 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.search.SearchModule; -import org.elasticsearch.test.rest.ESRestTestCase; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -71,7 +68,7 @@ * The queries to test are specified in json format, which turns out to work because we tend break here rarely. If the * json format of a query being tested here then feel free to change this. */ -public class QueryBuilderBWCIT extends ESRestTestCase { +public class QueryBuilderBWCIT extends AbstractFullClusterRestartTestCase { private static final List CANDIDATES = new ArrayList<>(); @@ -145,32 +142,9 @@ private static void addCandidate(String querySource, QueryBuilder expectedQb) { CANDIDATES.add(new Object[]{"{\"query\": {" + querySource + "}}", expectedQb}); } - private final Version oldClusterVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); - private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); - - @Override - protected boolean preserveIndicesUponCompletion() { - return true; - } - - @Override - protected boolean preserveSnapshotsUponCompletion() { - return true; - } - - @Override - protected boolean preserveReposUponCompletion() { - return true; - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - public void testQueryBuilderBWC() throws Exception { String index = "queries"; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { XContentBuilder mappingsAndSettings = jsonBuilder(); mappingsAndSettings.startObject(); { @@ -230,7 +204,7 @@ public void testQueryBuilderBWC() throws Exception { byte[] qbSource = Base64.getDecoder().decode(queryBuilderStr); try (InputStream in = new ByteArrayInputStream(qbSource, 0, qbSource.length)) { try (StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry)) { - input.setVersion(oldClusterVersion); + input.setVersion(getOldClusterVersion()); QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class); assert in.read() == -1; assertEquals(expectedQueryBuilder, queryBuilder); diff --git a/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java new file mode 100644 index 0000000000000..62c8e2f00ffe5 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.upgrades; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.test.rest.ESRestTestCase; + +public abstract class AbstractFullClusterRestartTestCase extends ESRestTestCase { + + private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); + + public final boolean isRunningAgainstOldCluster() { + return runningAgainstOldCluster; + } + + private final Version oldClusterVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); + + public final Version getOldClusterVersion() { + return oldClusterVersion; + } + + @Override + protected boolean preserveIndicesUponCompletion() { + return true; + } + + @Override + protected boolean preserveSnapshotsUponCompletion() { + return true; + } + + @Override + protected boolean preserveReposUponCompletion() { + return true; + } + + @Override + protected boolean preserveTemplatesUponCompletion() { + return true; + } + +} diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 7c4eda37d2fb0..8a6944fb87037 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -10,7 +10,6 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentType; @@ -18,6 +17,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.upgrades.AbstractFullClusterRestartTestCase; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -54,35 +54,13 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; -public class FullClusterRestartIT extends ESRestTestCase { - private final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); - private final Version oldClusterVersion = Version.fromString(System.getProperty("tests.old_cluster_version")); +public class FullClusterRestartIT extends AbstractFullClusterRestartTestCase { @Before public void waitForMlTemplates() throws Exception { XPackRestTestHelper.waitForMlTemplates(client()); } - @Override - protected boolean preserveIndicesUponCompletion() { - return true; - } - - @Override - protected boolean preserveSnapshotsUponCompletion() { - return true; - } - - @Override - protected boolean preserveReposUponCompletion() { - return true; - } - - @Override - protected boolean preserveTemplatesUponCompletion() { - return true; - } - @Override protected Settings restClientSettings() { String token = "Basic " + Base64.getEncoder().encodeToString("test_user:x-pack-test-password".getBytes(StandardCharsets.UTF_8)); @@ 
-103,7 +81,7 @@ public void testSingleDoc() throws IOException { String docLocation = "/testsingledoc/doc/1"; String doc = "{\"test\": \"test\"}"; - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { Request createDoc = new Request("PUT", docLocation); createDoc.addParameter("refresh", "true"); createDoc.setJsonEntity(doc); @@ -115,7 +93,7 @@ public void testSingleDoc() throws IOException { @SuppressWarnings("unchecked") public void testSecurityNativeRealm() throws Exception { - if (runningAgainstOldCluster) { + if (isRunningAgainstOldCluster()) { createUser("preupgrade_user"); createRole("preupgrade_role"); } else { @@ -165,15 +143,15 @@ public void testSecurityNativeRealm() throws Exception { assertUserInfo("preupgrade_user"); assertRoleInfo("preupgrade_role"); - if (!runningAgainstOldCluster) { + if (isRunningAgainstOldCluster() == false) { assertUserInfo("postupgrade_user"); assertRoleInfo("postupgrade_role"); } } public void testWatcher() throws Exception { - if (runningAgainstOldCluster) { - logger.info("Adding a watch on old cluster {}", oldClusterVersion); + if (isRunningAgainstOldCluster()) { + logger.info("Adding a watch on old cluster {}", getOldClusterVersion()); Request createBwcWatch = new Request("PUT", "_xpack/watcher/watch/bwc_watch"); createBwcWatch.setJsonEntity(loadWatch("simple-watch.json")); client().performRequest(createBwcWatch); @@ -194,7 +172,7 @@ public void testWatcher() throws Exception { waitForHits(".watcher-history*", 2); logger.info("Done creating watcher-related indices"); } else { - logger.info("testing against {}", oldClusterVersion); + logger.info("testing against {}", getOldClusterVersion()); waitForYellow(".watches,bwc_watch_index,.watcher-history*"); logger.info("checking if the upgrade procedure on the new cluster is required"); @@ -264,8 +242,8 @@ public void testWatcher() throws Exception { * Tests that a RollUp job created on a old cluster is correctly restarted after the upgrade. 
*/ public void testRollupAfterRestart() throws Exception { - assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); - if (runningAgainstOldCluster) { + assumeTrue("Rollup can be tested with 6.3.0 and onwards", getOldClusterVersion().onOrAfter(Version.V_6_3_0)); + if (isRunningAgainstOldCluster()) { final int numDocs = 59; final int year = randomIntBetween(1970, 2018); @@ -315,7 +293,7 @@ public void testRollupAfterRestart() throws Exception { final Request clusterHealthRequest = new Request("GET", "/_cluster/health"); clusterHealthRequest.addParameter("wait_for_status", "yellow"); clusterHealthRequest.addParameter("wait_for_no_relocating_shards", "true"); - if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { + if (getOldClusterVersion().onOrAfter(Version.V_6_2_0)) { clusterHealthRequest.addParameter("wait_for_no_initializing_shards", "true"); } Map clusterHealthResponse = entityAsMap(client().performRequest(clusterHealthRequest)); @@ -326,9 +304,9 @@ public void testRollupAfterRestart() throws Exception { } public void testRollupIDSchemeAfterRestart() throws Exception { - assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); - assumeTrue("Rollup ID scheme changed in 6.4", oldClusterVersion.before(Version.V_6_4_0)); - if (runningAgainstOldCluster) { + assumeTrue("Rollup can be tested with 6.3.0 and onwards", getOldClusterVersion().onOrAfter(Version.V_6_3_0)); + assumeTrue("Rollup ID scheme changed in 6.4", getOldClusterVersion().before(Version.V_6_4_0)); + if (isRunningAgainstOldCluster()) { final Request indexRequest = new Request("POST", "/id-test-rollup/_doc/1"); indexRequest.setJsonEntity("{\"timestamp\":\"2018-01-01T00:00:01\",\"value\":123}"); @@ -439,8 +417,8 @@ public void testRollupIDSchemeAfterRestart() throws Exception { public void testSqlFailsOnIndexWithTwoTypes() throws IOException { // TODO this isn't going to trigger until we backport to 6.1 assumeTrue("It is only possible to build an index that sql doesn't like before 6.0.0", - oldClusterVersion.before(Version.V_6_0_0_alpha1)); - if (runningAgainstOldCluster) { + getOldClusterVersion().before(Version.V_6_0_0_alpha1)); + if (isRunningAgainstOldCluster()) { Request doc1 = new Request("POST", "/testsqlfailsonindexwithtwotypes/type1"); doc1.setJsonEntity("{}"); client().performRequest(doc1); @@ -550,7 +528,7 @@ private void waitForYellow(String indexName) throws IOException { request.addParameter("wait_for_status", "yellow"); request.addParameter("timeout", "30s"); request.addParameter("wait_for_no_relocating_shards", "true"); - if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { + if (getOldClusterVersion().onOrAfter(Version.V_6_2_0)) { request.addParameter("wait_for_no_initializing_shards", "true"); } Map response = entityAsMap(client().performRequest(request)); @@ -668,7 +646,7 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // Persistent task state field has been renamed in 6.4.0 from "status" to "state" final String stateFieldName - = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; + = (isRunningAgainstOldCluster() && getOldClusterVersion().before(Version.V_6_4_0)) ? "status" : "state"; final String jobStateField = "task.xpack/rollup/job." 
+ stateFieldName + ".job_state"; assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"), From 8e05ce567fe734ed4d9eb299d5f4a972a200fcf9 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 11 Sep 2018 08:46:26 +0100 Subject: [PATCH 83/91] [ML] Rename input_fields to column_names in file structure (#33568) This change tightens up the meaning of the "input_fields" field in the file structure finder output. Previously it was permitted but not calculated for JSON and XML files. Following this change the field is called "column_names" and is only permitted for delimited files. Additionally the way the column names are set for headerless delimited files is refactored to encapsulate the way they're named to one line of the code rather than having the same logic in two places. --- .../ml/filestructurefinder/FileStructure.java | 47 ++++++++++--------- .../FileStructureTests.java | 9 ++-- .../DelimitedFileStructureFinder.java | 16 ++++--- .../DelimitedFileStructureFinderTests.java | 12 ++--- 4 files changed, 45 insertions(+), 39 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java index 5484f9f9902f4..dd508dfb36b74 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructure.java @@ -92,7 +92,7 @@ public String toString() { static final ParseField STRUCTURE = new ParseField("format"); static final ParseField MULTILINE_START_PATTERN = new ParseField("multiline_start_pattern"); static final ParseField EXCLUDE_LINES_PATTERN = new ParseField("exclude_lines_pattern"); - static final ParseField INPUT_FIELDS = new ParseField("input_fields"); + static final ParseField COLUMN_NAMES = new ParseField("column_names"); static final ParseField HAS_HEADER_ROW = new ParseField("has_header_row"); static final ParseField DELIMITER = new ParseField("delimiter"); static final ParseField SHOULD_TRIM_FIELDS = new ParseField("should_trim_fields"); @@ -115,7 +115,7 @@ public String toString() { PARSER.declareString((p, c) -> p.setFormat(Format.fromString(c)), STRUCTURE); PARSER.declareString(Builder::setMultilineStartPattern, MULTILINE_START_PATTERN); PARSER.declareString(Builder::setExcludeLinesPattern, EXCLUDE_LINES_PATTERN); - PARSER.declareStringArray(Builder::setInputFields, INPUT_FIELDS); + PARSER.declareStringArray(Builder::setColumnNames, COLUMN_NAMES); PARSER.declareBoolean(Builder::setHasHeaderRow, HAS_HEADER_ROW); PARSER.declareString((p, c) -> p.setDelimiter(c.charAt(0)), DELIMITER); PARSER.declareBoolean(Builder::setShouldTrimFields, SHOULD_TRIM_FIELDS); @@ -142,7 +142,7 @@ public String toString() { private final Format format; private final String multilineStartPattern; private final String excludeLinesPattern; - private final List inputFields; + private final List columnNames; private final Boolean hasHeaderRow; private final Character delimiter; private final Boolean shouldTrimFields; @@ -155,7 +155,7 @@ public String toString() { private final List explanation; public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampleStart, String charset, Boolean hasByteOrderMarker, - Format format, String multilineStartPattern, String excludeLinesPattern, List inputFields, + Format format, String 
multilineStartPattern, String excludeLinesPattern, List columnNames, Boolean hasHeaderRow, Character delimiter, Boolean shouldTrimFields, String grokPattern, String timestampField, List timestampFormats, boolean needClientTimezone, Map mappings, Map fieldStats, List explanation) { @@ -168,7 +168,7 @@ public FileStructure(int numLinesAnalyzed, int numMessagesAnalyzed, String sampl this.format = Objects.requireNonNull(format); this.multilineStartPattern = multilineStartPattern; this.excludeLinesPattern = excludeLinesPattern; - this.inputFields = (inputFields == null) ? null : Collections.unmodifiableList(new ArrayList<>(inputFields)); + this.columnNames = (columnNames == null) ? null : Collections.unmodifiableList(new ArrayList<>(columnNames)); this.hasHeaderRow = hasHeaderRow; this.delimiter = delimiter; this.shouldTrimFields = shouldTrimFields; @@ -190,7 +190,7 @@ public FileStructure(StreamInput in) throws IOException { format = in.readEnum(Format.class); multilineStartPattern = in.readOptionalString(); excludeLinesPattern = in.readOptionalString(); - inputFields = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; + columnNames = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; hasHeaderRow = in.readOptionalBoolean(); delimiter = in.readBoolean() ? (char) in.readVInt() : null; shouldTrimFields = in.readOptionalBoolean(); @@ -213,11 +213,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeEnum(format); out.writeOptionalString(multilineStartPattern); out.writeOptionalString(excludeLinesPattern); - if (inputFields == null) { + if (columnNames == null) { out.writeBoolean(false); } else { out.writeBoolean(true); - out.writeCollection(inputFields, StreamOutput::writeString); + out.writeCollection(columnNames, StreamOutput::writeString); } out.writeOptionalBoolean(hasHeaderRow); if (delimiter == null) { @@ -273,8 +273,8 @@ public String getExcludeLinesPattern() { return excludeLinesPattern; } - public List getInputFields() { - return inputFields; + public List getColumnNames() { + return columnNames; } public Boolean getHasHeaderRow() { @@ -335,8 +335,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (excludeLinesPattern != null && excludeLinesPattern.isEmpty() == false) { builder.field(EXCLUDE_LINES_PATTERN.getPreferredName(), excludeLinesPattern); } - if (inputFields != null && inputFields.isEmpty() == false) { - builder.field(INPUT_FIELDS.getPreferredName(), inputFields); + if (columnNames != null && columnNames.isEmpty() == false) { + builder.field(COLUMN_NAMES.getPreferredName(), columnNames); } if (hasHeaderRow != null) { builder.field(HAS_HEADER_ROW.getPreferredName(), hasHeaderRow.booleanValue()); @@ -377,7 +377,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public int hashCode() { return Objects.hash(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, - multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, + multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation); } @@ -402,7 +402,7 @@ public boolean equals(Object other) { Objects.equals(this.format, that.format) && Objects.equals(this.multilineStartPattern, that.multilineStartPattern) && 
Objects.equals(this.excludeLinesPattern, that.excludeLinesPattern) && - Objects.equals(this.inputFields, that.inputFields) && + Objects.equals(this.columnNames, that.columnNames) && Objects.equals(this.hasHeaderRow, that.hasHeaderRow) && Objects.equals(this.delimiter, that.delimiter) && Objects.equals(this.shouldTrimFields, that.shouldTrimFields) && @@ -424,7 +424,7 @@ public static class Builder { private Format format; private String multilineStartPattern; private String excludeLinesPattern; - private List inputFields; + private List columnNames; private Boolean hasHeaderRow; private Character delimiter; private Boolean shouldTrimFields; @@ -484,8 +484,8 @@ public Builder setExcludeLinesPattern(String excludeLinesPattern) { return this; } - public Builder setInputFields(List inputFields) { - this.inputFields = inputFields; + public Builder setColumnNames(List columnNames) { + this.columnNames = columnNames; return this; } @@ -573,6 +573,9 @@ public FileStructure build() { } // $FALL-THROUGH$ case XML: + if (columnNames != null) { + throw new IllegalArgumentException("Column names may not be specified for [" + format + "] structures."); + } if (hasHeaderRow != null) { throw new IllegalArgumentException("Has header row may not be specified for [" + format + "] structures."); } @@ -584,8 +587,8 @@ public FileStructure build() { } break; case DELIMITED: - if (inputFields == null || inputFields.isEmpty()) { - throw new IllegalArgumentException("Input fields must be specified for [" + format + "] structures."); + if (columnNames == null || columnNames.isEmpty()) { + throw new IllegalArgumentException("Column names must be specified for [" + format + "] structures."); } if (hasHeaderRow == null) { throw new IllegalArgumentException("Has header row must be specified for [" + format + "] structures."); @@ -598,8 +601,8 @@ public FileStructure build() { } break; case SEMI_STRUCTURED_TEXT: - if (inputFields != null) { - throw new IllegalArgumentException("Input fields may not be specified for [" + format + "] structures."); + if (columnNames != null) { + throw new IllegalArgumentException("Column names may not be specified for [" + format + "] structures."); } if (hasHeaderRow != null) { throw new IllegalArgumentException("Has header row may not be specified for [" + format + "] structures."); @@ -635,7 +638,7 @@ public FileStructure build() { } return new FileStructure(numLinesAnalyzed, numMessagesAnalyzed, sampleStart, charset, hasByteOrderMarker, format, - multilineStartPattern, excludeLinesPattern, inputFields, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, + multilineStartPattern, excludeLinesPattern, columnNames, hasHeaderRow, delimiter, shouldTrimFields, grokPattern, timestampField, timestampFormats, needClientTimezone, mappings, fieldStats, explanation); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java index 6dcf675196508..e09b9e3f91e7a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/filestructurefinder/FileStructureTests.java @@ -50,18 +50,17 @@ public static FileStructure createTestFileStructure() { builder.setExcludeLinesPattern(randomAlphaOfLength(100)); } - if (format == FileStructure.Format.DELIMITED || (format.supportsNesting() && randomBoolean())) 
{ - builder.setInputFields(Arrays.asList(generateRandomStringArray(10, 10, false, false))); - } if (format == FileStructure.Format.DELIMITED) { + builder.setColumnNames(Arrays.asList(generateRandomStringArray(10, 10, false, false))); builder.setHasHeaderRow(randomBoolean()); builder.setDelimiter(randomFrom(',', '\t', ';', '|')); } - if (format.isSemiStructured()) { + + if (format == FileStructure.Format.SEMI_STRUCTURED_TEXT) { builder.setGrokPattern(randomAlphaOfLength(100)); } - if (format.isSemiStructured() || randomBoolean()) { + if (format == FileStructure.Format.SEMI_STRUCTURED_TEXT || randomBoolean()) { builder.setTimestampField(randomAlphaOfLength(10)); builder.setTimestampFormats(Arrays.asList(generateRandomStringArray(3, 20, false, false))); builder.setNeedClientTimezone(randomBoolean()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java index 625858c867a45..ba6b590dfc8cd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinder.java @@ -49,10 +49,12 @@ static DelimitedFileStructureFinder makeDelimitedFileStructureFinder(List headerInfo = findHeaderFromSample(explanation, rows); boolean isHeaderInFile = headerInfo.v1(); String[] header = headerInfo.v2(); - String[] headerWithNamedBlanks = new String[header.length]; + // The column names are the header names but with blanks named column1, column2, etc. + String[] columnNames = new String[header.length]; for (int i = 0; i < header.length; ++i) { - String rawHeader = header[i].isEmpty() ? "column" + (i + 1) : header[i]; - headerWithNamedBlanks[i] = trimFields ? rawHeader.trim() : rawHeader; + assert header[i] != null; + String rawHeader = trimFields ? header[i].trim() : header[i]; + columnNames[i] = rawHeader.isEmpty() ? "column" + (i + 1) : rawHeader; } List sampleLines = Arrays.asList(sample.split("\n")); @@ -63,7 +65,7 @@ static DelimitedFileStructureFinder makeDelimitedFileStructureFinder(List row = rows.get(index); int lineNumber = lineNumbers.get(index); Map sampleRecord = new LinkedHashMap<>(); - Util.filterListToMap(sampleRecord, headerWithNamedBlanks, + Util.filterListToMap(sampleRecord, columnNames, trimFields ? row.stream().map(String::trim).collect(Collectors.toList()) : row); sampleRecords.add(sampleRecord); sampleMessages.add( @@ -82,7 +84,7 @@ static DelimitedFileStructureFinder makeDelimitedFileStructureFinder(List findHeaderFromSample(List explanation, L // SuperCSV will put nulls in the header if any columns don't have names, but empty strings are better for us return new Tuple<>(true, firstRow.stream().map(field -> (field == null) ? 
"" : field).toArray(String[]::new)); } else { - return new Tuple<>(false, IntStream.rangeClosed(1, firstRow.size()).mapToObj(num -> "column" + num).toArray(String[]::new)); + String[] dummyHeader = new String[firstRow.size()]; + Arrays.fill(dummyHeader, ""); + return new Tuple<>(false, dummyHeader); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java index 6d1f039399eba..4e692d583918e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/filestructurefinder/DelimitedFileStructureFinderTests.java @@ -45,7 +45,7 @@ public void testCreateConfigsGivenCompleteCsv() throws Exception { assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); - assertEquals(Arrays.asList("time", "message"), structure.getInputFields()); + assertEquals(Arrays.asList("time", "message"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); @@ -76,7 +76,7 @@ public void testCreateConfigsGivenCsvWithIncompleteLastRecord() throws Exception assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); - assertEquals(Arrays.asList("message", "time", "count"), structure.getInputFields()); + assertEquals(Arrays.asList("message", "time", "count"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("time", structure.getTimestampField()); assertEquals(Collections.singletonList("ISO8601"), structure.getTimestampFormats()); @@ -114,7 +114,7 @@ public void testCreateConfigsGivenCsvWithTrailingNulls() throws Exception { assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance", "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax", - "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getInputFields()); + "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount", "column18", "column19"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", structure.getTimestampField()); assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); @@ -152,7 +152,7 @@ public void testCreateConfigsGivenCsvWithTrailingNullsExceptHeader() throws Exce assertNull(structure.getShouldTrimFields()); assertEquals(Arrays.asList("VendorID", "tpep_pickup_datetime", "tpep_dropoff_datetime", "passenger_count", "trip_distance", "RatecodeID", "store_and_fwd_flag", "PULocationID", "DOLocationID", "payment_type", "fare_amount", "extra", "mta_tax", - "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getInputFields()); + "tip_amount", "tolls_amount", "improvement_surcharge", "total_amount"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("tpep_pickup_datetime", 
structure.getTimestampField()); assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss"), structure.getTimestampFormats()); @@ -183,7 +183,7 @@ public void testCreateConfigsGivenCsvWithTimeLastColumn() throws Exception { assertEquals(Character.valueOf(','), structure.getDelimiter()); assertTrue(structure.getHasHeaderRow()); assertNull(structure.getShouldTrimFields()); - assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getInputFields()); + assertEquals(Arrays.asList("pos_id", "trip_id", "latitude", "longitude", "altitude", "timestamp"), structure.getColumnNames()); assertNull(structure.getGrokPattern()); assertEquals("timestamp", structure.getTimestampField()); assertEquals(Collections.singletonList("YYYY-MM-dd HH:mm:ss.SSSSSS"), structure.getTimestampFormats()); @@ -213,7 +213,7 @@ public void testFindHeaderFromSampleGivenHeaderNotInSample() throws IOException DelimitedFileStructureFinder.readRows(withoutHeader, CsvPreference.EXCEL_PREFERENCE).v1()); assertFalse(header.v1()); - assertThat(header.v2(), arrayContaining("column1", "column2", "column3", "column4")); + assertThat(header.v2(), arrayContaining("", "", "", "")); } public void testLevenshteinDistance() { From a55fa4fd6b145ce9da849834df9abe19307019f5 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Tue, 11 Sep 2018 11:00:56 +0300 Subject: [PATCH 84/91] Fix Replace function. Adds more tests to all string functions. (#33478) --- .../function/scalar/string/Replace.java | 2 +- .../main/resources/string-functions.sql-spec | 127 ++++++++++++++++++ 2 files changed, 128 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java index 9325986ac1f1c..3834b16ff1e78 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Replace.java @@ -22,7 +22,7 @@ import static java.lang.String.format; import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; -import static org.elasticsearch.xpack.sql.expression.function.scalar.string.SubstringFunctionProcessor.doProcess; +import static org.elasticsearch.xpack.sql.expression.function.scalar.string.ReplaceFunctionProcessor.doProcess; /** * Search the source string for occurrences of the pattern, and replace with the replacement string. 
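The single-line fix in the static import above is the whole change: Replace resolves doProcess through
that import, so it previously compiled against the substring processor rather than the replace processor.
As a quick sketch of the intended replace-all semantics that the new inline specs below exercise (the
expected results in the comments are added here for illustration only and are not part of the spec files):

SELECT REPLACE('Elasticsearch', 'sea', 'A') repl;  -- every occurrence of the pattern is replaced: 'ElasticArch'
SELECT REPLACE('Elasticsearch', 'x', 'A') repl;    -- no match, so the input is returned unchanged: 'Elasticsearch'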
diff --git a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec
index 15bb6dea935c8..c0b0430b27897 100644
--- a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec
+++ b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec
@@ -1,5 +1,6 @@
 stringAscii
 SELECT ASCII(first_name) s FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
+
 stringChar
 SELECT CHAR(emp_no % 10000) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no;
 
@@ -9,6 +10,9 @@ SELECT emp_no, ASCII(first_name) a FROM "test_emp" WHERE ASCII(first_name) < 100
 stringAsciiEqualsConstant
 SELECT emp_no, ASCII(first_name) a, first_name name FROM "test_emp" WHERE ASCII(first_name) = 65 ORDER BY emp_no;
 
+stringAsciiInline
+SELECT ASCII('E') e;
+
 //https://github.com/elastic/elasticsearch/issues/31863
 //stringSelectConstantAsciiEqualsConstant
 //SELECT ASCII('A') = 65 a FROM "test_emp" WHERE ASCII('A') = 65 ORDER BY emp_no;
@@ -16,12 +20,105 @@ SELECT emp_no, ASCII(first_name) a, first_name name FROM "test_emp" WHERE ASCII(
 stringCharFilter
 SELECT emp_no, CHAR(emp_no % 10000) m FROM "test_emp" WHERE CHAR(emp_no % 10000) = 'A';
 
+stringSelectCharInline1
+SELECT CHAR(250) c;
+
+stringSelectCharInline2
+SELECT CHAR(2) c;
+
+charLengthInline1
+SELECT CAST(CHAR_LENGTH('Elasticsearch') AS INT) charlength;
+
+charLengthInline2
+SELECT CAST(CHAR_LENGTH(' Elasticsearch ') AS INT) charlength;
+
+charLengthInline3
+SELECT CAST(CHAR_LENGTH('') AS INT) charlength;
+
+concatInline1
+SELECT CONCAT('Elastic','search') concat;
+
+concatInline2
+SELECT CONCAT(CONCAT('Lucene And ', 'Elastic'),'search') concat;
+
+concatInline3
+SELECT CONCAT(CONCAT('Lucene And ', 'Elastic'),CONCAT('search','')) concat;
+
 lcaseFilter
 SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp" WHERE CHAR(ASCII(LCASE(first_name))) = 'a';
 
+lcaseInline1
+SELECT LCASE('') L;
+
+lcaseInline2
+SELECT LCASE('ElAsTiC fantastic') lower;
+
+leftInline1
+SELECT LEFT('Elasticsearch', 7) leftchars;
+
+leftInline2
+SELECT LEFT('Elasticsearch', 1) leftchars;
+
+leftInline3
+SELECT LEFT('Elasticsearch', 25) leftchars;
+
+leftInline4
+SELECT LEFT('Elasticsearch', LENGTH('abcdefghijklmnop')) leftchars;
+
 ltrimFilter
 SELECT LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob';
 
+ltrimInline1
+SELECT LTRIM(' Elastic ') trimmed;
+
+ltrimInline2
+SELECT LTRIM(' ') trimmed;
+
+locateInline1
+SELECT LOCATE('a', 'Elasticsearch', 8) location;
+
+locateInline2
+SELECT LOCATE('a', 'Elasticsearch') location;
+
+locateInline3
+SELECT LOCATE('x', 'Elasticsearch') location;
+
+insertInline1
+SELECT INSERT('Insert [here] your comment!', 8, 6, '(random thoughts about Elasticsearch)') ins;
+
+insertInline2
+SELECT INSERT('Insert [here] your comment!', 8, 20, '(random thoughts about Elasticsearch)') ins;
+
+insertInline3
+SELECT INSERT('Insert [here] your comment!', 8, 19, '(random thoughts about Elasticsearch)') ins;
+
+positionInline1
+SELECT POSITION('a','Elasticsearch') pos;
+
+positionInline2
+SELECT POSITION('x','Elasticsearch') pos;
+
+repeatInline1
+SELECT REPEAT('Elastic',2) rep;
+
+repeatInline2
+SELECT REPEAT('Elastic',1) rep;
+
+replaceInline1
+SELECT REPLACE('Elasticsearch','sea','A') repl;
+
+replaceInline2
+SELECT REPLACE('Elasticsearch','x','A') repl;
+
+rightInline1
+SELECT RIGHT('Elasticsearch', LENGTH('Search')) rightchars;
+
+rightInline2
+SELECT RIGHT(CONCAT('Elastic','search'), LENGTH('Search')) rightchars;
+
+rightInline3
+SELECT RIGHT('Elasticsearch', 0) rightchars;
+
 // Unsupported yet
 // Functions combined with 'LIKE' should perform the match inside a Painless script, whereas at the moment it's handled as a regular `match` query in ES.
 //ltrimFilterWithLike
@@ -30,15 +127,45 @@ SELECT LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob';
 rtrimFilter
 SELECT RTRIM(first_name) rt FROM "test_emp" WHERE RTRIM(first_name) = 'Johnny';
 
+rtrimInline1
+SELECT RTRIM(' Elastic ') trimmed;
+
+rtrimInline2
+SELECT RTRIM(' ') trimmed;
+
 spaceFilter
 SELECT SPACE(languages) spaces, languages FROM "test_emp" WHERE SPACE(languages) = ' ';
 
 spaceFilterWithLengthFunctions
 SELECT SPACE(languages) spaces, languages, first_name FROM "test_emp" WHERE CHAR_LENGTH(SPACE(languages)) = 3 ORDER BY first_name;
 
+spaceInline1
+SELECT SPACE(5) space;
+
+spaceInline2
+SELECT SPACE(0) space;
+
+substringInline1
+SELECT SUBSTRING('Elasticsearch', 1, 7) sub;
+
+substringInline2
+SELECT SUBSTRING('Elasticsearch', 1, 15) sub;
+
+substringInline3
+SELECT SUBSTRING('Elasticsearch', 10, 10) sub;
+
 ucaseFilter
 SELECT UCASE(gender) uppercased, COUNT(*) count FROM "test_emp" WHERE UCASE(gender) = 'F' GROUP BY UCASE(gender);
 
+ucaseInline1
+SELECT UCASE('ElAsTiC') upper;
+
+ucaseInline2
+SELECT UCASE('') upper;
+
+ucaseInline3
+SELECT UCASE(' elastic ') upper;
+
 //
 // Group and order by
 //

From f598297f55fa60df9fdefa4f34574d485b600b20 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Tue, 11 Sep 2018 09:16:39 +0100
Subject: [PATCH 85/91] Add predicate_token_filter (#33431)

This allows users to filter out tokens from a TokenStream using painless
scripts, instead of having to write specialised Java code and package it
up into a plugin.

The commit also refactors the AnalysisPredicateScript.Token class so that
it wraps an AttributeSource and exposes it read-only.
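As an illustration (a minimal sketch using the same inline predicate as the new docs and
tests below), a filter that keeps only tokens longer than five characters can now be
declared entirely in index settings:

    "filter" : {
      "my_script_filter" : {
        "type" : "predicate_token_filter",
        "script" : {
          "source" : "token.getTerm().length() > 5"
        }
      }
    }

Tokens for which the predicate returns false are removed from the stream, while the
position and offset values of the tokens that remain are unaffected.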
---
 docs/reference/analysis/tokenfilters.asciidoc      |  2 +
 .../predicate-tokenfilter.asciidoc                 | 79 ++++++++++++++
 .../common/AnalysisPredicateScript.java            | 56 ++++++----
 .../analysis/common/CommonAnalysisPlugin.java      |  2 +
 .../PredicateTokenFilterScriptFactory.java         | 73 +++++++++++++
 .../ScriptedConditionTokenFilterFactory.java       | 47 ++++-----
 .../PredicateTokenScriptFilterTests.java           | 89 +++++++++++++++
 .../analysis-common/60_analysis_scripting.yml      | 37 +++++++-
 8 files changed, 341 insertions(+), 44 deletions(-)
 create mode 100644 docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc
 create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PredicateTokenFilterScriptFactory.java
 create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java

diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc
index f531bc5d0e9e3..41bb9d38afb5d 100644
--- a/docs/reference/analysis/tokenfilters.asciidoc
+++ b/docs/reference/analysis/tokenfilters.asciidoc
@@ -37,6 +37,8 @@ include::tokenfilters/multiplexer-tokenfilter.asciidoc[]
 
 include::tokenfilters/condition-tokenfilter.asciidoc[]
 
+include::tokenfilters/predicate-tokenfilter.asciidoc[]
+
 include::tokenfilters/stemmer-tokenfilter.asciidoc[]
 
 include::tokenfilters/stemmer-override-tokenfilter.asciidoc[]
 
diff --git a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc
new file mode 100644
index 0000000000000..bebf7bd80f250
--- /dev/null
+++ b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc
@@ -0,0 +1,79 @@
+[[analysis-predicatefilter-tokenfilter]]
+=== Predicate Token Filter Script
+
+The predicate_token_filter token filter takes a predicate script, and removes tokens that do
+not match the predicate.
+
+[float]
+=== Options
+[horizontal]
+script:: a predicate script that determines whether or not the current token will
+be emitted.  Note that only inline scripts are supported.
+
+[float]
+=== Settings example
+
+You can set it up like:
+
+[source,js]
+--------------------------------------------------
+PUT /condition_example
+{
+    "settings" : {
+        "analysis" : {
+            "analyzer" : {
+                "my_analyzer" : {
+                    "tokenizer" : "standard",
+                    "filter" : [ "my_script_filter" ]
+                }
+            },
+            "filter" : {
+                "my_script_filter" : {
+                    "type" : "predicate_token_filter",
+                    "script" : {
+                        "source" : "token.getTerm().length() > 5"  <1>
+                    }
+                }
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> This will emit tokens that are more than 5 characters long
+
+And test it like:
+
+[source,js]
+--------------------------------------------------
+POST /condition_example/_analyze
+{
+  "analyzer" : "my_analyzer",
+  "text" : "What Flapdoodle"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+And it'd respond:
+
+[source,js]
+--------------------------------------------------
+{
+  "tokens": [
+    {
+      "token": "Flapdoodle",        <1>
+      "start_offset": 5,
+      "end_offset": 15,
+      "type": "<ALPHANUM>",
+      "position": 1                 <2>
+    }
+  ]
+}
+--------------------------------------------------
+// TESTRESPONSE
+
+<1> The token 'What' has been removed from the tokenstream because it does not
+match the predicate.
+<2> The position and offset values are unaffected by the removal of earlier tokens \ No newline at end of file diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java index 7de588a958c77..3bda6f393bfdf 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java @@ -19,6 +19,13 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.KeywordAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.util.AttributeSource; import org.elasticsearch.script.ScriptContext; /** @@ -30,21 +37,40 @@ public abstract class AnalysisPredicateScript { * Encapsulation of the state of the current token */ public static class Token { - public CharSequence term; - public int pos; - public int posInc; - public int posLen; - public int startOffset; - public int endOffset; - public String type; - public boolean isKeyword; + + private final CharTermAttribute termAtt; + private final PositionIncrementAttribute posIncAtt; + private final PositionLengthAttribute posLenAtt; + private final OffsetAttribute offsetAtt; + private final TypeAttribute typeAtt; + private final KeywordAttribute keywordAtt; + + // posInc is always 1 at the beginning of a tokenstream and the convention + // from the _analyze endpoint is that tokenstream positions are 0-based + private int pos = -1; + + /** + * Create a token exposing values from an AttributeSource + */ + public Token(AttributeSource source) { + this.termAtt = source.addAttribute(CharTermAttribute.class); + this.posIncAtt = source.addAttribute(PositionIncrementAttribute.class); + this.posLenAtt = source.addAttribute(PositionLengthAttribute.class); + this.offsetAtt = source.addAttribute(OffsetAttribute.class); + this.typeAtt = source.addAttribute(TypeAttribute.class); + this.keywordAtt = source.addAttribute(KeywordAttribute.class); + } + + public void updatePosition() { + this.pos = this.pos + posIncAtt.getPositionIncrement(); + } public CharSequence getTerm() { - return term; + return termAtt; } public int getPositionIncrement() { - return posInc; + return posIncAtt.getPositionIncrement(); } public int getPosition() { @@ -52,23 +78,23 @@ public int getPosition() { } public int getPositionLength() { - return posLen; + return posLenAtt.getPositionLength(); } public int getStartOffset() { - return startOffset; + return offsetAtt.startOffset(); } public int getEndOffset() { - return endOffset; + return offsetAtt.endOffset(); } public String getType() { - return type; + return typeAtt.type(); } public boolean isKeyword() { - return isKeyword; + return keywordAtt.isKeyword(); } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 75ebade0b12bd..175935258ad6e 100644 --- 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -264,6 +264,8 @@ public Map> getTokenFilters() { filters.put("pattern_replace", requiresAnalysisSettings(PatternReplaceTokenFilterFactory::new)); filters.put("persian_normalization", PersianNormalizationFilterFactory::new); filters.put("porter_stem", PorterStemTokenFilterFactory::new); + filters.put("predicate_token_filter", + requiresAnalysisSettings((i, e, n, s) -> new PredicateTokenFilterScriptFactory(i, n, s, scriptService.get()))); filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new); filters.put("reverse", ReverseTokenFilterFactory::new); filters.put("russian_stem", RussianStemTokenFilterFactory::new); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PredicateTokenFilterScriptFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PredicateTokenFilterScriptFactory.java new file mode 100644 index 0000000000000..84f4bb487060c --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PredicateTokenFilterScriptFactory.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package org.elasticsearch.analysis.common;
+
+import org.apache.lucene.analysis.FilteringTokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
+
+import java.io.IOException;
+
+/**
+ * A factory for creating FilteringTokenFilters that determine whether or not to
+ * accept their underlying token by consulting a script
+ */
+public class PredicateTokenFilterScriptFactory extends AbstractTokenFilterFactory {
+
+    private final AnalysisPredicateScript.Factory factory;
+
+    public PredicateTokenFilterScriptFactory(IndexSettings indexSettings, String name, Settings settings, ScriptService scriptService) {
+        super(indexSettings, name, settings);
+        Settings scriptSettings = settings.getAsSettings("script");
+        Script script = Script.parse(scriptSettings);
+        if (script.getType() != ScriptType.INLINE) {
+            throw new IllegalArgumentException("Cannot use stored scripts in tokenfilter [" + name + "]");
+        }
+        this.factory = scriptService.compile(script, AnalysisPredicateScript.CONTEXT);
+    }
+
+    @Override
+    public TokenStream create(TokenStream tokenStream) {
+        return new ScriptFilteringTokenFilter(tokenStream, factory.newInstance());
+    }
+
+    private static class ScriptFilteringTokenFilter extends FilteringTokenFilter {
+
+        final AnalysisPredicateScript script;
+        final AnalysisPredicateScript.Token token;
+
+        ScriptFilteringTokenFilter(TokenStream in, AnalysisPredicateScript script) {
+            super(in);
+            this.script = script;
+            this.token = new AnalysisPredicateScript.Token(this);
+        }
+
+        @Override
+        protected boolean accept() throws IOException {
+            token.updatePosition();
+            return script.execute(token);
+        }
+    }
+}
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java
index cf7fd5b047a89..56f60bb874a5b 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java
@@ -21,12 +21,6 @@
 
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter;
-import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
-import org.apache.lucene.analysis.tokenattributes.KeywordAttribute;
-import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
-import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
-import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
@@ -36,6 +30,7 @@
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptType;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -76,30 +71,26 @@ public TokenStream create(TokenStream tokenStream) {
             }
             return in;
         };
-        AnalysisPredicateScript script = factory.newInstance();
-        final AnalysisPredicateScript.Token token = new AnalysisPredicateScript.Token();
-        return new ConditionalTokenFilter(tokenStream, filter) {
+        return new ScriptedConditionTokenFilter(tokenStream, filter, factory.newInstance());
+    }
 
-            CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
-            PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
-            PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class);
-            OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
-            TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
-            KeywordAttribute keywordAtt = addAttribute(KeywordAttribute.class);
+    private static class ScriptedConditionTokenFilter extends ConditionalTokenFilter {
 
-            @Override
-            protected boolean shouldFilter() {
-                token.term = termAtt;
-                token.posInc = posIncAtt.getPositionIncrement();
-                token.pos += token.posInc;
-                token.posLen = posLenAtt.getPositionLength();
-                token.startOffset = offsetAtt.startOffset();
-                token.endOffset = offsetAtt.endOffset();
-                token.type = typeAtt.type();
-                token.isKeyword = keywordAtt.isKeyword();
-                return script.execute(token);
-            }
-        };
+        private final AnalysisPredicateScript script;
+        private final AnalysisPredicateScript.Token token;
+
+        ScriptedConditionTokenFilter(TokenStream input, Function<TokenStream, TokenStream> inputFactory,
+                                     AnalysisPredicateScript script) {
+            super(input, inputFactory);
+            this.script = script;
+            this.token = new AnalysisPredicateScript.Token(this);
+        }
+
+        @Override
+        protected boolean shouldFilter() throws IOException {
+            token.updatePosition();
+            return script.execute(token);
+        }
     }
 
     @Override
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java
new file mode 100644
index 0000000000000..18afbdcecb3e6
--- /dev/null
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.analysis.common;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.IndexAnalyzers;
+import org.elasticsearch.index.analysis.NamedAnalyzer;
+import org.elasticsearch.indices.analysis.AnalysisModule;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.test.ESTokenStreamTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+
+import java.io.IOException;
+import java.util.Collections;
+
+public class PredicateTokenScriptFilterTests extends ESTokenStreamTestCase {
+
+    public void testSimpleFilter() throws IOException {
+        Settings settings = Settings.builder()
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+            .build();
+        Settings indexSettings = Settings.builder()
+            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
+            .put("index.analysis.filter.f.type", "predicate_token_filter")
+            .put("index.analysis.filter.f.script.source", "token.getTerm().length() > 5")
+            .put("index.analysis.analyzer.myAnalyzer.type", "custom")
+            .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard")
+            .putList("index.analysis.analyzer.myAnalyzer.filter", "f")
+            .build();
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
+
+        AnalysisPredicateScript.Factory factory = () -> new AnalysisPredicateScript() {
+            @Override
+            public boolean execute(Token token) {
+                return token.getTerm().length() > 5;
+            }
+        };
+
+        @SuppressWarnings("unchecked")
+        ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){
+            @Override
+            public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) {
+                assertEquals(context, AnalysisPredicateScript.CONTEXT);
+                assertEquals(new Script("token.getTerm().length() > 5"), script);
+                return (FactoryType) factory;
+            }
+        };
+
+        CommonAnalysisPlugin plugin = new CommonAnalysisPlugin();
+        plugin.createComponents(null, null, null, null, scriptService, null, null, null, null);
+        AnalysisModule module
+            = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin));
+
+        IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings);
+
+        try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) {
+            assertNotNull(analyzer);
+            assertAnalyzesTo(analyzer, "Vorsprung Durch Technik", new String[]{
+                "Vorsprung", "Technik"
+            });
+        }
+
+    }
+
+}
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml
index 4305e5db0af37..2015fe31fccb5 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml
@@ -28,9 +28,44 @@
             - type: condition
               filter: [ "lowercase" ]
               script:
-                source: "token.position > 1 && token.positionIncrement > 0 && token.startOffset > 0 && token.endOffset > 0 && (token.positionLength == 1 || token.type == \"a\" || token.keyword)"
+                source: "token.position >= 1 && token.positionIncrement > 0 && token.startOffset > 0 && token.endOffset > 0 && (token.positionLength == 1 || token.type == \"a\" || token.keyword)"
 
     - length: { tokens: 3 }
     - match: { tokens.0.token: "Vorsprung" }
     - match: { tokens.1.token: "durch" }
     - match: { tokens.2.token: "technik" }
+
+---
+"script_filter":
+  - do:
+      indices.analyze:
+        body:
+          text: "Vorsprung Durch Technik"
+          tokenizer: "whitespace"
+          filter:
+            - type: predicate_token_filter
+              script:
+                source: "token.term.length() > 5"
+
+  - length: { tokens: 2 }
+  - match: { tokens.0.token: "Vorsprung" }
+  - match: { tokens.1.token: "Technik" }
+
+---
+"script_filter_position":
+  - do:
+      indices.analyze:
+        body:
+          text: "a b c d e f g h"
+          tokenizer: "whitespace"
+          filter:
+            - type: predicate_token_filter
+              script:
+                source: "token.position >= 4"
+
+  - length: { tokens: 4 }
+  - match: { tokens.0.token: "e" }
+  - match: { tokens.1.token: "f" }
+  - match: { tokens.2.token: "g" }
+  - match: { tokens.3.token: "h" }
+

From a3e1f1e46f444e656f1eed62736c85e0c2d903e6 Mon Sep 17 00:00:00 2001
From: Andrei Stefan
Date: Tue, 11 Sep 2018 14:35:34 +0300
Subject: [PATCH 86/91] SQL: Adds MONTHNAME, DAYNAME and QUARTER functions (#33411)

* Added monthname, dayname and quarter functions
* Updated docs tests with the new functions
---
 .../expression/function/FunctionRegistry.java      | 18 ++--
 .../function/scalar/Processors.java                |  6 +-
 .../scalar/datetime/BaseDateTimeFunction.java      | 70 +++++++++++++
 .../datetime/BaseDateTimeProcessor.java            | 59 +++++++++++
 .../scalar/datetime/DateTimeFunction.java          | 67 ++-----------
 .../scalar/datetime/DateTimeProcessor.java         | 38 ++-----
 .../function/scalar/datetime/DayName.java          | 49 ++++++++++
 .../function/scalar/datetime/DayOfMonth.java       |  2 +-
 .../function/scalar/datetime/DayOfWeek.java        |  2 +-
 .../function/scalar/datetime/DayOfYear.java        |  2 +-
 .../function/scalar/datetime/HourOfDay.java        |  2 +-
 .../function/scalar/datetime/MinuteOfDay.java      |  2 +-
 .../function/scalar/datetime/MinuteOfHour.java     |  2 +-
 .../function/scalar/datetime/MonthName.java        | 50 ++++++++++
 .../function/scalar/datetime/MonthOfYear.java      |  2 +-
 .../datetime/NamedDateTimeFunction.java            | 94 ++++++++++++++++++
 .../datetime/NamedDateTimeProcessor.java           | 98 +++++++++++++++++++
 .../function/scalar/datetime/Quarter.java          | 94 ++++++++++++++++++
 .../scalar/datetime/QuarterProcessor.java          | 60 ++++++++++++
 .../scalar/datetime/SecondOfMinute.java            |  2 +-
 .../function/scalar/datetime/WeekOfYear.java       |  2 +-
 .../function/scalar/datetime/Year.java             |  2 +-
 .../whitelist/InternalSqlScriptUtils.java          | 14 +++
 .../xpack/sql/plugin/sql_whitelist.txt             |  3 +
 .../datetime/NamedDateTimeProcessorTests.java      | 89 +++++++++++++++++
 .../datetime/QuarterProcessorTests.java            | 46 +++++++++
 .../xpack/qa/sql/cli/ShowTestCase.java             |  2 +
 .../sql/src/main/resources/command.csv-spec        |  7 ++
 .../sql/src/main/resources/datetime.sql-spec       | 55 ++++++++++-
 .../qa/sql/src/main/resources/docs.csv-spec        |  9 +-
 30 files changed, 838 insertions(+), 110 deletions(-)
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java
 create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java
 create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java
 create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java

diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java
index c9d652861f800..820aafb011628 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java
@@ -21,13 +21,16 @@
 import org.elasticsearch.xpack.sql.expression.function.aggregate.SumOfSquares;
 import org.elasticsearch.xpack.sql.expression.function.aggregate.VarPop;
 import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Mod;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayName;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfMonth;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfWeek;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DayOfYear;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.HourOfDay;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MinuteOfDay;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MinuteOfHour;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthName;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.MonthOfYear;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Quarter;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.SecondOfMinute;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.WeekOfYear;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.Year;
@@ -62,21 +65,21 @@
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.BitLength;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Char;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.CharLength;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.LCase;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.LTrim;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.Length;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.RTrim;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.Space;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.UCase;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Concat;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Insert;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.LCase;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.LTrim;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Left;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.Length;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Locate;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Position;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.RTrim;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Repeat;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Replace;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Right;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.Space;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.Substring;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.UCase;
 import org.elasticsearch.xpack.sql.parser.ParsingException;
 import org.elasticsearch.xpack.sql.tree.Location;
 import org.elasticsearch.xpack.sql.util.StringUtils;
@@ -123,6 +126,9 @@ public class FunctionRegistry {
         def(MonthOfYear.class, MonthOfYear::new, "MONTH"),
         def(Year.class, Year::new),
         def(WeekOfYear.class, WeekOfYear::new, "WEEK"),
+        def(DayName.class, DayName::new, "DAYNAME"),
+        def(MonthName.class, MonthName::new, "MONTHNAME"),
+        def(Quarter.class, Quarter::new),
         // Math
         def(Abs.class, Abs::new),
         def(ACos.class, ACos::new),
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java
index 0f36654fa4aff..a62aadab46705 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java
@@ -10,6 +10,8 @@
 import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.BinaryArithmeticProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.UnaryArithmeticProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.BinaryMathProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.math.MathProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.BucketExtractorProcessor;
@@ -17,13 +19,13 @@
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
-import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.ConcatFunctionProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.InsertFunctionProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.LocateFunctionProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.ReplaceFunctionProcessor;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.string.SubstringFunctionProcessor;
 
 import java.util.ArrayList;
@@ -52,6 +54,8 @@ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
         entries.add(new Entry(Processor.class, BinaryMathProcessor.NAME, BinaryMathProcessor::new));
         // datetime
         entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new));
+        entries.add(new Entry(Processor.class, NamedDateTimeProcessor.NAME, NamedDateTimeProcessor::new));
+        entries.add(new Entry(Processor.class, QuarterProcessor.NAME, QuarterProcessor::new));
         // math
         entries.add(new Entry(Processor.class, MathProcessor.NAME, MathProcessor::new));
         // string
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java
new file mode 100644
index 0000000000000..2213fad8c8d9f
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeFunction.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Expressions;
+import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
+
+import java.util.TimeZone;
+
+abstract class BaseDateTimeFunction extends UnaryScalarFunction {
+
+    private final TimeZone timeZone;
+    private final String name;
+
+    BaseDateTimeFunction(Location location, Expression field, TimeZone timeZone) {
+        super(location, field);
+        this.timeZone = timeZone;
+
+        StringBuilder sb = new StringBuilder(super.name());
+        // add timezone as last argument
+        sb.insert(sb.length() - 1, " [" + timeZone.getID() + "]");
+
+        this.name = sb.toString();
+    }
+
+    @Override
+    protected final NodeInfo<BaseDateTimeFunction> info() {
+        return NodeInfo.create(this, ctorForInfo(), field(), timeZone());
+    }
+
+    protected abstract NodeInfo.NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo();
+
+    @Override
+    protected TypeResolution resolveType() {
+        if (field().dataType() == DataType.DATE) {
+            return TypeResolution.TYPE_RESOLVED;
+        }
+        return new TypeResolution("Function [" + functionName() + "] cannot be applied on a non-date expression (["
+            + Expressions.name(field()) + "] of type [" + field().dataType().esType + "])");
+    }
+
+    public TimeZone timeZone() {
+        return timeZone;
+    }
+
+    @Override
+    public String name() {
+        return name;
+    }
+
+    @Override
+    public boolean foldable() {
+        return field().foldable();
+    }
+
+    @Override
+    protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
+        throw new UnsupportedOperationException();
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java
new file mode 100644
index 0000000000000..95547ded22274
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/BaseDateTimeProcessor.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
+import org.joda.time.ReadableInstant;
+
+import java.io.IOException;
+import java.util.TimeZone;
+
+public abstract class BaseDateTimeProcessor implements Processor {
+
+    private final TimeZone timeZone;
+
+    BaseDateTimeProcessor(TimeZone timeZone) {
+        this.timeZone = timeZone;
+    }
+
+    BaseDateTimeProcessor(StreamInput in) throws IOException {
+        timeZone = TimeZone.getTimeZone(in.readString());
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeString(timeZone.getID());
+    }
+
+    TimeZone timeZone() {
+        return timeZone;
+    }
+
+    @Override
+    public Object process(Object l) {
+        if (l == null) {
+            return null;
+        }
+        long millis;
+        if (l instanceof String) {
+            // 6.4+
+            millis = Long.parseLong(l.toString());
+        } else if (l instanceof ReadableInstant) {
+            // 6.3-
+            millis = ((ReadableInstant) l).getMillis();
+        } else {
+            throw new SqlIllegalArgumentException("A string or a date is required; received {}", l);
+        }
+
+        return doProcess(millis);
+    }
+
+    abstract Object doProcess(long millis);
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java
index 606728222787b..d87e15084a422 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeFunction.java
@@ -6,10 +6,7 @@
 package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
 
 import org.elasticsearch.xpack.sql.expression.Expression;
-import org.elasticsearch.xpack.sql.expression.Expressions;
 import org.elasticsearch.xpack.sql.expression.FieldAttribute;
-import org.elasticsearch.xpack.sql.expression.function.aggregate.AggregateFunctionAttribute;
-import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
 import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeProcessor.DateTimeExtractor;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
 import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
@@ -17,7 +14,6 @@
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder;
 import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.tree.Location;
-import org.elasticsearch.xpack.sql.tree.NodeInfo;
 import org.elasticsearch.xpack.sql.type.DataType;
 import org.joda.time.DateTime;
 
@@ -31,45 +27,10 @@
 import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
 import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate;
 
-public abstract class DateTimeFunction extends UnaryScalarFunction {
-
-    private final TimeZone timeZone;
-    private final String name;
+public abstract class DateTimeFunction extends BaseDateTimeFunction {
 
     DateTimeFunction(Location location, Expression field, TimeZone timeZone) {
-        super(location, field);
-        this.timeZone = timeZone;
-
-        StringBuilder sb = new StringBuilder(super.name());
-        // add timezone as last argument
-        sb.insert(sb.length() - 1, " [" + timeZone.getID() + "]");
-
-        this.name = sb.toString();
-    }
-
-    @Override
-    protected final NodeInfo<DateTimeFunction> info() {
-        return NodeInfo.create(this, ctorForInfo(), field(), timeZone());
-    }
-
-    protected abstract NodeInfo.NodeCtor2<Expression, TimeZone, DateTimeFunction> ctorForInfo();
-
-    @Override
-    protected TypeResolution resolveType() {
-        if (field().dataType() == DataType.DATE) {
-            return TypeResolution.TYPE_RESOLVED;
-        }
-        return new TypeResolution("Function [" + functionName() + "] cannot be applied on a non-date expression (["
-            + Expressions.name(field()) + "] of type [" + field().dataType().esType + "])");
-    }
-
-    public TimeZone timeZone() {
-        return timeZone;
-    }
-
-    @Override
-    public boolean foldable() {
-        return field().foldable();
+        super(location, field, timeZone);
     }
 
     @Override
@@ -79,7 +40,7 @@ public Object fold() {
             return null;
         }
 
-        return dateTimeChrono(folded.getMillis(), timeZone.getID(), chronoField().name());
+        return dateTimeChrono(folded.getMillis(), timeZone().getID(), chronoField().name());
     }
 
     public static Integer dateTimeChrono(long millis, String tzId, String chronoName) {
@@ -94,27 +55,21 @@ protected ScriptTemplate asScriptFrom(FieldAttribute field) {
         String template = null;
         template = formatTemplate("{sql}.dateTimeChrono(doc[{}].value.millis, {}, {})");
         params.variable(field.name())
-              .variable(timeZone.getID())
+              .variable(timeZone().getID())
               .variable(chronoField().name());
 
         return new ScriptTemplate(template, params.build(), dataType());
     }
-
-    @Override
-    protected ScriptTemplate asScriptFrom(AggregateFunctionAttribute aggregate) {
-        throw new UnsupportedOperationException();
-    }
-
     /**
      * Used for generating the painless script version of this function when the time zone is not UTC
     */
     protected abstract ChronoField chronoField();
 
     @Override
-    protected final ProcessorDefinition makeProcessorDefinition() {
+    protected ProcessorDefinition makeProcessorDefinition() {
         return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()),
-            new DateTimeProcessor(extractor(), timeZone));
+            new DateTimeProcessor(extractor(), timeZone()));
     }
 
     protected abstract DateTimeExtractor extractor();
@@ -127,12 +82,6 @@ public DataType dataType() {
     // used for applying ranges
     public abstract String dateTimeFormat();
 
-    // add tz along the rest of the params
-    @Override
-    public String name() {
-        return name;
-    }
-
     @Override
     public boolean equals(Object obj) {
         if (obj == null || obj.getClass() != getClass()) {
@@ -140,11 +89,11 @@ public boolean equals(Object obj) {
         }
         DateTimeFunction other = (DateTimeFunction) obj;
         return Objects.equals(other.field(), field())
-            && Objects.equals(other.timeZone, timeZone);
+            && Objects.equals(other.timeZone(), timeZone());
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(field(), timeZone);
+        return Objects.hash(field(), timeZone());
     }
 }
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java
index d135b8a086566..d34b1c1e39053 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DateTimeProcessor.java
@@ -7,19 +7,16 @@
 
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
-import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeFieldType;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableDateTime;
-import org.joda.time.ReadableInstant;
 
 import java.io.IOException;
 import java.util.Objects;
 import java.util.TimeZone;
 
-public class DateTimeProcessor implements Processor {
+public class DateTimeProcessor extends BaseDateTimeProcessor {
 
     public enum DateTimeExtractor {
         DAY_OF_MONTH(DateTimeFieldType.dayOfMonth()),
@@ -45,24 +42,22 @@ public int extract(ReadableDateTime dt) {
     }
 
     public static final String NAME = "dt";
+
     private final DateTimeExtractor extractor;
-    private final TimeZone timeZone;
 
     public DateTimeProcessor(DateTimeExtractor extractor, TimeZone timeZone) {
+        super(timeZone);
         this.extractor = extractor;
-        this.timeZone = timeZone;
     }
 
     public DateTimeProcessor(StreamInput in) throws IOException {
+        super(in);
         extractor = in.readEnum(DateTimeExtractor.class);
-        timeZone = TimeZone.getTimeZone(in.readString());
     }
 
     @Override
     public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
         out.writeEnum(extractor);
-        out.writeString(timeZone.getID());
     }
 
     @Override
@@ -75,32 +70,15 @@ DateTimeExtractor extractor() {
     }
 
     @Override
-    public Object process(Object l) {
-        if (l == null) {
-            return null;
-        }
-
-        ReadableDateTime dt;
-        if (l instanceof String) {
-            // 6.4+
-            final long millis = Long.parseLong(l.toString());
-            dt = new DateTime(millis, DateTimeZone.forTimeZone(timeZone));
-        } else if (l instanceof ReadableInstant) {
-            // 6.3-
-            dt = (ReadableDateTime) l;
-            if (!TimeZone.getTimeZone("UTC").equals(timeZone)) {
-                dt = dt.toDateTime().withZone(DateTimeZone.forTimeZone(timeZone));
-            }
-        } else {
-            throw new SqlIllegalArgumentException("A string or a date is required; received {}", l);
-        }
+    public Object doProcess(long millis) {
+        ReadableDateTime dt = new DateTime(millis, DateTimeZone.forTimeZone(timeZone()));
 
         return extractor.extract(dt);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(extractor, timeZone);
+        return Objects.hash(extractor, timeZone());
     }
 
     @Override
@@ -110,7 +88,7 @@ public boolean equals(Object obj) {
         }
         DateTimeProcessor other = (DateTimeProcessor) obj;
         return Objects.equals(extractor, other.extractor)
-            && Objects.equals(timeZone, other.timeZone);
+            && Objects.equals(timeZone(), other.timeZone());
     }
 
     @Override
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java
new file mode 100644
index 0000000000000..2f5ba7eeaca9f
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayName.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2;
+
+import java.util.TimeZone;
+
+/**
+ * Extract the day of the week from a datetime in text format (Monday, Tuesday etc.)
+ */
+public class DayName extends NamedDateTimeFunction {
+    protected static final String DAY_NAME_FORMAT = "EEEE";
+
+    public DayName(Location location, Expression field, TimeZone timeZone) {
+        super(location, field, timeZone);
+    }
+
+    @Override
+    protected NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo() {
+        return DayName::new;
+    }
+
+    @Override
+    protected DayName replaceChild(Expression newChild) {
+        return new DayName(location(), newChild, timeZone());
+    }
+
+    @Override
+    protected String dateTimeFormat() {
+        return DAY_NAME_FORMAT;
+    }
+
+    @Override
+    protected NameExtractor nameExtractor() {
+        return NameExtractor.DAY_NAME;
+    }
+
+    @Override
+    public String extractName(long millis, String tzId) {
+        return nameExtractor().extract(millis, tzId);
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java
index 1ac3771d49db1..ebb576b4648e1 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfMonth.java
@@ -22,7 +22,7 @@ public DayOfMonth(Location location, Expression field, TimeZone timeZone) {
     }
 
     @Override
-    protected NodeCtor2<Expression, TimeZone, DateTimeFunction> ctorForInfo() {
+    protected NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo() {
         return DayOfMonth::new;
     }
 
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java
index 7582ece6250bd..d840d4d71df0a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfWeek.java
@@ -22,7 +22,7 @@ public DayOfWeek(Location location, Expression field, TimeZone timeZone) {
     }
 
     @Override
-    protected NodeCtor2<Expression, TimeZone, DateTimeFunction> ctorForInfo() {
+    protected NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo() {
         return DayOfWeek::new;
     }
 
diff --git 
a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java index 8f5e06188327d..1fa248d9c2063 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/DayOfYear.java @@ -23,7 +23,7 @@ public DayOfYear(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return DayOfYear::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java index 5a2bc681ab882..4df28bddad088 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/HourOfDay.java @@ -22,7 +22,7 @@ public HourOfDay(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return HourOfDay::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java index 2840fa0c21b85..ef0fb0bce18aa 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfDay.java @@ -23,7 +23,7 @@ public MinuteOfDay(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MinuteOfDay::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java index d577bb916966a..f5ab095ef2455 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MinuteOfHour.java @@ -22,7 +22,7 @@ public MinuteOfHour(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MinuteOfHour::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java new file mode 100644 index 0000000000000..170c80c10f91a --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthName.java @@ -0,0 +1,50 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; + +import java.util.TimeZone; + +/** + * Extract the month from a datetime in text format (January, February etc.) + */ +public class MonthName extends NamedDateTimeFunction { + protected static final String MONTH_NAME_FORMAT = "MMMM"; + + public MonthName(Location location, Expression field, TimeZone timeZone) { + super(location, field, timeZone); + } + + @Override + protected NodeCtor2 ctorForInfo() { + return MonthName::new; + } + + @Override + protected MonthName replaceChild(Expression newChild) { + return new MonthName(location(), newChild, timeZone()); + } + + @Override + protected String dateTimeFormat() { + return MONTH_NAME_FORMAT; + } + + @Override + public String extractName(long millis, String tzId) { + return nameExtractor().extract(millis, tzId); + } + + @Override + protected NameExtractor nameExtractor() { + return NameExtractor.MONTH_NAME; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java index 3a2d51bee78ad..503a771611e7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/MonthOfYear.java @@ -22,7 +22,7 @@ public MonthOfYear(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return MonthOfYear::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java new file mode 100644 index 0000000000000..c3e10981ce1fe --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeFunction.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; +import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.type.DataType; +import org.elasticsearch.xpack.sql.util.StringUtils; +import org.joda.time.DateTime; + +import java.util.Objects; +import java.util.TimeZone; + +import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; +import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; + +/* + * Base class for "naming" date/time functions like month_name and day_name + */ +abstract class NamedDateTimeFunction extends BaseDateTimeFunction { + + NamedDateTimeFunction(Location location, Expression field, TimeZone timeZone) { + super(location, field, timeZone); + } + + @Override + public Object fold() { + DateTime folded = (DateTime) field().fold(); + if (folded == null) { + return null; + } + + return extractName(folded.getMillis(), timeZone().getID()); + } + + public abstract String extractName(long millis, String tzId); + + @Override + protected ScriptTemplate asScriptFrom(FieldAttribute field) { + ParamsBuilder params = paramsBuilder(); + + String template = null; + template = formatTemplate(formatMethodName("{sql}.{method_name}(doc[{}].value.millis, {})")); + params.variable(field.name()) + .variable(timeZone().getID()); + + return new ScriptTemplate(template, params.build(), dataType()); + } + + private String formatMethodName(String template) { + // the Painless method name will be the enum's lower camelcase name + return template.replace("{method_name}", StringUtils.underscoreToLowerCamelCase(nameExtractor().toString())); + } + + @Override + protected final ProcessorDefinition makeProcessorDefinition() { + return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), + new NamedDateTimeProcessor(nameExtractor(), timeZone())); + } + + protected abstract NameExtractor nameExtractor(); + + protected abstract String dateTimeFormat(); + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + NamedDateTimeFunction other = (NamedDateTimeFunction) obj; + return Objects.equals(other.field(), field()) + && Objects.equals(other.timeZone(), timeZone()); + } + + @Override + public int hashCode() { + return Objects.hash(field(), timeZone()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java new file mode 
100644
index 0000000000000..478ad8ee09f04
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessor.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.TimeZone;
+import java.util.function.BiFunction;
+
+public class NamedDateTimeProcessor extends BaseDateTimeProcessor {
+
+    public enum NameExtractor {
+        // for the moment we'll use no specific Locale, but we might consider introducing a Locale parameter, just like the timeZone one
+        DAY_NAME((Long millis, String tzId) -> {
+            ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId));
+            return time.format(DateTimeFormatter.ofPattern(DayName.DAY_NAME_FORMAT, Locale.ROOT));
+        }),
+        MONTH_NAME((Long millis, String tzId) -> {
+            ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId));
+            return time.format(DateTimeFormatter.ofPattern(MonthName.MONTH_NAME_FORMAT, Locale.ROOT));
+        });
+
+        private final BiFunction<Long, String, String> apply;
+
+        NameExtractor(BiFunction<Long, String, String> apply) {
+            this.apply = apply;
+        }
+
+        public final String extract(Long millis, String tzId) {
+            return apply.apply(millis, tzId);
+        }
+    }
+
+    public static final String NAME = "ndt";
+
+    private final NameExtractor extractor;
+
+    public NamedDateTimeProcessor(NameExtractor extractor, TimeZone timeZone) {
+        super(timeZone);
+        this.extractor = extractor;
+    }
+
+    public NamedDateTimeProcessor(StreamInput in) throws IOException {
+        super(in);
+        extractor = in.readEnum(NameExtractor.class);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeEnum(extractor);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    NameExtractor extractor() {
+        return extractor;
+    }
+
+    @Override
+    public Object doProcess(long millis) {
+        return extractor.extract(millis, timeZone().getID());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(extractor, timeZone());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        NamedDateTimeProcessor other = (NamedDateTimeProcessor) obj;
+        return Objects.equals(extractor, other.extractor)
+            && Objects.equals(timeZone(), other.timeZone());
+    }
+
+    @Override
+    public String toString() {
+        return extractor.toString();
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java
new file mode 100644
index 0000000000000..22e368b0ec6ba
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Quarter.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright Elasticsearch B.V.
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder; +import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo.NodeCtor2; +import org.elasticsearch.xpack.sql.type.DataType; +import org.joda.time.DateTime; + +import java.util.Objects; +import java.util.TimeZone; + +import static org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor.quarter; +import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; +import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate.formatTemplate; + +public class Quarter extends BaseDateTimeFunction { + + protected static final String QUARTER_FORMAT = "q"; + + public Quarter(Location location, Expression field, TimeZone timeZone) { + super(location, field, timeZone); + } + + @Override + public Object fold() { + DateTime folded = (DateTime) field().fold(); + if (folded == null) { + return null; + } + + return quarter(folded.getMillis(), timeZone().getID()); + } + + @Override + protected ScriptTemplate asScriptFrom(FieldAttribute field) { + ParamsBuilder params = paramsBuilder(); + + String template = null; + template = formatTemplate("{sql}.quarter(doc[{}].value.millis, {})"); + params.variable(field.name()) + .variable(timeZone().getID()); + + return new ScriptTemplate(template, params.build(), dataType()); + } + + @Override + protected NodeCtor2 ctorForInfo() { + return Quarter::new; + } + + @Override + protected Quarter replaceChild(Expression newChild) { + return new Quarter(location(), newChild, timeZone()); + } + + @Override + protected ProcessorDefinition makeProcessorDefinition() { + return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), + new QuarterProcessor(timeZone())); + } + + @Override + public DataType dataType() { + return DataType.INTEGER; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + BaseDateTimeFunction other = (BaseDateTimeFunction) obj; + return Objects.equals(other.field(), field()) + && Objects.equals(other.timeZone(), timeZone()); + } + + @Override + public int hashCode() { + return Objects.hash(field(), timeZone()); + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java new file mode 100644 index 0000000000000..c6904216d0fec --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessor.java @@ 
-0,0 +1,60 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.common.io.stream.StreamInput;
+
+import java.io.IOException;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.Locale;
+import java.util.Objects;
+import java.util.TimeZone;
+
+public class QuarterProcessor extends BaseDateTimeProcessor {
+
+    public QuarterProcessor(TimeZone timeZone) {
+        super(timeZone);
+    }
+
+    public QuarterProcessor(StreamInput in) throws IOException {
+        super(in);
+    }
+
+    public static final String NAME = "q";
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public Object doProcess(long millis) {
+        return quarter(millis, timeZone().getID());
+    }
+
+    public static Integer quarter(long millis, String tzId) {
+        ZonedDateTime time = ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneId.of(tzId));
+        return Integer.valueOf(time.format(DateTimeFormatter.ofPattern(Quarter.QUARTER_FORMAT, Locale.ROOT)));
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(timeZone());
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        QuarterProcessor other = (QuarterProcessor) obj;
+        return Objects.equals(timeZone(), other.timeZone());
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java
index 883502c017da5..3522eb10ffe80 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/SecondOfMinute.java
@@ -22,7 +22,7 @@ public SecondOfMinute(Location location, Expression field, TimeZone timeZone) {
     }

     @Override
-    protected NodeCtor2<Expression, TimeZone, DateTimeFunction> ctorForInfo() {
+    protected NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo() {
         return SecondOfMinute::new;
     }

diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java
index eef2c48ad0f72..59948165f71cb 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/WeekOfYear.java
@@ -22,7 +22,7 @@ public WeekOfYear(Location location, Expression field, TimeZone timeZone) {
     }

     @Override
-    protected NodeCtor2<Expression, TimeZone, DateTimeFunction> ctorForInfo() {
+    protected NodeCtor2<Expression, TimeZone, BaseDateTimeFunction> ctorForInfo() {
         return WeekOfYear::new;
     }

diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java
index 28d475e4c7085..2b065329be305 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java
+++
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/Year.java @@ -22,7 +22,7 @@ public Year(Location location, Expression field, TimeZone timeZone) { } @Override - protected NodeCtor2 ctorForInfo() { + protected NodeCtor2 ctorForInfo() { return Year::new; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java index 12faeb78b662d..f0a79f15e36dd 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java @@ -6,6 +6,8 @@ package org.elasticsearch.xpack.sql.expression.function.scalar.whitelist; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.QuarterProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringNumericProcessor.BinaryStringNumericOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.string.BinaryStringStringProcessor.BinaryStringStringOperation; import org.elasticsearch.xpack.sql.expression.function.scalar.string.ConcatFunctionProcessor; @@ -28,6 +30,18 @@ public static Integer dateTimeChrono(long millis, String tzId, String chronoName return DateTimeFunction.dateTimeChrono(millis, tzId, chronoName); } + public static String dayName(long millis, String tzId) { + return NameExtractor.DAY_NAME.extract(millis, tzId); + } + + public static String monthName(long millis, String tzId) { + return NameExtractor.MONTH_NAME.extract(millis, tzId); + } + + public static Integer quarter(long millis, String tzId) { + return QuarterProcessor.quarter(millis, tzId); + } + public static Integer ascii(String s) { return (Integer) StringOperation.ASCII.apply(s); } diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt index 8f86685889c55..0f12d32d44e8b 100644 --- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt +++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt @@ -9,6 +9,9 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils { Integer dateTimeChrono(long, String, String) + String dayName(long, String) + String monthName(long, String) + Integer quarter(long, String) Integer ascii(String) Integer bitLength(String) String character(Number) diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java new file mode 100644 index 0000000000000..3d57675e20919 --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/NamedDateTimeProcessorTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.datetime; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.NamedDateTimeProcessor.NameExtractor; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.TimeZone; + +public class NamedDateTimeProcessorTests extends AbstractWireSerializingTestCase { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + public static NamedDateTimeProcessor randomNamedDateTimeProcessor() { + return new NamedDateTimeProcessor(randomFrom(NameExtractor.values()), UTC); + } + + @Override + protected NamedDateTimeProcessor createTestInstance() { + return randomNamedDateTimeProcessor(); + } + + @Override + protected Reader instanceReader() { + return NamedDateTimeProcessor::new; + } + + @Override + protected NamedDateTimeProcessor mutateInstance(NamedDateTimeProcessor instance) throws IOException { + NameExtractor replaced = randomValueOtherThan(instance.extractor(), () -> randomFrom(NameExtractor.values())); + return new NamedDateTimeProcessor(replaced, UTC); + } + + public void testValidDayNamesInUTC() { + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, UTC); + assertEquals("Thursday", proc.process("0")); + assertEquals("Saturday", proc.process("-64164233612338")); + assertEquals("Monday", proc.process("64164233612338")); + + assertEquals("Thursday", proc.process(new DateTime(0L, DateTimeZone.UTC))); + assertEquals("Thursday", proc.process(new DateTime(-5400, 12, 25, 2, 0, DateTimeZone.UTC))); + assertEquals("Friday", proc.process(new DateTime(30, 2, 1, 12, 13, DateTimeZone.UTC))); + assertEquals("Tuesday", proc.process(new DateTime(10902, 8, 22, 11, 11, DateTimeZone.UTC))); + } + + public void testValidDayNamesWithNonUTCTimeZone() { + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.DAY_NAME, TimeZone.getTimeZone("GMT-10:00")); + assertEquals("Wednesday", proc.process("0")); + assertEquals("Friday", proc.process("-64164233612338")); + assertEquals("Monday", proc.process("64164233612338")); + + assertEquals("Wednesday", proc.process(new DateTime(0L, DateTimeZone.UTC))); + assertEquals("Wednesday", proc.process(new DateTime(-5400, 12, 25, 2, 0, DateTimeZone.UTC))); + assertEquals("Friday", proc.process(new DateTime(30, 2, 1, 12, 13, DateTimeZone.UTC))); + assertEquals("Tuesday", proc.process(new DateTime(10902, 8, 22, 11, 11, DateTimeZone.UTC))); + assertEquals("Monday", proc.process(new DateTime(10902, 8, 22, 9, 59, DateTimeZone.UTC))); + } + + public void testValidMonthNamesInUTC() { + NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, UTC); + assertEquals("January", proc.process("0")); + assertEquals("September", proc.process("-64164233612338")); + assertEquals("April", proc.process("64164233612338")); + + assertEquals("January", proc.process(new DateTime(0L, DateTimeZone.UTC))); + assertEquals("December", proc.process(new DateTime(-5400, 12, 25, 10, 10, DateTimeZone.UTC))); + assertEquals("February", proc.process(new DateTime(30, 2, 1, 12, 13, DateTimeZone.UTC))); + assertEquals("August", proc.process(new DateTime(10902, 8, 22, 11, 11, DateTimeZone.UTC))); + } + + 
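// Note: the String arguments here ("0", "64164233612338", ...) are epoch millis and take
+    // the 6.4+ branch of BaseDateTimeProcessor.process (Long.parseLong), while the DateTime
+    // arguments take the 6.3- ReadableInstant branch; both branches feed the same
+    // NameExtractor, which is why e.g. proc.process("0") and
+    // proc.process(new DateTime(0L, DateTimeZone.UTC)) agree on "Thursday" above.
+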
public void testValidMonthNamesWithNonUTCTimeZone() {
+        NamedDateTimeProcessor proc = new NamedDateTimeProcessor(NameExtractor.MONTH_NAME, TimeZone.getTimeZone("GMT-3:00"));
+        assertEquals("December", proc.process("0"));
+        assertEquals("August", proc.process("-64165813612338")); // GMT: Tuesday, September 1, -0064 2:53:07.662 AM
+        assertEquals("April", proc.process("64164233612338"));   // GMT: Monday, April 14, 4003 2:13:32.338 PM
+
+        assertEquals("December", proc.process(new DateTime(0L, DateTimeZone.UTC)));
+        assertEquals("November", proc.process(new DateTime(-5400, 12, 1, 1, 1, DateTimeZone.UTC)));
+        assertEquals("February", proc.process(new DateTime(30, 2, 1, 12, 13, DateTimeZone.UTC)));
+        assertEquals("July", proc.process(new DateTime(10902, 8, 1, 2, 59, DateTimeZone.UTC)));
+        assertEquals("August", proc.process(new DateTime(10902, 8, 1, 3, 00, DateTimeZone.UTC)));
+    }
+}
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java
new file mode 100644
index 0000000000000..7747bb8cae4ed
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/datetime/QuarterProcessorTests.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+package org.elasticsearch.xpack.sql.expression.function.scalar.datetime;
+
+import org.elasticsearch.test.ESTestCase;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.util.TimeZone;
+
+public class QuarterProcessorTests extends ESTestCase {
+
+    private static final TimeZone UTC = TimeZone.getTimeZone("UTC");
+
+    public void testQuarterWithUTCTimezone() {
+        QuarterProcessor proc = new QuarterProcessor(UTC);
+
+        assertEquals(1, proc.process(new DateTime(0L, DateTimeZone.UTC)));
+        assertEquals(4, proc.process(new DateTime(-5400, 12, 25, 10, 10, DateTimeZone.UTC)));
+        assertEquals(1, proc.process(new DateTime(30, 2, 1, 12, 13, DateTimeZone.UTC)));
+        assertEquals(3, proc.process(new DateTime(10902, 8, 22, 11, 11, DateTimeZone.UTC)));
+
+        assertEquals(1, proc.process("0"));
+        assertEquals(3, proc.process("-64164233612338"));
+        assertEquals(2, proc.process("64164233612338"));
+    }
+
+    public void testQuarterWithNonUTCTimezone() {
+        QuarterProcessor proc = new QuarterProcessor(TimeZone.getTimeZone("GMT-10:00"));
+        assertEquals(4, proc.process(new DateTime(0L, DateTimeZone.UTC)));
+        assertEquals(4, proc.process(new DateTime(-5400, 1, 1, 5, 0, DateTimeZone.UTC)));
+        assertEquals(1, proc.process(new DateTime(30, 4, 1, 9, 59, DateTimeZone.UTC)));
+
+        proc = new QuarterProcessor(TimeZone.getTimeZone("GMT+10:00"));
+        assertEquals(4, proc.process(new DateTime(10902, 9, 30, 14, 1, DateTimeZone.UTC)));
+        assertEquals(3, proc.process(new DateTime(10902, 9, 30, 13, 59, DateTimeZone.UTC)));
+
+        assertEquals(1, proc.process("0"));
+        assertEquals(3, proc.process("-64164233612338"));
+        assertEquals(2, proc.process("64164233612338"));
+    }
+}
diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java
index f5b9381c54b31..601dca8abd417 100644
---
a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java @@ -65,6 +65,8 @@ public void testShowFunctionsLikeInfix() throws IOException { assertThat(readLine(), RegexMatcher.matches("\\s*DAY_OF_YEAR\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*HOUR_OF_DAY\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*MINUTE_OF_DAY\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAY_NAME\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*DAYNAME\\s*\\|\\s*SCALAR\\s*")); assertEquals("", readLine()); } } diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index 77d397fa2b5be..28aadeded2cc1 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -38,6 +38,11 @@ MONTH |SCALAR YEAR |SCALAR WEEK_OF_YEAR |SCALAR WEEK |SCALAR +DAY_NAME |SCALAR +DAYNAME |SCALAR +MONTH_NAME |SCALAR +MONTHNAME |SCALAR +QUARTER |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -130,6 +135,8 @@ DAY_OF_WEEK |SCALAR DAY_OF_YEAR |SCALAR HOUR_OF_DAY |SCALAR MINUTE_OF_DAY |SCALAR +DAY_NAME |SCALAR +DAYNAME |SCALAR ; showTables diff --git a/x-pack/qa/sql/src/main/resources/datetime.sql-spec b/x-pack/qa/sql/src/main/resources/datetime.sql-spec index 20ea8329c8f4d..81012b7bebf92 100644 --- a/x-pack/qa/sql/src/main/resources/datetime.sql-spec +++ b/x-pack/qa/sql/src/main/resources/datetime.sql-spec @@ -12,34 +12,83 @@ dateTimeDay SELECT DAY(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; + dateTimeDayOfMonth SELECT DAY_OF_MONTH(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; + dateTimeMonth SELECT MONTH(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; + dateTimeYear SELECT YEAR(birth_date) d, last_name l FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; +monthNameFromStringDate +SELECT MONTHNAME(CAST('2018-09-03' AS TIMESTAMP)) month FROM "test_emp" limit 1; + +dayNameFromStringDate +SELECT DAYNAME(CAST('2018-09-03' AS TIMESTAMP)) day FROM "test_emp" limit 1; + +quarterSelect +SELECT QUARTER(hire_date) q, hire_date FROM test_emp ORDER BY hire_date LIMIT 15; + // // Filter // + dateTimeFilterDayOfMonth SELECT DAY_OF_MONTH(birth_date) AS d, last_name l FROM "test_emp" WHERE DAY_OF_MONTH(birth_date) <= 10 ORDER BY emp_no LIMIT 5; + dateTimeFilterMonth SELECT MONTH(birth_date) AS d, last_name l FROM "test_emp" WHERE MONTH(birth_date) <= 5 ORDER BY emp_no LIMIT 5; + dateTimeFilterYear SELECT YEAR(birth_date) AS d, last_name l FROM "test_emp" WHERE YEAR(birth_date) <= 1960 ORDER BY emp_no LIMIT 5; +monthNameFilterWithFirstLetter +SELECT MONTHNAME(hire_date) AS m, hire_date FROM "test_emp" WHERE LEFT(MONTHNAME(hire_date), 1) = 'J' ORDER BY hire_date LIMIT 10; + +monthNameFilterWithFullName +SELECT MONTHNAME(hire_date) AS m, hire_date FROM "test_emp" WHERE MONTHNAME(hire_date) = 'August' ORDER BY hire_date LIMIT 10; + +dayNameFilterWithFullName +SELECT DAYNAME(hire_date) AS d, hire_date FROM "test_emp" WHERE DAYNAME(hire_date) = 'Sunday' ORDER BY hire_date LIMIT 10; + +dayNameAndMonthNameAsFilter +SELECT first_name, last_name FROM "test_emp" WHERE DAYNAME(hire_date) = 'Sunday' AND MONTHNAME(hire_date) = 'January' ORDER BY hire_date LIMIT 10; + +quarterWithFilter +SELECT QUARTER(hire_date) quarter, hire_date 
FROM test_emp WHERE QUARTER(hire_date) > 2 ORDER BY hire_date LIMIT 15; // // Aggregate // - dateTimeAggByYear SELECT YEAR(birth_date) AS d, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY YEAR(birth_date) ORDER BY YEAR(birth_date) LIMIT 13; -dateTimeAggByMonth +dateTimeAggByMonthWithOrderBy SELECT MONTH(birth_date) AS d, COUNT(*) AS c, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY MONTH(birth_date) ORDER BY MONTH(birth_date) DESC; -dateTimeAggByDayOfMonth +dateTimeAggByDayOfMonthWithOrderBy SELECT DAY_OF_MONTH(birth_date) AS d, COUNT(*) AS c, CAST(SUM(emp_no) AS INT) s FROM "test_emp" GROUP BY DAY_OF_MONTH(birth_date) ORDER BY DAY_OF_MONTH(birth_date) DESC; + +monthNameWithGroupBy +SELECT MONTHNAME("hire_date") AS month, COUNT(*) AS count FROM "test_emp" GROUP BY MONTHNAME("hire_date"), MONTH("hire_date") ORDER BY MONTH("hire_date"); + +monthNameWithDoubleGroupByAndOrderBy +SELECT MONTHNAME("hire_date") AS month, COUNT(*) AS count FROM "test_emp" GROUP BY MONTHNAME("hire_date"), MONTH("hire_date") ORDER BY MONTHNAME("hire_date") DESC; + +// AwaitsFix https://github.com/elastic/elasticsearch/issues/33519 +// monthNameWithGroupByOrderByAndHaving +// SELECT CAST(MAX("salary") AS DOUBLE) max_salary, MONTHNAME("hire_date") month_name FROM "test_emp" GROUP BY MONTHNAME("hire_date") HAVING MAX("salary") > 50000 ORDER BY MONTHNAME(hire_date); +// dayNameWithHaving +// SELECT DAYNAME("hire_date") FROM "test_emp" GROUP BY DAYNAME("hire_date") HAVING MAX("emp_no") > ASCII(DAYNAME("hire_date")); + +dayNameWithDoubleGroupByAndOrderBy +SELECT COUNT(*) c, DAYNAME(hire_date) day_name, DAY(hire_date) day FROM test_emp WHERE MONTHNAME(hire_date) = 'August' GROUP BY DAYNAME(hire_date), DAY(hire_date) ORDER BY DAYNAME(hire_date), DAY(hire_date); + +dayNameWithGroupByOrderByAndHaving +SELECT CAST(MAX(salary) AS DOUBLE) max_salary, DAYNAME(hire_date) day_name FROM test_emp GROUP BY DAYNAME(hire_date) HAVING MAX(salary) > 50000 ORDER BY DAYNAME("hire_date"); + +quarterWithGroupByAndOrderBy +SELECT QUARTER(hire_date) quarter, COUNT(*) hires FROM test_emp GROUP BY QUARTER(hire_date) ORDER BY QUARTER(hire_date); \ No newline at end of file diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index 2a4f29fcf5d9a..52356bdfd52eb 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -214,6 +214,11 @@ MONTH |SCALAR YEAR |SCALAR WEEK_OF_YEAR |SCALAR WEEK |SCALAR +DAY_NAME |SCALAR +DAYNAME |SCALAR +MONTH_NAME |SCALAR +MONTHNAME |SCALAR +QUARTER |SCALAR ABS |SCALAR ACOS |SCALAR ASIN |SCALAR @@ -318,7 +323,9 @@ DAY |SCALAR DAY_OF_WEEK |SCALAR DAY_OF_YEAR |SCALAR HOUR_OF_DAY |SCALAR -MINUTE_OF_DAY |SCALAR +MINUTE_OF_DAY |SCALAR +DAY_NAME |SCALAR +DAYNAME |SCALAR // end::showFunctionsWithPattern ; From 2f3b542d57a6a1e50ba67e4c69b9dece0cac180b Mon Sep 17 00:00:00 2001 From: Ed Savage <32410745+edsavage@users.noreply.github.com> Date: Tue, 11 Sep 2018 12:48:14 +0100 Subject: [PATCH 87/91] HLRC: Add ML get categories API (#33465) HLRC: Adding the ML 'get categories' API --- .../client/MLRequestConverters.java | 15 ++ .../client/MachineLearningClient.java | 41 +++++ .../client/ml/GetCategoriesRequest.java | 128 ++++++++++++++++ .../client/ml/GetCategoriesResponse.java | 79 ++++++++++ .../client/MLRequestConvertersTests.java | 16 ++ .../client/MachineLearningGetResultsIT.java | 141 ++++++++++++++++++ .../MlClientDocumentationIT.java | 75 +++++++++- .../client/ml/GetCategoriesRequestTests.java | 51 
+++++++ .../client/ml/GetCategoriesResponseTests.java | 53 +++++++ .../job/results/CategoryDefinitionTests.java | 2 +- .../high-level/ml/get-categories.asciidoc | 83 +++++++++++ .../high-level/supported-apis.asciidoc | 2 + 12 files changed, 684 insertions(+), 2 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesResponseTests.java create mode 100644 docs/java-rest/high-level/ml/get-categories.asciidoc diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java index ecbe7f2d3a5d3..d158c1a06a2f3 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java @@ -32,6 +32,7 @@ import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; +import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobStatsRequest; @@ -194,6 +195,20 @@ static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOExceptio return request; } + static Request getCategories(GetCategoriesRequest getCategoriesRequest) throws IOException { + String endpoint = new EndpointBuilder() + .addPathPartAsIs("_xpack") + .addPathPartAsIs("ml") + .addPathPartAsIs("anomaly_detectors") + .addPathPart(getCategoriesRequest.getJobId()) + .addPathPartAsIs("results") + .addPathPartAsIs("categories") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + request.setEntity(createEntity(getCategoriesRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException { String endpoint = new EndpointBuilder() .addPathPartAsIs("_xpack") diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java index 85c5771f3450b..b5f7550b913d4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java @@ -32,6 +32,8 @@ import org.elasticsearch.client.ml.FlushJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; +import org.elasticsearch.client.ml.GetCategoriesRequest; +import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetJobRequest; @@ -474,6 +476,45 @@ public void getBucketsAsync(GetBucketsRequest request, RequestOptions options, A Collections.emptySet()); } + /** + * Gets the categories for a Machine Learning Job. + *
<p>
+ * For additional info + * see + * ML GET categories documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @throws IOException when there is a serialization issue sending the request or receiving the response + */ + public GetCategoriesResponse getCategories(GetCategoriesRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, + MLRequestConverters::getCategories, + options, + GetCategoriesResponse::fromXContent, + Collections.emptySet()); + } + + /** + * Gets the categories for a Machine Learning Job, notifies listener once the requested buckets are retrieved. + *
<p>
+ * For additional info + * see + * ML GET categories documentation + * + * @param request The request + * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener Listener to be notified upon request completion + */ + public void getCategoriesAsync(GetCategoriesRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, + MLRequestConverters::getCategories, + options, + GetCategoriesResponse::fromXContent, + listener, + Collections.emptySet()); + } + /** * Gets overall buckets for a set of Machine Learning Jobs. *
<p>
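A minimal usage sketch of the two client methods added above, assuming an already-configured RestHighLevelClient named client and an illustrative job id "my-job" (both names are assumptions, not values from this patch; the request/response classes come from org.elasticsearch.client.ml as added below):

    GetCategoriesRequest request = new GetCategoriesRequest("my-job"); // job id is an assumed example
    request.setPageParams(new PageParams(0, 50));                      // page through the first 50 categories
    GetCategoriesResponse response =
        client.machineLearning().getCategories(request, RequestOptions.DEFAULT);
    for (CategoryDefinition category : response.categories()) {
        System.out.println(category.getCategoryId() + ": " + category.getTerms());
    }

The non-blocking variant, getCategoriesAsync, takes the same request and options plus an ActionListener<GetCategoriesResponse> instead of returning the response directly.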
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java new file mode 100644 index 0000000000000..4fc68793f0060 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ml.job.config.Job; +import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +/** + * A request to retrieve categories of a given job + */ +public class GetCategoriesRequest extends ActionRequest implements ToXContentObject { + + + public static final ParseField CATEGORY_ID = new ParseField("category_id"); + + public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "get_categories_request", a -> new GetCategoriesRequest((String) a[0])); + + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); + PARSER.declareLong(GetCategoriesRequest::setCategoryId, CATEGORY_ID); + PARSER.declareObject(GetCategoriesRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); + } + + private final String jobId; + private Long categoryId; + private PageParams pageParams; + + /** + * Constructs a request to retrieve category information from a given job + * @param jobId id of the job from which to retrieve results + */ + public GetCategoriesRequest(String jobId) { + this.jobId = Objects.requireNonNull(jobId); + } + + public String getJobId() { + return jobId; + } + + public PageParams getPageParams() { + return pageParams; + } + + public Long getCategoryId() { + return categoryId; + } + + /** + * Sets the category id + * @param categoryId the category id + */ + public void setCategoryId(Long categoryId) { + this.categoryId = categoryId; + } + + /** + * Sets the paging parameters + * @param pageParams the paging parameters + */ + public void setPageParams(PageParams pageParams) { + this.pageParams = pageParams; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Job.ID.getPreferredName(), jobId); + if (categoryId != null) { + 
builder.field(CATEGORY_ID.getPreferredName(), categoryId); + } + if (pageParams != null) { + builder.field(PageParams.PAGE.getPreferredName(), pageParams); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetCategoriesRequest request = (GetCategoriesRequest) obj; + return Objects.equals(jobId, request.jobId) + && Objects.equals(categoryId, request.categoryId) + && Objects.equals(pageParams, request.pageParams); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, categoryId, pageParams); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java new file mode 100644 index 0000000000000..3d3abe00bfb62 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java @@ -0,0 +1,79 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.job.results.CategoryDefinition; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * A response containing the requested categories + */ +public class GetCategoriesResponse extends AbstractResultResponse { + + public static final ParseField CATEGORIES = new ParseField("categories"); + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("get_categories_response", true, + a -> new GetCategoriesResponse((List) a[0], (long) a[1])); + + static { + PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CategoryDefinition.PARSER, CATEGORIES); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); + } + + public static GetCategoriesResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + GetCategoriesResponse(List categories, long count) { + super(CATEGORIES, categories, count); + } + + /** + * The retrieved categories + * @return the retrieved categories + */ + public List categories() { + return results; + } + + @Override + public int hashCode() { + return Objects.hash(count, results); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + GetCategoriesResponse other = (GetCategoriesResponse) obj; + return count == other.count && Objects.equals(results, other.results); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java index 26e6251af48d0..7cc5f119c399c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MLRequestConvertersTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.ml.FlushJobRequest; import org.elasticsearch.client.ml.ForecastJobRequest; import org.elasticsearch.client.ml.GetBucketsRequest; +import org.elasticsearch.client.ml.GetCategoriesRequest; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetJobRequest; import org.elasticsearch.client.ml.GetJobStatsRequest; @@ -220,6 +221,21 @@ public void testGetBuckets() throws IOException { } } + public void testGetCategories() throws IOException { + String jobId = randomAlphaOfLength(10); + GetCategoriesRequest getCategoriesRequest = new GetCategoriesRequest(jobId); + getCategoriesRequest.setPageParams(new PageParams(100, 300)); + + + Request request = MLRequestConverters.getCategories(getCategoriesRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/categories", request.getEndpoint()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) { + GetCategoriesRequest parsedRequest = GetCategoriesRequest.PARSER.apply(parser, null); + assertThat(parsedRequest, equalTo(getCategoriesRequest)); + } + } + public void testGetOverallBuckets() throws IOException { String jobId = randomAlphaOfLength(10); GetOverallBucketsRequest getOverallBucketsRequest = new 
GetOverallBucketsRequest(jobId); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java index 40d8596d1ba86..ddaec64157381 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningGetResultsIT.java @@ -23,6 +23,8 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; +import org.elasticsearch.client.ml.GetCategoriesRequest; +import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetOverallBucketsRequest; @@ -126,11 +128,150 @@ private void addRecordIndexRequest(long timestamp, boolean isInterim, BulkReques bulkRequest.add(indexRequest); } + private void addCategoryIndexRequest(long categoryId, String categoryName, BulkRequest bulkRequest) { + IndexRequest indexRequest = new IndexRequest(RESULTS_INDEX, DOC); + indexRequest.source("{\"job_id\":\"" + JOB_ID + "\", \"category_id\": " + categoryId + ", \"terms\": \"" + + categoryName + "\", \"regex\": \".*?" + categoryName + ".*\", \"max_matching_length\": 3, \"examples\": [\"" + + categoryName + "\"]}", XContentType.JSON); + bulkRequest.add(indexRequest); + } + + private void addCategoriesIndexRequests(BulkRequest bulkRequest) { + + List<String> categories = Arrays.asList("AAL", "JZA", "JBU"); + + for (int i = 0; i < categories.size(); i++) { + addCategoryIndexRequest(i + 1, categories.get(i), bulkRequest); + } + } + @After public void deleteJob() throws IOException { new MlRestTestStateCleaner(logger, client()).clearMlMetadata(); } + public void testGetCategories() throws IOException { + + // index some category results + BulkRequest bulkRequest = new BulkRequest(); + bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + addCategoriesIndexRequests(bulkRequest); + + highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); + + MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); + + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setPageParams(new PageParams(0, 10000)); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(3L)); + assertThat(response.categories().size(), equalTo(3)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(1L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("AAL")); + + assertThat(response.categories().get(1).getCategoryId(), equalTo(2L)); + assertThat(response.categories().get(1).getGrokPattern(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(1).getRegex(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(1).getTerms(), equalTo("JZA")); + + assertThat(response.categories().get(2).getCategoryId(), equalTo(3L)); + assertThat(response.categories().get(2).getGrokPattern(), equalTo(".*?JBU.*")); + assertThat(response.categories().get(2).getRegex(), equalTo(".*?JBU.*")); +
assertThat(response.categories().get(2).getTerms(), equalTo("JBU")); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setPageParams(new PageParams(0, 1)); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(3L)); + assertThat(response.categories().size(), equalTo(1)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(1L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("AAL")); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setPageParams(new PageParams(1, 2)); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(3L)); + assertThat(response.categories().size(), equalTo(2)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(2L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("JZA")); + + assertThat(response.categories().get(1).getCategoryId(), equalTo(3L)); + assertThat(response.categories().get(1).getGrokPattern(), equalTo(".*?JBU.*")); + assertThat(response.categories().get(1).getRegex(), equalTo(".*?JBU.*")); + assertThat(response.categories().get(1).getTerms(), equalTo("JBU")); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setCategoryId(0L); // request a non-existent category + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(0L)); + assertThat(response.categories().size(), equalTo(0)); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setCategoryId(1L); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(1L)); + assertThat(response.categories().size(), equalTo(1)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(1L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?AAL.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("AAL")); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setCategoryId(2L); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(1L)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(2L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?JZA.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("JZA")); + + } + { + GetCategoriesRequest request = new GetCategoriesRequest(JOB_ID); + request.setCategoryId(3L); + + GetCategoriesResponse response = execute(request, machineLearningClient::getCategories, + 
machineLearningClient::getCategoriesAsync); + + assertThat(response.count(), equalTo(1L)); + assertThat(response.categories().get(0).getCategoryId(), equalTo(3L)); + assertThat(response.categories().get(0).getGrokPattern(), equalTo(".*?JBU.*")); + assertThat(response.categories().get(0).getRegex(), equalTo(".*?JBU.*")); + assertThat(response.categories().get(0).getTerms(), equalTo("JBU")); + } + } + public void testGetBuckets() throws IOException { MachineLearningClient machineLearningClient = highLevelClient().machineLearning(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index 9abef54d0d24f..845729eccbde8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -39,6 +39,8 @@ import org.elasticsearch.client.ml.ForecastJobResponse; import org.elasticsearch.client.ml.GetBucketsRequest; import org.elasticsearch.client.ml.GetBucketsResponse; +import org.elasticsearch.client.ml.GetCategoriesRequest; +import org.elasticsearch.client.ml.GetCategoriesResponse; import org.elasticsearch.client.ml.GetInfluencersRequest; import org.elasticsearch.client.ml.GetInfluencersResponse; import org.elasticsearch.client.ml.GetJobRequest; @@ -69,6 +71,7 @@ import org.elasticsearch.client.ml.job.config.RuleCondition; import org.elasticsearch.client.ml.job.results.AnomalyRecord; import org.elasticsearch.client.ml.job.results.Bucket; +import org.elasticsearch.client.ml.job.results.CategoryDefinition; import org.elasticsearch.client.ml.job.results.Influencer; import org.elasticsearch.client.ml.job.results.OverallBucket; import org.elasticsearch.client.ml.job.stats.JobStats; @@ -473,7 +476,7 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } - + public void testGetBuckets() throws IOException, InterruptedException { RestHighLevelClient client = highLevelClient(); @@ -1111,4 +1114,74 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } } + + public void testGetCategories() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + String jobId = "test-get-categories"; + Job job = MachineLearningIT.buildJob(jobId); + client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + + // Let us index a category + IndexRequest indexRequest = new IndexRequest(".ml-anomalies-shared", "doc"); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + indexRequest.source("{\"job_id\": \"test-get-categories\", \"category_id\": 1, \"terms\": \"AAL\"," + + " \"regex\": \".*?AAL.*\", \"max_matching_length\": 3, \"examples\": [\"AAL\"]}", XContentType.JSON); + client.index(indexRequest, RequestOptions.DEFAULT); + + { + // tag::x-pack-ml-get-categories-request + GetCategoriesRequest request = new GetCategoriesRequest(jobId); // <1> + // end::x-pack-ml-get-categories-request + + // tag::x-pack-ml-get-categories-category-id + request.setCategoryId(1L); // <1> + // end::x-pack-ml-get-categories-category-id + + // tag::x-pack-ml-get-categories-page + request.setPageParams(new PageParams(100, 200)); // <1> + // end::x-pack-ml-get-categories-page + + // Set page params back to null so the response contains the category we indexed + 
request.setPageParams(null); + + // tag::x-pack-ml-get-categories-execute + GetCategoriesResponse response = client.machineLearning().getCategories(request, RequestOptions.DEFAULT); + // end::x-pack-ml-get-categories-execute + + // tag::x-pack-ml-get-categories-response + long count = response.count(); // <1> + List<CategoryDefinition> categories = response.categories(); // <2> + // end::x-pack-ml-get-categories-response + assertEquals(1, categories.size()); + } + { + GetCategoriesRequest request = new GetCategoriesRequest(jobId); + + // tag::x-pack-ml-get-categories-listener + ActionListener<GetCategoriesResponse> listener = + new ActionListener<GetCategoriesResponse>() { + @Override + public void onResponse(GetCategoriesResponse getCategoriesResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-ml-get-categories-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-ml-get-categories-execute-async + client.machineLearning().getCategoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-ml-get-categories-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java new file mode 100644 index 0000000000000..7d9fe2b238f75 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesRequestTests.java @@ -0,0 +1,51 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.job.util.PageParams; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; + +public class GetCategoriesRequestTests extends AbstractXContentTestCase<GetCategoriesRequest> { + + @Override + protected GetCategoriesRequest createTestInstance() { + GetCategoriesRequest request = new GetCategoriesRequest(randomAlphaOfLengthBetween(1, 20)); + if (randomBoolean()) { + request.setCategoryId(randomNonNegativeLong()); + } else { + int from = randomInt(10000); + int size = randomInt(10000); + request.setPageParams(new PageParams(from, size)); + } + return request; + } + + @Override + protected GetCategoriesRequest doParseInstance(XContentParser parser) throws IOException { + return GetCategoriesRequest.PARSER.apply(parser, null); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesResponseTests.java new file mode 100644 index 0000000000000..e8718ba20e9ce --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/GetCategoriesResponseTests.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.client.ml; + +import org.elasticsearch.client.ml.job.results.CategoryDefinition; +import org.elasticsearch.client.ml.job.results.CategoryDefinitionTests; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class GetCategoriesResponseTests extends AbstractXContentTestCase<GetCategoriesResponse> { + + @Override + protected GetCategoriesResponse createTestInstance() { + String jobId = randomAlphaOfLength(20); + int listSize = randomInt(10); + List<CategoryDefinition> categories = new ArrayList<>(listSize); + for (int j = 0; j < listSize; j++) { + CategoryDefinition category = CategoryDefinitionTests.createTestInstance(jobId); + categories.add(category); + } + return new GetCategoriesResponse(categories, listSize); + } + + @Override + protected GetCategoriesResponse doParseInstance(XContentParser parser) throws IOException { + return GetCategoriesResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/CategoryDefinitionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/CategoryDefinitionTests.java index 27e15a1600d38..63f261583869f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/CategoryDefinitionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/job/results/CategoryDefinitionTests.java @@ -25,7 +25,7 @@ public class CategoryDefinitionTests extends AbstractXContentTestCase<CategoryDefinition> { - public CategoryDefinition createTestInstance(String jobId) { + public static CategoryDefinition createTestInstance(String jobId) { CategoryDefinition categoryDefinition = new CategoryDefinition(jobId); categoryDefinition.setCategoryId(randomLong()); categoryDefinition.setTerms(randomAlphaOfLength(10)); diff --git a/docs/java-rest/high-level/ml/get-categories.asciidoc b/docs/java-rest/high-level/ml/get-categories.asciidoc new file mode 100644 index 0000000000000..0e86a2b7f33a6 --- /dev/null +++ b/docs/java-rest/high-level/ml/get-categories.asciidoc @@ -0,0 +1,83 @@ +[[java-rest-high-x-pack-ml-get-categories]] +=== Get Categories API + +The Get Categories API retrieves one or more category results. +It accepts a `GetCategoriesRequest` object and responds +with a `GetCategoriesResponse` object. + +[[java-rest-high-x-pack-ml-get-categories-request]] +==== Get Categories Request + +A `GetCategoriesRequest` object is created with an existing, non-null `jobId`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-request] +-------------------------------------------------- +<1> Constructing a new request referencing an existing `jobId` + +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-category-id] +-------------------------------------------------- +<1> The ID of the category to get. If not set, all categories are returned.
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-page] +-------------------------------------------------- +<1> The page parameters `from` and `size`. `from` specifies the number of categories to skip. +`size` specifies the maximum number of categories to get. Defaults to `0` and `100` respectively. + +[[java-rest-high-x-pack-ml-get-categories-execution]] +==== Execution + +The request can be executed through the `MachineLearningClient` contained +in the `RestHighLevelClient` object, accessed via the `machineLearningClient()` method. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute] +-------------------------------------------------- + + +[[java-rest-high-x-pack-ml-get-categories-execution-async]] +==== Asynchronous Execution + +The request can also be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-execute-async] +-------------------------------------------------- +<1> The `GetCategoriesRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. + +A typical listener for `GetCategoriesResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-listener] +-------------------------------------------------- +<1> `onResponse` is called back when the action is completed successfully +<2> `onFailure` is called back when some unexpected error occurs + +[[java-rest-high-snapshot-ml-get-categories-response]] +==== Get Categories Response + +The returned `GetCategoriesResponse` contains the requested categories: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MlClientDocumentationIT.java[x-pack-ml-get-categories-response] +-------------------------------------------------- +<1> The count of categories that were matched +<2> The categories retrieved \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 8d92653ce5702..87639a2ea3fa9 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -226,6 +226,7 @@ The Java High Level REST Client supports the following Machine Learning APIs: * <> * <> * <> +* <> include::ml/put-job.asciidoc[] include::ml/get-job.asciidoc[] @@ -241,6 +242,7 @@ include::ml/get-overall-buckets.asciidoc[] include::ml/get-records.asciidoc[] include::ml/post-data.asciidoc[] include::ml/get-influencers.asciidoc[] +include::ml/get-categories.asciidoc[] == Migration APIs From 517cfc3cc0c2278a1287cf961c7db513e81dcbb6 Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 11 Sep 2018 14:05:14 +0200 Subject: [PATCH 88/91] Add read-only Engine (#33563) This change adds an engine 
implementation that opens a reader on an existing index but doesn't permit any refreshes or modifications to the index. Relates to #32867 Relates to #32844 --- .../elasticsearch/index/engine/Engine.java | 6 +- .../index/engine/InternalEngine.java | 5 - .../index/engine/ReadOnlyEngine.java | 372 ++++++++++++++++++ .../index/engine/InternalEngineTests.java | 2 +- .../index/engine/ReadOnlyEngineTests.java | 156 ++++++++ 5 files changed, 533 insertions(+), 8 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java create mode 100644 server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index fe27aea805eef..ea8161c158980 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -661,7 +661,7 @@ protected final void ensureOpen() { } /** get commits stats for the last commit */ - public CommitStats commitStats() { + public final CommitStats commitStats() { return new CommitStats(getLastCommittedSegmentInfos()); } @@ -951,7 +951,9 @@ public final boolean refreshNeeded() { * * @return the commit Id for the resulting commit */ - public abstract CommitId flush() throws EngineException; + public final CommitId flush() throws EngineException { + return flush(false, false); + } /** diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index d9b03777f1b1b..b2ab0d71c32ac 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -1576,11 +1576,6 @@ public boolean shouldPeriodicallyFlush() { || localCheckpointTracker.getCheckpoint() == localCheckpointTracker.getMaxSeqNo(); } - @Override - public CommitId flush() throws EngineException { - return flush(false, false); - } - @Override public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException { ensureOpen(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java new file mode 100644 index 0000000000000..a55987d0a0082 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -0,0 +1,372 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.engine; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ReferenceManager; +import org.apache.lucene.search.SearcherManager; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.Lock; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.store.Store; +import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.index.translog.TranslogStats; + +import java.io.Closeable; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Stream; + +/** + * A basic read-only engine that allows switching a shard to be truly read-only temporarily or permanently. + * Note: this engine can be opened side-by-side with a read-write engine but will not reflect any changes made to the read-write + * engine. + * + * @see #ReadOnlyEngine(EngineConfig, SeqNoStats, TranslogStats, boolean, Function) + */ +public final class ReadOnlyEngine extends Engine { + + private final SegmentInfos lastCommittedSegmentInfos; + private final SeqNoStats seqNoStats; + private final TranslogStats translogStats; + private final SearcherManager searcherManager; + private final IndexCommit indexCommit; + private final Lock indexWriterLock; + + /** + * Creates a new ReadOnlyEngine. This ctor can also be used to open a read-only engine on top of an already opened + * read-write engine. It allows optionally obtaining the writer lock for the shard, which would time out if another + * engine is still open. + * + * @param config the engine configuration + * @param seqNoStats sequence number statistics for this engine or null if not provided + * @param translogStats translog stats for this engine or null if not provided + * @param obtainLock if true this engine will try to obtain the {@link IndexWriter#WRITE_LOCK_NAME} lock. Otherwise + * the lock won't be obtained + * @param readerWrapperFunction allows wrapping the index-reader for this engine. + */ + public ReadOnlyEngine(EngineConfig config, SeqNoStats seqNoStats, TranslogStats translogStats, boolean obtainLock, + Function<DirectoryReader, DirectoryReader> readerWrapperFunction) { + super(config); + try { + Store store = config.getStore(); + store.incRef(); + DirectoryReader reader = null; + Directory directory = store.directory(); + Lock indexWriterLock = null; + boolean success = false; + try { + // we obtain the IW lock even though we never modify the index. + // yet this makes sure nobody else does. including some testing tools that try to be messy + indexWriterLock = obtainLock ? directory.obtainLock(IndexWriter.WRITE_LOCK_NAME) : null; + this.lastCommittedSegmentInfos = Lucene.readSegmentInfos(directory); + this.translogStats = translogStats == null ? new TranslogStats(0, 0, 0, 0, 0) : translogStats; + this.seqNoStats = seqNoStats == null ?
buildSeqNoStats(lastCommittedSegmentInfos) : seqNoStats; + reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(directory), config.getShardId()); + if (config.getIndexSettings().isSoftDeleteEnabled()) { + reader = new SoftDeletesDirectoryReaderWrapper(reader, Lucene.SOFT_DELETES_FIELD); + } + reader = readerWrapperFunction.apply(reader); + this.indexCommit = reader.getIndexCommit(); + this.searcherManager = new SearcherManager(reader, + new RamAccountingSearcherFactory(engineConfig.getCircuitBreakerService())); + this.indexWriterLock = indexWriterLock; + success = true; + } finally { + if (success == false) { + IOUtils.close(reader, indexWriterLock, store::decRef); + } + } + } catch (IOException e) { + throw new UncheckedIOException(e); // this is stupid + } + } + + @Override + protected void closeNoLock(String reason, CountDownLatch closedLatch) { + if (isClosed.compareAndSet(false, true)) { + try { + IOUtils.close(searcherManager, indexWriterLock, store::decRef); + } catch (Exception ex) { + logger.warn("failed to close searcher", ex); + } finally { + closedLatch.countDown(); + } + } + } + + public static SeqNoStats buildSeqNoStats(SegmentInfos infos) { + final SequenceNumbers.CommitInfo seqNoStats = + SequenceNumbers.loadSeqNoInfoFromLuceneCommit(infos.userData.entrySet()); + long maxSeqNo = seqNoStats.maxSeqNo; + long localCheckpoint = seqNoStats.localCheckpoint; + return new SeqNoStats(maxSeqNo, localCheckpoint, localCheckpoint); + } + + @Override + public GetResult get(Get get, BiFunction<String, SearcherScope, Searcher> searcherFactory) throws EngineException { + return getFromSearcher(get, searcherFactory, SearcherScope.EXTERNAL); + } + + @Override + protected ReferenceManager<IndexSearcher> getReferenceManager(SearcherScope scope) { + return searcherManager; + } + + @Override + protected SegmentInfos getLastCommittedSegmentInfos() { + return lastCommittedSegmentInfos; + } + + @Override + public String getHistoryUUID() { + return lastCommittedSegmentInfos.userData.get(Engine.HISTORY_UUID_KEY); + } + + @Override + public long getWritingBytes() { + return 0; + } + + @Override + public long getIndexThrottleTimeInMillis() { + return 0; + } + + @Override + public boolean isThrottled() { + return false; + } + + @Override + public IndexResult index(Index index) { + assert false : "this should not be called"; + throw new UnsupportedOperationException("indexing is not supported on a read-only engine"); + } + + @Override + public DeleteResult delete(Delete delete) { + assert false : "this should not be called"; + throw new UnsupportedOperationException("deletes are not supported on a read-only engine"); + } + + @Override + public NoOpResult noOp(NoOp noOp) { + assert false : "this should not be called"; + throw new UnsupportedOperationException("no-ops are not supported on a read-only engine"); + } + + @Override + public boolean isTranslogSyncNeeded() { + return false; + } + + @Override + public boolean ensureTranslogSynced(Stream<Translog.Location> locations) { + return false; + } + + @Override + public void syncTranslog() { + } + + @Override + public Closeable acquireRetentionLockForPeerRecovery() { + return () -> {}; + } + + @Override + public Translog.Snapshot newChangesSnapshot(String source, MapperService mapperService, long fromSeqNo, long toSeqNo, + boolean requiredFullRange) throws IOException { + return readHistoryOperations(source, mapperService, fromSeqNo); + } + + @Override + public Translog.Snapshot readHistoryOperations(String source, MapperService mapperService, long startingSeqNo) throws IOException { + return new
Translog.Snapshot() { + @Override + public void close() { } + @Override + public int totalOperations() { + return 0; + } + @Override + public Translog.Operation next() { + return null; + } + }; + } + + @Override + public int estimateNumberOfHistoryOperations(String source, MapperService mapperService, long startingSeqNo) throws IOException { + return 0; + } + + @Override + public boolean hasCompleteOperationHistory(String source, MapperService mapperService, long startingSeqNo) throws IOException { + return false; + } + + @Override + public TranslogStats getTranslogStats() { + return translogStats; + } + + @Override + public Translog.Location getTranslogLastWriteLocation() { + return new Translog.Location(0, 0, 0); + } + + @Override + public long getLocalCheckpoint() { + return seqNoStats.getLocalCheckpoint(); + } + + @Override + public void waitForOpsToComplete(long seqNo) { + } + + @Override + public void resetLocalCheckpoint(long newCheckpoint) { + } + + @Override + public SeqNoStats getSeqNoStats(long globalCheckpoint) { + return new SeqNoStats(seqNoStats.getMaxSeqNo(), seqNoStats.getLocalCheckpoint(), globalCheckpoint); + } + + @Override + public long getLastSyncedGlobalCheckpoint() { + return seqNoStats.getGlobalCheckpoint(); + } + + @Override + public long getIndexBufferRAMBytesUsed() { + return 0; + } + + @Override + public List<Segment> segments(boolean verbose) { + return Arrays.asList(getSegmentInfo(lastCommittedSegmentInfos, verbose)); + } + + @Override + public void refresh(String source) { + // we could allow refreshes if we want; down the road the searcher manager will then reflect changes to a rw-engine + // opened side-by-side + } + + @Override + public void writeIndexingBuffer() throws EngineException { + } + + @Override + public boolean shouldPeriodicallyFlush() { + return false; + } + + @Override + public SyncedFlushResult syncFlush(String syncId, CommitId expectedCommitId) { + // we can't do synced flushes; this would require an indexWriter, which we don't have + throw new UnsupportedOperationException("syncedFlush is not supported on a read-only engine"); + } + + @Override + public CommitId flush(boolean force, boolean waitIfOngoing) throws EngineException { + return new CommitId(lastCommittedSegmentInfos.getId()); + } + + @Override + public void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, + boolean upgrade, boolean upgradeOnlyAncientSegments) { + } + + @Override + public IndexCommitRef acquireLastIndexCommit(boolean flushFirst) { + store.incRef(); + return new IndexCommitRef(indexCommit, store::decRef); + } + + @Override + public IndexCommitRef acquireSafeIndexCommit() { + return acquireLastIndexCommit(false); + } + + @Override + public void activateThrottling() { + } + + @Override + public void deactivateThrottling() { + } + + @Override + public void trimUnreferencedTranslogFiles() { + } + + @Override + public boolean shouldRollTranslogGeneration() { + return false; + } + + @Override + public void rollTranslogGeneration() { + } + + @Override + public void restoreLocalCheckpointFromTranslog() { + } + + @Override + public int fillSeqNoGaps(long primaryTerm) { + return 0; + } + + @Override + public Engine recoverFromTranslog(TranslogRecoveryRunner translogRecoveryRunner, long recoverUpToSeqNo) { + return this; + } + + @Override + public void skipTranslogRecovery() { + } + + @Override + public void trimOperationsFromTranslog(long belowTerm, long aboveSeqNo) { + } + + @Override + public void maybePruneDeletes() { + } +} diff --git
a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index a26fd72468b48..39132d805b268 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -5033,7 +5033,7 @@ public void testAcquireSearcherOnClosingEngine() throws Exception { expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test")); } - private static void trimUnsafeCommits(EngineConfig config) throws IOException { + static void trimUnsafeCommits(EngineConfig config) throws IOException { final Store store = config.getStore(); final TranslogConfig translogConfig = config.getTranslogConfig(); final String translogUUID = store.readLastCommittedSegmentsInfo().getUserData().get(Translog.TRANSLOG_UUID_KEY); diff --git a/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java new file mode 100644 index 0000000000000..4a5b89351bd16 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/engine/ReadOnlyEngineTests.java @@ -0,0 +1,156 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.engine; + +import org.apache.lucene.util.LuceneTestCase; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.store.Store; + +import java.io.IOException; +import java.util.Set; +import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; + +import static org.hamcrest.Matchers.equalTo; + +public class ReadOnlyEngineTests extends EngineTestCase { + + public void testReadOnlyEngine() throws Exception { + IOUtils.close(engine, store); + Engine readOnlyEngine = null; + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + try (Store store = createStore()) { + EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get); + int numDocs = scaledRandomIntBetween(10, 1000); + final SeqNoStats lastSeqNoStats; + final Set<String> lastDocIds; + try (InternalEngine engine = createEngine(config)) { + Engine.Get get = null; + for (int i = 0; i < numDocs; i++) { + if (rarely()) { + continue; // gap in sequence number + } + ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); + engine.index(new Engine.Index(newUid(doc), doc, i, primaryTerm.get(), 1, null, Engine.Operation.Origin.REPLICA, + System.nanoTime(), -1, false)); + if (get == null || rarely()) { + get = newGet(randomBoolean(), doc); + } + if (rarely()) { + engine.flush(); + } + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint())); + } + engine.syncTranslog(); + engine.flush(); + readOnlyEngine = new ReadOnlyEngine(engine.engineConfig, engine.getSeqNoStats(globalCheckpoint.get()), + engine.getTranslogStats(), false, Function.identity()); + lastSeqNoStats = engine.getSeqNoStats(globalCheckpoint.get()); + lastDocIds = getDocIds(engine, true); + assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint())); + assertThat(readOnlyEngine.getSeqNoStats(globalCheckpoint.get()).getMaxSeqNo(), equalTo(lastSeqNoStats.getMaxSeqNo())); + assertThat(getDocIds(readOnlyEngine, false), equalTo(lastDocIds)); + for (int i = 0; i < numDocs; i++) { + if (randomBoolean()) { + String delId = Integer.toString(i); + engine.delete(new Engine.Delete("test", delId, newUid(delId), primaryTerm.get())); + } + if (rarely()) { + engine.flush(); + } + } + Engine.Searcher external = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL); + Engine.Searcher internal = readOnlyEngine.acquireSearcher("test", Engine.SearcherScope.INTERNAL); + assertSame(external.reader(), internal.reader()); + IOUtils.close(external, internal); + // the locked down engine should still point to the previous commit + assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint())); + assertThat(readOnlyEngine.getSeqNoStats(globalCheckpoint.get()).getMaxSeqNo(), equalTo(lastSeqNoStats.getMaxSeqNo())); + assertThat(getDocIds(readOnlyEngine, false), equalTo(lastDocIds)); + try (Engine.GetResult getResult = readOnlyEngine.get(get, readOnlyEngine::acquireSearcher)) { + assertTrue(getResult.exists()); + } + + } + // Close and reopen the main engine + InternalEngineTests.trimUnsafeCommits(config); + try (InternalEngine recoveringEngine = new
InternalEngine(config)) { + recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE); + // the locked down engine should still point to the previous commit + assertThat(readOnlyEngine.getLocalCheckpoint(), equalTo(lastSeqNoStats.getLocalCheckpoint())); + assertThat(readOnlyEngine.getSeqNoStats(globalCheckpoint.get()).getMaxSeqNo(), equalTo(lastSeqNoStats.getMaxSeqNo())); + assertThat(getDocIds(readOnlyEngine, false), equalTo(lastDocIds)); + } + } finally { + IOUtils.close(readOnlyEngine); + } + } + + public void testFlushes() throws IOException { + IOUtils.close(engine, store); + Engine readOnlyEngine = null; + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + try (Store store = createStore()) { + EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get); + int numDocs = scaledRandomIntBetween(10, 1000); + try (InternalEngine engine = createEngine(config)) { + for (int i = 0; i < numDocs; i++) { + if (rarely()) { + continue; // gap in sequence number + } + ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null); + engine.index(new Engine.Index(newUid(doc), doc, i, primaryTerm.get(), 1, null, Engine.Operation.Origin.REPLICA, + System.nanoTime(), -1, false)); + if (rarely()) { + engine.flush(); + } + globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpoint())); + } + engine.syncTranslog(); + engine.flushAndClose(); + readOnlyEngine = new ReadOnlyEngine(engine.engineConfig, null, null, true, Function.identity()); + Engine.CommitId flush = readOnlyEngine.flush(randomBoolean(), randomBoolean()); + assertEquals(flush, readOnlyEngine.flush(randomBoolean(), randomBoolean())); + } finally { + IOUtils.close(readOnlyEngine); + } + } + } + + public void testReadOnly() throws IOException { + IOUtils.close(engine, store); + final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED); + try (Store store = createStore()) { + EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null, globalCheckpoint::get); + store.createEmpty(); + try (ReadOnlyEngine readOnlyEngine = new ReadOnlyEngine(config, null, null, true, Function.identity())) { + Class<? extends Throwable> expectedException = LuceneTestCase.TEST_ASSERTS_ENABLED ? AssertionError.class : + UnsupportedOperationException.class; + expectThrows(expectedException, () -> readOnlyEngine.index(null)); + expectThrows(expectedException, () -> readOnlyEngine.delete(null)); + expectThrows(expectedException, () -> readOnlyEngine.noOp(null)); + expectThrows(UnsupportedOperationException.class, () -> readOnlyEngine.syncFlush(null, null)); + } + } + } +} From ad4b5e427004351ae7e88dadfe4210db437ab764 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 11 Sep 2018 08:35:42 -0400 Subject: [PATCH 89/91] Fix upgrading of list settings (#33589) Upgrading list settings is broken because of the conversion that we do to strings: by the time we try to put back the upgraded value, we no longer know that it is the representation of a list. This commit addresses this by adding special handling for list settings.
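As a sketch of what the new hook makes possible (it mirrors the test added below; the "foo.old"/"foo.new" keys are illustrative), an upgrader can now rename a list setting and transform each element individually, instead of receiving the list collapsed into a single string:

    // Illustrative upgrader for a registered list setting named "foo.old".
    final SettingUpgrader<List<String>> upgrader = new SettingUpgrader<List<String>>() {

        @Override
        public Setting<List<String>> getSetting() {
            return oldSetting; // the registered "foo.old" list setting
        }

        @Override
        public String getKey(final String key) {
            return "foo.new"; // rename the setting on upgrade
        }

        @Override
        public List<String> getListValue(final List<String> values) {
            // upgrade each element while preserving the list representation
            return values.stream().map(s -> "new." + s).collect(Collectors.toList());
        }
    };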
--- .../settings/AbstractScopedSettings.java | 26 ++++++----- .../common/settings/SettingUpgrader.java | 6 +++ .../common/settings/ScopedSettingsTests.java | 44 +++++++++++++++++++ 3 files changed, 65 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index e25d954aa4f1c..b010d7982fd2d 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -54,7 +53,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { private final List<SettingUpdater<?>> settingUpdaters = new CopyOnWriteArrayList<>(); private final Map<String, Setting<?>> complexMatchers; private final Map<String, Setting<?>> keySettings; - private final Map<Setting<?>, Function<Map.Entry<String, String>, Map.Entry<String, String>>> settingUpgraders; + private final Map<Setting<?>, SettingUpgrader<?>> settingUpgraders; private final Setting.Property scope; private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$"); private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$"); @@ -70,12 +69,8 @@ protected AbstractScopedSettings( this.settingUpgraders = Collections.unmodifiableMap( - settingUpgraders - .stream() - .collect( - Collectors.toMap( - SettingUpgrader::getSetting, - u -> e -> new AbstractMap.SimpleEntry<>(u.getKey(e.getKey()), u.getValue(e.getValue()))))); + settingUpgraders.stream().collect(Collectors.toMap(SettingUpgrader::getSetting, Function.identity()))); + this.scope = scope; Map<String, Setting<?>> complexMatchers = new HashMap<>(); @@ -786,15 +781,24 @@ public Settings upgradeSettings(final Settings settings) { boolean changed = false; // track if any settings were upgraded for (final String key : settings.keySet()) { final Setting<?> setting = getRaw(key); - final Function<Map.Entry<String, String>, Map.Entry<String, String>> upgrader = settingUpgraders.get(setting); + final SettingUpgrader<?> upgrader = settingUpgraders.get(setting); if (upgrader == null) { // the setting does not have an upgrader, copy the setting builder.copy(key, settings); } else { // the setting has an upgrader, so mark that we have changed a setting and apply the upgrade logic changed = true; - final Map.Entry<String, String> upgrade = upgrader.apply(new Entry(key, settings)); - builder.put(upgrade.getKey(), upgrade.getValue()); + if (setting.isListSetting()) { + final List<String> value = settings.getAsList(key); + final String upgradedKey = upgrader.getKey(key); + final List<String> upgradedValue = upgrader.getListValue(value); + builder.putList(upgradedKey, upgradedValue); + } else { + final String value = settings.get(key); + final String upgradedKey = upgrader.getKey(key); + final String upgradedValue = upgrader.getValue(value); + builder.put(upgradedKey, upgradedValue); + } } } // we only return a new instance if there was an upgrade diff --git a/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java b/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java index 91f2bead300d3..bc41b55490574 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java +++ b/server/src/main/java/org/elasticsearch/common/settings/SettingUpgrader.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.settings;
+import java.util.List; + /** * Represents the logic to upgrade a setting. * @@ -51,4 +53,8 @@ default String getValue(final String value) { return value; } + default List<String> getListValue(final List<String> value) { + return value; + } + } diff --git a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 0ee1d2e9c4a80..6766316fafd46 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -47,6 +47,7 @@ import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.Function; +import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; @@ -1171,4 +1172,47 @@ public String getValue(final String value) { } } + public void testUpgradeListSetting() { + final Setting<List<String>> oldSetting = + Setting.listSetting("foo.old", Collections.emptyList(), Function.identity(), Property.NodeScope); + final Setting<List<String>> newSetting = + Setting.listSetting("foo.new", Collections.emptyList(), Function.identity(), Property.NodeScope); + + final AbstractScopedSettings service = + new ClusterSettings( + Settings.EMPTY, + new HashSet<>(Arrays.asList(oldSetting, newSetting)), + Collections.singleton(new SettingUpgrader<List<String>>() { + + @Override + public Setting<List<String>> getSetting() { + return oldSetting; + } + + @Override + public String getKey(final String key) { + return "foo.new"; + } + + @Override + public List<String> getListValue(final List<String> value) { + return value.stream().map(s -> "new." + s).collect(Collectors.toList()); + } + })); + + final int length = randomIntBetween(0, 16); + final List<String> values = length == 0 ? Collections.emptyList() : new ArrayList<>(length); + for (int i = 0; i < length; i++) { + values.add(randomAlphaOfLength(8)); + } + + final Settings settings = Settings.builder().putList("foo.old", values).build(); + final Settings upgradedSettings = service.upgradeSettings(settings); + assertFalse(oldSetting.exists(upgradedSettings)); + assertTrue(newSetting.exists(upgradedSettings)); + assertThat( + newSetting.get(upgradedSettings), + equalTo(oldSetting.get(settings).stream().map(s -> "new." + s).collect(Collectors.toList()))); + } + } From 36bdad4895395e6854ffe2b783aec47ec93b4b1f Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 11 Sep 2018 13:38:44 +0100 Subject: [PATCH 90/91] Use IndexWriter.getFlushingBytes() rather than tracking it ourselves (#33582) We currently keep track of how many bytes are being written to disk in an AtomicLong within InternalEngine, updating it on refresh. The IndexWriter has its own accounting for this, and exposes it via a getFlushingBytes() method in the latest Lucene 8 snapshot. This commit removes the InternalEngine tracking in favour of just using the IndexWriter method.
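The heart of the change is that the per-engine counter goes away and the value is computed on demand; a condensed sketch of the resulting accounting (the full diff follows):

    // InternalEngine no longer maintains its own AtomicLong for bytes moving to disk.
    // IndexingMemoryController polls this value across shards to decide on throttling.
    @Override
    public long getWritingBytes() {
        // bytes Lucene is currently flushing + version-map bytes being freed by refresh
        return indexWriter.getFlushingBytes() + versionMap.getRefreshingBytes();
    }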
--- .../elasticsearch/index/engine/InternalEngine.java | 13 +------------ .../elasticsearch/index/engine/LiveVersionMap.java | 8 ++++++++ .../index/engine/LiveVersionMapTests.java | 14 ++++++++++++++ 3 files changed, 23 insertions(+), 12 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index b2ab0d71c32ac..e8f5e41590892 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -152,12 +152,6 @@ public class InternalEngine extends Engine { private final SoftDeletesPolicy softDeletesPolicy; private final LastRefreshedCheckpointListener lastRefreshedCheckpointListener; - /** - * How many bytes we are currently moving to disk, via either IndexWriter.flush or refresh. IndexingMemoryController polls this - * across all shards to decide if throttling is necessary because moving bytes to disk is falling behind vs incoming documents - * being indexed/deleted. - */ - private final AtomicLong writingBytes = new AtomicLong(); private final AtomicBoolean trackTranslogLocation = new AtomicBoolean(false); @Nullable @@ -530,7 +524,7 @@ public String getHistoryUUID() { /** Returns how many bytes we are currently moving from indexing buffer to segments on disk */ @Override public long getWritingBytes() { - return writingBytes.get(); + return indexWriter.getFlushingBytes() + versionMap.getRefreshingBytes(); } /** @@ -1437,9 +1431,6 @@ final void refresh(String source, SearcherScope scope) throws EngineException { // pass the new reader reference to the external reader manager. final long localCheckpointBeforeRefresh = getLocalCheckpoint(); - // this will also cause version map ram to be freed hence we always account for it. - final long bytes = indexWriter.ramBytesUsed() + versionMap.ramBytesUsedForRefresh(); - writingBytes.addAndGet(bytes); try (ReleasableLock lock = readLock.acquire()) { ensureOpen(); if (store.tryIncRef()) { @@ -1465,8 +1456,6 @@ final void refresh(String source, SearcherScope scope) throws EngineException { e.addSuppressed(inner); } throw new RefreshFailedEngineException(shardId, e); - } finally { - writingBytes.addAndGet(-bytes); } assert lastRefreshedCheckpoint() >= localCheckpointBeforeRefresh : "refresh checkpoint was not advanced; " + "local_checkpoint=" + localCheckpointBeforeRefresh + " refresh_checkpoint=" + lastRefreshedCheckpoint(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 18d3cedb37e60..d0dd9466b6075 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -434,6 +434,14 @@ long ramBytesUsedForRefresh() { return maps.current.ramBytesUsed.get(); } + /** + * Returns how much RAM is currently being freed up by refreshing. This is {@link #ramBytesUsed()} + * except it does not include tombstones because they don't clear on refresh. + */ + long getRefreshingBytes() { + return maps.old.ramBytesUsed.get(); + } + @Override public Collection<Accountable> getChildResources() { // TODO: useful to break down RAM usage here?
diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index 286e85cef3fc6..115785b2e7b96 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -41,6 +41,7 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.nullValue; public class LiveVersionMapTests extends ESTestCase { @@ -91,6 +92,19 @@ public void testRamBytesUsed() throws Exception { assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } + public void testRefreshingBytes() throws IOException { + LiveVersionMap map = new LiveVersionMap(); + BytesRefBuilder uid = new BytesRefBuilder(); + uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20)); + try (Releasable r = map.acquireLock(uid.toBytesRef())) { + map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); + } + map.beforeRefresh(); + assertThat(map.getRefreshingBytes(), greaterThan(0L)); + map.afterRefresh(true); + assertThat(map.getRefreshingBytes(), equalTo(0L)); + } + private BytesRef uid(String string) { BytesRefBuilder builder = new BytesRefBuilder(); builder.copyChars(string); From 73c75bef216ee2d0658c7029c7c97e587983caad Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 11 Sep 2018 08:40:22 -0400 Subject: [PATCH 91/91] Preserve cluster settings on full restart tests (#33590) Today the full cluster restart tests do not preserve cluster settings on restart. This is a mistake because it is not an accurate reflection of reality: we do not expect users to clear cluster settings when they perform a full cluster restart. This commit makes it so that all full cluster restart tests preserve settings on upgrade. --- .../upgrades/AbstractFullClusterRestartTestCase.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java index 62c8e2f00ffe5..7e73e795b8a05 100644 --- a/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/upgrades/AbstractFullClusterRestartTestCase.java @@ -57,4 +57,9 @@ protected boolean preserveTemplatesUponCompletion() { return true; } + @Override + protected boolean preserveClusterSettings() { + return true; + } + }
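For illustration, here is the kind of assertion this enables in a concrete full-cluster-restart test. This is a sketch, not part of the change: the setting key is arbitrary, and the Request/entityAsMap/isRunningAgainstOldCluster() helpers are used as I understand the test framework:

    // Hypothetical test: a persistent cluster setting put before the restart should
    // still be visible afterwards, now that the harness no longer wipes settings.
    public void testPersistentSettingSurvivesRestart() throws IOException {
        if (isRunningAgainstOldCluster()) {
            Request request = new Request("PUT", "/_cluster/settings");
            request.setJsonEntity("{\"persistent\": {\"indices.recovery.max_bytes_per_sec\": \"20mb\"}}");
            client().performRequest(request);
        } else {
            Map<String, Object> settings = entityAsMap(client().performRequest(new Request("GET", "/_cluster/settings")));
            assertThat(settings.toString(), containsString("20mb")); // not cleared by the test harness
        }
    }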