[Backport 2.17] Add support to upload snapshot shard blobs with hashed prefix #15649

Merged (5 commits) on Sep 4, 2024
CHANGELOG.md (2 additions, 1 deletion)
@@ -39,7 +39,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- [Workload Management] Add query group level failure tracking ([#15227](https://github.com/opensearch-project/OpenSearch/pull/15527))
- [Reader Writer Separation] Add searchOnly replica routing configuration ([#15410](https://github.com/opensearch-project/OpenSearch/pull/15410))
- Add index creation using the context field ([#15290](https://github.com/opensearch-project/OpenSearch/pull/15290))
- [Remote Publication] Add remote download stats ([#15291](https://github.com/opensearch-project/OpenSearch/pull/15291)))
- [Remote Publication] Add remote download stats ([#15291](https://github.com/opensearch-project/OpenSearch/pull/15291))
- Add support to upload snapshot shard blobs with hashed prefix ([#15426](https://github.com/opensearch-project/OpenSearch/pull/15426))
- Add canRemain method to TargetPoolAllocationDecider to move shards from local to remote pool for hot to warm tiering ([#15010](https://github.com/opensearch-project/OpenSearch/pull/15010))
- Add support for pluggable deciders for concurrent search ([#15363](https://github.com/opensearch-project/OpenSearch/pull/15363))
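
For context, the changelog entry above for [#15426](https://github.com/opensearch-project/OpenSearch/pull/15426) is the feature being backported: snapshot shard blobs can be written under hashed path prefixes instead of the fixed `indices/<index-uuid>/<shard-id>/` layout. The sketch below shows how a test might opt into it when registering a repository; the `shard_path_type` setting name and the `HASHED_PREFIX` value are assumptions taken from the original PR, not something this diff shows, so verify them against #15426 before relying on them.

```java
// Sketch only: assumes the surrounding integration-test context and the
// createRepository(...) helper used in the refactored tests below.
// "shard_path_type" and HASHED_PREFIX are assumed from the original PR (#15426).
Settings.Builder settings = Settings.builder()
    .put(FsRepository.LOCATION_SETTING.getKey(), randomRepoPath())
    .put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
    .put("shard_path_type", "HASHED_PREFIX"); // assumed setting name and value
createRepository("hashed-prefix-repo", FsRepository.TYPE, settings);
```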

@@ -67,19 +67,11 @@ public void testUrlRepository() throws Exception {

logger.info("--> creating repository");
Path repositoryLocation = randomRepoPath();
assertAcked(
client.admin()
.cluster()
.preparePutRepository("test-repo")
.setType(FsRepository.TYPE)
.setSettings(
Settings.builder()
.put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
.put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
.put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES)
)
);

Settings.Builder settings = Settings.builder()
.put(FsRepository.LOCATION_SETTING.getKey(), repositoryLocation)
.put(FsRepository.COMPRESS_SETTING.getKey(), randomBoolean())
.put(FsRepository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(100, 1000), ByteSizeUnit.BYTES);
createRepository("test-repo", FsRepository.TYPE, settings);
createIndex("test-idx");
ensureGreen();

@@ -115,17 +107,10 @@ public void testUrlRepository() throws Exception {
cluster().wipeIndices("test-idx");

logger.info("--> create read-only URL repository");
assertAcked(
client.admin()
.cluster()
.preparePutRepository("url-repo")
.setType(URLRepository.TYPE)
.setSettings(
Settings.builder()
.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString())
.put("list_directories", randomBoolean())
)
);
Settings.Builder settingsBuilder = Settings.builder()
.put(URLRepository.URL_SETTING.getKey(), repositoryLocation.toUri().toURL().toString())
.put("list_directories", randomBoolean());
createRepository("url-repo", URLRepository.TYPE, settingsBuilder);
logger.info("--> restore index after deletion");
RestoreSnapshotResponse restoreSnapshotResponse = client.admin()
.cluster()
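
The refactored test above now delegates to a `createRepository(String, String, Settings.Builder)` helper instead of building the full `preparePutRepository` chain inline. A minimal sketch of what such a helper might look like, assuming it simply wraps the old chain and asserts acknowledgement; the real helper lives in the shared test infrastructure and may differ:

```java
// Hypothetical sketch of the helper the refactored tests call; it mirrors the removed
// inline code rather than the actual implementation in the test base class.
// requires: import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
protected void createRepository(String repoName, String type, Settings.Builder settings) {
    assertAcked(
        client().admin()
            .cluster()
            .preparePutRepository(repoName)
            .setType(type)
            .setSettings(settings)
    );
}
```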
@@ -38,7 +38,6 @@
import com.azure.storage.blob.models.BlobStorageException;
import org.opensearch.action.ActionRunnable;
import org.opensearch.action.support.PlainActionFuture;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.collect.Tuple;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
@@ -47,6 +46,7 @@
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.junit.AfterClass;

import java.net.HttpURLConnection;
@@ -56,7 +56,6 @@
import reactor.core.scheduler.Schedulers;

import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class AzureStorageCleanupThirdPartyTests extends AbstractThirdPartyRepositoryTestCase {
@@ -103,17 +102,11 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository(repoName)
.setType("azure")
.setSettings(
Settings.builder()
.put("container", System.getProperty("test.azure.container"))
.put("base_path", System.getProperty("test.azure.base"))
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("container", System.getProperty("test.azure.container"))
.put("base_path", System.getProperty("test.azure.base"));

OpenSearchIntegTestCase.putRepository(client().admin().cluster(), repoName, "azure", settings);
if (Strings.hasText(System.getProperty("test.azure.sas_token"))) {
ensureSasTokenPermissions();
}
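
The Azure third-party test goes through a static `OpenSearchIntegTestCase.putRepository(...)` helper, which lets test classes that do not extend the integration-test base class share the same repository-registration code. The signature below is inferred from the call sites in this diff and is only a sketch; the actual method may differ:

```java
// Inferred from the call sites in this PR; a sketch, not the actual implementation.
// requires: import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
public static void putRepository(ClusterAdminClient adminClient, String repoName, String type, Settings.Builder settings) {
    assertAcked(adminClient.preparePutRepository(repoName).setType(type).setSettings(settings));
}
```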
@@ -32,19 +32,18 @@

package org.opensearch.repositories.gcs;

import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.core.common.Strings;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.Base64;
import java.util.Collection;

import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class GoogleCloudStorageThirdPartyTests extends AbstractThirdPartyRepositoryTestCase {
@@ -84,16 +83,9 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(final String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("gcs")
.setSettings(
Settings.builder()
.put("bucket", System.getProperty("test.google.bucket"))
.put("base_path", System.getProperty("test.google.base", "/"))
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("bucket", System.getProperty("test.google.bucket"))
.put("base_path", System.getProperty("test.google.base", "/"));
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "gcs", settings);
}
}
@@ -34,12 +34,12 @@
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.settings.MockSecureSettings;
import org.opensearch.common.settings.SecureSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.test.OpenSearchIntegTestCase;

import java.util.Collection;

@@ -61,20 +61,13 @@ protected SecureSettings credentials() {

@Override
protected void createRepository(String repoName) {
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository(repoName)
.setType("hdfs")
.setSettings(
Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean())
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean());
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), repoName, "hdfs", settings);
}

// HDFS repository doesn't have precise cleanup stats so we only check whether or not any blobs were removed
@@ -35,7 +35,6 @@

import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.Client;
import org.opensearch.cluster.ClusterState;
import org.opensearch.common.settings.Settings;
@@ -45,6 +44,7 @@
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.repositories.blobstore.BlobStoreTestUtil;
import org.opensearch.snapshots.SnapshotState;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.threadpool.ThreadPool;

@@ -63,21 +63,13 @@ protected Collection<Class<? extends Plugin>> getPlugins() {

public void testSimpleWorkflow() {
Client client = client();

AcknowledgedResponse putRepositoryResponse = client.admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(
Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean())
)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
Settings.Builder settings = Settings.builder()
.put("uri", "hdfs:///")
.put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName())
.put("path", "foo")
.put("chunk_size", randomIntBetween(100, 1000) + "k")
.put("compress", randomBoolean());
OpenSearchIntegTestCase.putRepository(client.admin().cluster(), "test-repo", "hdfs", settings);

createIndex("test-idx-1");
createIndex("test-idx-2");
@@ -168,7 +160,7 @@ public void testSimpleWorkflow() {

public void testMissingUri() {
try {
client().admin().cluster().preparePutRepository("test-repo").setType("hdfs").setSettings(Settings.EMPTY).get();
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", Settings.builder());
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -178,12 +170,8 @@ public void testSimpleWorkflow() {

public void testEmptyUri() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "/path").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "/path");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -193,12 +181,8 @@ public void testEmptyUri() {

public void testNonHdfsUri() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "file:///").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "file:///");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -208,12 +192,8 @@ public void testNonHdfsUri() {

public void testPathSpecifiedInHdfs() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "hdfs:///some/path").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "hdfs:///some/path");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
@@ -223,12 +203,8 @@ public void testPathSpecifiedInHdfs() {

public void testMissingPath() {
try {
client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("hdfs")
.setSettings(Settings.builder().put("uri", "hdfs:///").build())
.get();
Settings.Builder settings = Settings.builder().put("uri", "hdfs:///");
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", settings);
fail();
} catch (RepositoryException e) {
assertTrue(e.getCause() instanceof IllegalArgumentException);
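
These negative HDFS tests keep their existing try/fail/catch shape and only swap in the shared helper. For comparison, the same check written with `expectThrows` would look like the sketch below; this is not a change the PR makes, just an equivalent formulation:

```java
// Equivalent assertion using expectThrows; shown for comparison only, not part of this PR.
RepositoryException e = expectThrows(
    RepositoryException.class,
    () -> OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "hdfs", Settings.builder())
);
assertTrue(e.getCause() instanceof IllegalArgumentException);
```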
@@ -33,7 +33,6 @@

import software.amazon.awssdk.services.s3.model.StorageClass;

import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.common.SuppressForbidden;
import org.opensearch.common.blobstore.BlobMetadata;
import org.opensearch.common.blobstore.BlobPath;
@@ -43,6 +42,7 @@
import org.opensearch.plugins.Plugin;
import org.opensearch.repositories.AbstractThirdPartyRepositoryTestCase;
import org.opensearch.repositories.blobstore.BlobStoreRepository;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.junit.Before;

import java.util.Collection;
@@ -51,7 +51,6 @@
import java.util.concurrent.TimeUnit;

import static org.hamcrest.Matchers.blankOrNullString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class S3RepositoryThirdPartyTests extends AbstractThirdPartyRepositoryTestCase {
@@ -111,13 +110,7 @@ protected void createRepository(String repoName) {
settings.put("storage_class", storageClass);
}
}
AcknowledgedResponse putRepositoryResponse = client().admin()
.cluster()
.preparePutRepository("test-repo")
.setType("s3")
.setSettings(settings)
.get();
assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), "test-repo", "s3", settings);
}

@Override
@@ -55,6 +55,7 @@
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestResponse;
import org.opensearch.rest.action.admin.cluster.RestGetRepositoriesAction;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.OpenSearchSingleNodeTestCase;
import org.opensearch.test.rest.FakeRestRequest;

@@ -68,7 +69,6 @@

import static org.opensearch.repositories.s3.S3ClientSettings.ACCESS_KEY_SETTING;
import static org.opensearch.repositories.s3.S3ClientSettings.SECRET_KEY_SETTING;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
@@ -277,14 +277,8 @@ public void sendResponse(RestResponse response) {
}

private void createRepository(final String name, final Settings repositorySettings) {
assertAcked(
client().admin()
.cluster()
.preparePutRepository(name)
.setType(S3Repository.TYPE)
.setVerify(false)
.setSettings(repositorySettings)
);
Settings.Builder settings = Settings.builder().put(repositorySettings);
OpenSearchIntegTestCase.putRepository(client().admin().cluster(), name, S3Repository.TYPE, false, settings);
}
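
This call site also passes an explicit `verify` flag (`false`), replacing the old `.setVerify(false)` on the request builder, so it relies on an overload of the helper. The sketch below is inferred from the call above; the real signature in `OpenSearchIntegTestCase` may differ:

```java
// Assumed overload with an explicit verify flag; inferred from the call site above.
// requires: import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
public static void putRepository(ClusterAdminClient adminClient, String repoName, String type, boolean verify, Settings.Builder settings) {
    assertAcked(adminClient.preparePutRepository(repoName).setType(type).setVerify(verify).setSettings(settings));
}
```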

/**