[HUDI-7406] Rename classes to be readable in storage abstraction #10672

Merged

Changes from all commits
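This PR renames the storage abstraction class `org.apache.hudi.storage.HoodieLocation` to `org.apache.hudi.storage.StoragePath` across the CLI, client, and test modules; every call site that builds a path with `HoodieLocation.SEPARATOR` now uses `StoragePath.SEPARATOR`, and import blocks are regrouped accordingly. A minimal sketch of the new call pattern, assuming only the renamed class from this diff (the base path and folder name below are hypothetical, not taken from the PR):

```java
import org.apache.hudi.storage.StoragePath;

public class StoragePathRenameSketch {
  public static void main(String[] args) {
    String basePath = "/tmp/hudi/test_table"; // hypothetical table location
    // Before this PR: basePath + HoodieLocation.SEPARATOR + "archive"
    // After this PR the same path is built with the renamed class:
    String archivePath = basePath + StoragePath.SEPARATOR + "archive";
    // Prints the archive folder path under the table base path.
    System.out.println(archivePath);
  }
}
```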
@@ -18,12 +18,6 @@

package org.apache.hudi.cli.commands;

import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.specific.SpecificData;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hudi.avro.HoodieAvroUtils;
import org.apache.hudi.avro.model.HoodieArchivedMetaEntry;
import org.apache.hudi.avro.model.HoodieCleanMetadata;
@@ -44,8 +38,14 @@
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.avro.specific.SpecificData;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.shell.standard.ShellComponent;
@@ -169,7 +169,7 @@ private int copyArchivedInstants(List<FileStatus> statuses, Set<String> actionSe
LOG.error("Could not load metadata for action " + action + " at instant time " + instantTime);
continue;
}
final String outPath = localFolder + HoodieLocation.SEPARATOR + instantTime + "." + action;
final String outPath = localFolder + StoragePath.SEPARATOR + instantTime + "." + action;
writeToFile(outPath, HoodieAvroUtils.avroToJson(metadata, true));
}
}
@@ -191,7 +191,7 @@ private int copyNonArchivedInstants(List<HoodieInstant> instants, int limit, Str
final HoodieTableMetaClient metaClient = HoodieCLI.getTableMetaClient();
final HoodieActiveTimeline timeline = metaClient.getActiveTimeline();
for (HoodieInstant instant : instants) {
String localPath = localFolder + HoodieLocation.SEPARATOR + instant.getFileName();
String localPath = localFolder + StoragePath.SEPARATOR + instant.getFileName();

byte[] data = null;
switch (instant.getAction()) {
@@ -44,7 +44,7 @@
import org.apache.hudi.common.util.FileIOUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -202,7 +202,7 @@ public void testShowLogFileRecordsWithMerge() throws IOException, InterruptedExc

// write to path '2015/03/16'.
Schema schema = HoodieAvroUtils.addMetadataFields(getSimpleSchema());
partitionPath = tablePath + HoodieLocation.SEPARATOR + HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
partitionPath = tablePath + StoragePath.SEPARATOR + HoodieTestCommitMetadataGenerator.DEFAULT_SECOND_PARTITION_PATH;
Files.createDirectories(Paths.get(partitionPath));

HoodieLogFormat.Writer writer = null;
@@ -32,7 +32,7 @@
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.avro.Schema;
import org.apache.hadoop.fs.FileSystem;
@@ -147,7 +147,7 @@ public void testCreateWithSpecifiedValues() {
assertTrue(ShellEvaluationResultUtil.isSuccess(result));
assertEquals("Metadata for table " + tableName + " loaded", result.toString());
HoodieTableMetaClient client = HoodieCLI.getTableMetaClient();
assertEquals(metaPath + HoodieLocation.SEPARATOR + "archive", client.getArchivePath());
assertEquals(metaPath + StoragePath.SEPARATOR + "archive", client.getArchivePath());
assertEquals(tablePath, client.getBasePath());
assertEquals(metaPath, client.getMetaPath());
assertEquals(HoodieTableType.MERGE_ON_READ, client.getTableType());
@@ -186,7 +186,7 @@ public void testRefresh() throws IOException {
private void testRefreshCommand(String command) throws IOException {
// clean table matedata
FileSystem fs = FileSystem.get(hadoopConf());
fs.delete(new Path(tablePath + HoodieLocation.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME), true);
fs.delete(new Path(tablePath + StoragePath.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME), true);

// Create table
assertTrue(prepareTable());
@@ -26,7 +26,7 @@
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.functional.TestBootstrap;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
@@ -65,16 +65,16 @@ public class ITTestBootstrapCommand extends HoodieCLIIntegrationTestBase {
public void init() {
String srcName = "source";
tableName = "test-table";
sourcePath = basePath + HoodieLocation.SEPARATOR + srcName;
tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
sourcePath = basePath + StoragePath.SEPARATOR + srcName;
tablePath = basePath + StoragePath.SEPARATOR + tableName;

// generate test data
partitions = Arrays.asList("2018", "2019", "2020");
long timestamp = Instant.now().toEpochMilli();
for (int i = 0; i < partitions.size(); i++) {
Dataset<Row> df = TestBootstrap.generateTestRawTripDataset(timestamp,
i * NUM_OF_RECORDS, i * NUM_OF_RECORDS + NUM_OF_RECORDS, null, jsc, sqlContext);
df.write().parquet(sourcePath + HoodieLocation.SEPARATOR + PARTITION_FIELD + "=" + partitions.get(i));
df.write().parquet(sourcePath + StoragePath.SEPARATOR + PARTITION_FIELD + "=" + partitions.get(i));
}
}

@@ -26,7 +26,7 @@
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.testutils.HoodieClientTestUtils;
import org.apache.hudi.utilities.HDFSParquetImporter;
import org.apache.hudi.utilities.functional.TestHDFSParquetImporter;
@@ -77,7 +77,7 @@ public class ITTestHDFSParquetImportCommand extends HoodieCLIIntegrationTestBase
@BeforeEach
public void init() throws IOException, ParseException {
tableName = "test_table";
tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
tablePath = basePath + StoragePath.SEPARATOR + tableName;
sourcePath = new Path(basePath, "source");
targetPath = new Path(tablePath);
schemaFile = new Path(basePath, "file.schema").toString();
Expand Down Expand Up @@ -109,7 +109,7 @@ public void testConvertWithInsert() throws IOException {
() -> assertEquals("Table imported to hoodie format", result.toString()));

// Check hudi table exist
String metaPath = targetPath + HoodieLocation.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
String metaPath = targetPath + StoragePath.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
assertTrue(Files.exists(Paths.get(metaPath)), "Hoodie table not exist.");

// Load meta data
@@ -25,7 +25,7 @@
import org.apache.hudi.common.model.IOType;
import org.apache.hudi.common.table.timeline.versioning.TimelineLayoutVersion;
import org.apache.hudi.common.testutils.FileCreateUtils;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -54,7 +54,7 @@ public class ITTestMarkersCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
String tableName = "test_table";
tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
tablePath = basePath + StoragePath.SEPARATOR + tableName;

// Create table and connect
new TableCommand().createTable(
@@ -33,7 +33,7 @@
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.metadata.HoodieTableMetadata;
import org.apache.hudi.metadata.SparkHoodieBackedTableMetadataWriter;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.BeforeEach;
@@ -66,7 +66,7 @@ public class ITTestSavepointsCommand extends HoodieCLIIntegrationTestBase {
@BeforeEach
public void init() throws IOException {
String tableName = "test_table";
tablePath = basePath + HoodieLocation.SEPARATOR + tableName;
tablePath = basePath + StoragePath.SEPARATOR + tableName;

// Create table and connect
new TableCommand().createTable(
@@ -37,7 +37,7 @@
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
@@ -72,7 +72,7 @@ public class ITTestTableCommand extends HoodieCLIIntegrationTestBase {

@Test
public void testChangeTableCOW2MOR() throws IOException {
tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
tablePath = basePath + StoragePath.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.COPY_ON_WRITE.name(),
@@ -89,7 +89,7 @@ public void testChangeTableCOW2MOR() throws IOException {

@Test
public void testChangeTableMOR2COW() throws IOException {
tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_mor2cow";
tablePath = basePath + StoragePath.SEPARATOR + tableName + "_mor2cow";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -104,7 +104,7 @@ public void testChangeTableMOR2COW() throws IOException {

@Test
public void testChangeTableMOR2COW_withPendingCompactions() throws Exception {
tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
tablePath = basePath + StoragePath.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -136,7 +136,7 @@ public void testChangeTableMOR2COW_withPendingCompactions() throws Exception {

@Test
public void testChangeTableMOR2COW_withFullCompaction() throws Exception {
tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
tablePath = basePath + StoragePath.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -161,7 +161,7 @@ public void testChangeTableMOR2COW_withFullCompaction() throws Exception {

@Test
public void testChangeTableMOR2COW_withoutCompaction() throws Exception {
tablePath = basePath + HoodieLocation.SEPARATOR + tableName + "_cow2mor";
tablePath = basePath + StoragePath.SEPARATOR + tableName + "_cow2mor";
// Create table and connect
new TableCommand().createTable(
tablePath, "test_table", HoodieTableType.MERGE_ON_READ.name(),
@@ -22,7 +22,7 @@
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.table.HoodieTable;

import org.apache.hadoop.fs.FileSystem;
@@ -52,7 +52,7 @@ public static boolean deleteHeartbeatFile(FileSystem fs, String basePath, String
boolean deleted = false;
try {
String heartbeatFolderPath = HoodieTableMetaClient.getHeartbeatFolderPath(basePath);
deleted = fs.delete(new Path(heartbeatFolderPath + HoodieLocation.SEPARATOR + instantTime), false);
deleted = fs.delete(new Path(heartbeatFolderPath + StoragePath.SEPARATOR + instantTime), false);
if (!deleted) {
LOG.error("Failed to delete heartbeat for instant " + instantTime);
} else {
@@ -22,7 +22,7 @@
import org.apache.hudi.common.util.ValidationUtils;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.exception.HoodieHeartbeatException;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -228,7 +228,7 @@ private void stopHeartbeatTimer(Heartbeat heartbeat) {

public static Boolean heartbeatExists(FileSystem fs, String basePath, String instantTime) throws IOException {
Path heartbeatFilePath = new Path(HoodieTableMetaClient.getHeartbeatFolderPath(basePath)
+ HoodieLocation.SEPARATOR + instantTime);
+ StoragePath.SEPARATOR + instantTime);
return fs.exists(heartbeatFilePath);
}

@@ -255,7 +255,7 @@ private void updateHeartbeat(String instantTime) throws HoodieHeartbeatException
try {
Long newHeartbeatTime = System.currentTimeMillis();
OutputStream outputStream =
this.fs.create(new Path(heartbeatFolderPath + HoodieLocation.SEPARATOR + instantTime), true);
this.fs.create(new Path(heartbeatFolderPath + StoragePath.SEPARATOR + instantTime), true);
outputStream.close();
Heartbeat heartbeat = instantToHeartbeatMap.get(instantTime);
if (heartbeat.getLastHeartbeatTime() != null && isHeartbeatExpired(instantTime)) {
@@ -33,7 +33,7 @@
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.exception.HoodieLockException;
import org.apache.hudi.hadoop.fs.HadoopFSUtils;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.storage.StorageSchemes;

import org.apache.hadoop.conf.Configuration;
@@ -78,10 +78,10 @@ public FileSystemBasedLockProvider(final LockConfiguration lockConfiguration, fi
String lockDirectory = lockConfiguration.getConfig().getString(FILESYSTEM_LOCK_PATH_PROP_KEY, null);
if (StringUtils.isNullOrEmpty(lockDirectory)) {
lockDirectory = lockConfiguration.getConfig().getString(HoodieWriteConfig.BASE_PATH.key())
+ HoodieLocation.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
+ StoragePath.SEPARATOR + HoodieTableMetaClient.METAFOLDER_NAME;
}
this.lockTimeoutMinutes = lockConfiguration.getConfig().getInteger(FILESYSTEM_LOCK_EXPIRE_PROP_KEY);
this.lockFile = new Path(lockDirectory + HoodieLocation.SEPARATOR + LOCK_FILE_NAME);
this.lockFile = new Path(lockDirectory + StoragePath.SEPARATOR + LOCK_FILE_NAME);
this.lockInfo = new LockInfo();
this.sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
this.fs = HadoopFSUtils.getFs(this.lockFile.toString(), configuration);
@@ -221,6 +221,6 @@ public static TypedProperties getLockConfig(String tablePath) {
* <p>IMPORTANT: this path should be shared especially when there is engine cooperation.
*/
private static String defaultLockPath(String tablePath) {
return tablePath + HoodieLocation.SEPARATOR + AUXILIARYFOLDER_NAME;
return tablePath + StoragePath.SEPARATOR + AUXILIARYFOLDER_NAME;
}
}
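The `FileSystemBasedLockProvider` hunks above keep the same defaulting logic, only with the renamed separator constant: when no explicit lock directory is configured, the provider falls back to the table base path plus a metadata folder. A hedged sketch of that fallback shape, with hypothetical inputs (the real code reads the folder name from `HoodieTableMetaClient.METAFOLDER_NAME` and the configured value from `FILESYSTEM_LOCK_PATH_PROP_KEY`):

```java
import org.apache.hudi.storage.StoragePath;

public class LockDirectoryFallbackSketch {
  // Same shape as the fallback in FileSystemBasedLockProvider:
  // use the configured lock directory if set, else <base path>/<meta folder>.
  static String lockDirectory(String configuredLockDir, String basePath, String metaFolderName) {
    if (configuredLockDir == null || configuredLockDir.isEmpty()) {
      return basePath + StoragePath.SEPARATOR + metaFolderName;
    }
    return configuredLockDir;
  }

  public static void main(String[] args) {
    // Hypothetical inputs for illustration only.
    System.out.println(lockDirectory(null, "/tmp/hudi/test_table", ".hoodie"));
  }
}
```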
@@ -21,7 +21,7 @@

import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
@@ -47,8 +47,8 @@ public void register(HoodieTableMetaClient metaClient, String indexName, String
LOG.info("Registering index {} of using {}", indexName, indexType);
String indexMetaPath = metaClient.getTableConfig().getIndexDefinitionPath()
.orElseGet(() -> metaClient.getMetaPath()
+ HoodieLocation.SEPARATOR + HoodieTableMetaClient.INDEX_DEFINITION_FOLDER_NAME
+ HoodieLocation.SEPARATOR + HoodieTableMetaClient.INDEX_DEFINITION_FILE_NAME);
+ StoragePath.SEPARATOR + HoodieTableMetaClient.INDEX_DEFINITION_FOLDER_NAME
+ StoragePath.SEPARATOR + HoodieTableMetaClient.INDEX_DEFINITION_FILE_NAME);
// build HoodieFunctionalIndexMetadata and then add to index definition file
metaClient.buildFunctionalIndexDefinition(indexMetaPath, indexName, indexType, columns, options);
// update table config if necessary
@@ -35,7 +35,7 @@
import org.apache.hudi.config.HoodieIndexConfig;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.index.HoodieIndex;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.table.HoodieTable;

import org.apache.hadoop.conf.Configuration;
@@ -66,7 +66,7 @@ public class TestCompletionTimeQueryView {
@Test
void testReadCompletionTime() throws Exception {
String tableName = "testTable";
String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR + tableName;
String tablePath = tempFile.getAbsolutePath() + StoragePath.SEPARATOR + tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareTimeline(tablePath, metaClient);
try (CompletionTimeQueryView view = new CompletionTimeQueryView(metaClient, String.format("%08d", 3))) {
@@ -95,7 +95,7 @@ void testReadCompletionTime() throws Exception {
@Test
void testReadStartTime() throws Exception {
String tableName = "testTable";
String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR + tableName;
String tablePath = tempFile.getAbsolutePath() + StoragePath.SEPARATOR + tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareTimeline(tablePath, metaClient);
try (CompletionTimeQueryView view = new CompletionTimeQueryView(metaClient, String.format("%08d", 3))) {
@@ -39,7 +39,7 @@
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.exception.HoodieCommitException;
import org.apache.hudi.exception.HoodieException;
import org.apache.hudi.storage.HoodieLocation;
import org.apache.hudi.storage.StoragePath;

import org.apache.avro.Schema;
import org.apache.avro.generic.IndexedRecord;
@@ -74,7 +74,7 @@ public class TestLegacyArchivedMetaEntryReader {
@Test
void testReadLegacyArchivedTimeline() throws Exception {
String tableName = "testTable";
String tablePath = tempFile.getAbsolutePath() + HoodieLocation.SEPARATOR + tableName;
String tablePath = tempFile.getAbsolutePath() + StoragePath.SEPARATOR + tableName;
HoodieTableMetaClient metaClient = HoodieTestUtils.init(new Configuration(), tablePath, HoodieTableType.COPY_ON_WRITE, tableName);
prepareLegacyArchivedTimeline(metaClient);
LegacyArchivedMetaEntryReader reader = new LegacyArchivedMetaEntryReader(metaClient);