Skip to content

Commit

Permalink
Merge branch 'main' into 2024_03_14_decimal_avg
Browse files Browse the repository at this point in the history
  • Loading branch information
liujiayi771 committed Mar 16, 2024
2 parents 5a31e26 + da137d6 commit f2a213f
Show file tree
Hide file tree
Showing 35 changed files with 976 additions and 166 deletions.
22 changes: 19 additions & 3 deletions .github/workflows/velox_be.yml
Original file line number Diff line number Diff line change
Expand Up @@ -450,11 +450,27 @@ jobs:
--local --preset=velox --benchmark-type=h --error-on-memleak --off-heap-size=10g -s=1.0 --threads=16 --iterations=1 \
&& GLUTEN_IT_JVM_ARGS=-Xmx20G sbin/gluten-it.sh queries-compare \
--local --preset=velox --benchmark-type=ds --error-on-memleak --off-heap-size=40g -s=10.0 --threads=32 --iterations=1'
- name: TPC-H SF1.0 && TPC-DS SF10.0 Parquet local spark3.2 with Celeborn
- name: TPC-H SF1.0 && TPC-DS SF10.0 Parquet local spark3.2 with Celeborn 0.4.0
run: |
$PATH_TO_GLUTEN_TE/$OS_IMAGE_NAME/gha/gha-checkout/exec.sh \
'wget https://archive.apache.org/dist/incubator/celeborn/celeborn-0.3.0-incubating/apache-celeborn-0.3.0-incubating-bin.tgz && \
tar xzf apache-celeborn-0.3.0-incubating-bin.tgz -C /opt/ && mv /opt/apache-celeborn-0.3.0-incubating-bin /opt/celeborn && cd /opt/celeborn && \
'wget https://archive.apache.org/dist/incubator/celeborn/celeborn-0.4.0-incubating/apache-celeborn-0.4.0-incubating-bin.tgz && \
tar xzf apache-celeborn-0.4.0-incubating-bin.tgz -C /opt/ && mv /opt/apache-celeborn-0.4.0-incubating-bin /opt/celeborn && cd /opt/celeborn && \
mv ./conf/celeborn-env.sh.template ./conf/celeborn-env.sh && \
echo -e "CELEBORN_MASTER_MEMORY=4g\nCELEBORN_WORKER_MEMORY=4g\nCELEBORN_WORKER_OFFHEAP_MEMORY=8g" > ./conf/celeborn-env.sh && \
echo -e "celeborn.worker.commitFiles.threads 128\nceleborn.worker.sortPartition.threads 64" > ./conf/celeborn-defaults.conf \
&& bash ./sbin/start-master.sh && bash ./sbin/start-worker.sh && \
cd /opt/gluten/tools/gluten-it && mvn clean install -Pspark-3.2,rss,celeborn-0.4 \
&& GLUTEN_IT_JVM_ARGS=-Xmx5G sbin/gluten-it.sh queries-compare \
--local --preset=velox-with-celeborn --benchmark-type=h --error-on-memleak --off-heap-size=10g -s=1.0 --threads=16 --iterations=1 \
&& GLUTEN_IT_JVM_ARGS=-Xmx5G sbin/gluten-it.sh queries-compare \
--local --preset=velox-with-celeborn --benchmark-type=ds --error-on-memleak --off-heap-size=10g -s=1.0 --threads=16 --iterations=1 && \
bash /opt/celeborn/sbin/stop-worker.sh \
&& bash /opt/celeborn/sbin/stop-master.sh && rm -rf /opt/celeborn'
- name: TPC-H SF1.0 && TPC-DS SF10.0 Parquet local spark3.2 with Celeborn 0.3.2
run: |
$PATH_TO_GLUTEN_TE/$OS_IMAGE_NAME/gha/gha-checkout/exec.sh \
'wget https://archive.apache.org/dist/incubator/celeborn/celeborn-0.3.2-incubating/apache-celeborn-0.3.2-incubating-bin.tgz && \
tar xzf apache-celeborn-0.3.2-incubating-bin.tgz -C /opt/ && mv /opt/apache-celeborn-0.3.2-incubating-bin /opt/celeborn && cd /opt/celeborn && \
mv ./conf/celeborn-env.sh.template ./conf/celeborn-env.sh && \
echo -e "CELEBORN_MASTER_MEMORY=4g\nCELEBORN_WORKER_MEMORY=4g\nCELEBORN_WORKER_OFFHEAP_MEMORY=8g" > ./conf/celeborn-env.sh && \
echo -e "celeborn.worker.commitFiles.threads 128\nceleborn.worker.sortPartition.threads 64" > ./conf/celeborn-defaults.conf \
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -607,7 +607,7 @@ class GlutenFunctionValidateSuite extends GlutenClickHouseWholeStageTransformerS
runQueryAndCompare(
"select id, if(id % 2 = 0, sum(id), max(id)) as s1, " +
"if(id %2 = 0, sum(id+1), sum(id+2)) as s2 from range(10) group by id") {
df => checkOperatorCount[ProjectExecTransformer](1)(df)
df => checkOperatorCount[ProjectExecTransformer](2)(df)
}

// CSE in sort
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,13 @@ class SharedLibraryLoaderCentos7 extends SharedLibraryLoader {
override def loadLib(loader: JniLibLoader): Unit = {
loader
.newTransaction()
.loadAndCreateLink("libboost_thread.so.1.72.0", "libboost_thread.so", false)
.loadAndCreateLink("libboost_system.so.1.72.0", "libboost_system.so", false)
.loadAndCreateLink("libboost_regex.so.1.72.0", "libboost_regex.so", false)
.loadAndCreateLink("libboost_program_options.so.1.72.0", "libboost_program_options.so", false)
.loadAndCreateLink("libboost_filesystem.so.1.72.0", "libboost_filesystem.so", false)
.loadAndCreateLink("libboost_context.so.1.72.0", "libboost_context.so", false)
.loadAndCreateLink("libboost_atomic.so.1.84.0", "libboost_atomic.so", false)
.loadAndCreateLink("libboost_thread.so.1.84.0", "libboost_thread.so", false)
.loadAndCreateLink("libboost_system.so.1.84.0", "libboost_system.so", false)
.loadAndCreateLink("libboost_regex.so.1.84.0", "libboost_regex.so", false)
.loadAndCreateLink("libboost_program_options.so.1.84.0", "libboost_program_options.so", false)
.loadAndCreateLink("libboost_filesystem.so.1.84.0", "libboost_filesystem.so", false)
.loadAndCreateLink("libboost_context.so.1.84.0", "libboost_context.so", false)
.loadAndCreateLink("libdouble-conversion.so.1", "libdouble-conversion.so", false)
.loadAndCreateLink("libevent-2.0.so.5", "libevent-2.0.so", false)
.loadAndCreateLink("libgflags.so.2.2", "libgflags.so", false)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,16 @@ class SharedLibraryLoaderCentos8 extends SharedLibraryLoader {
override def loadLib(loader: JniLibLoader): Unit = {
loader
.newTransaction()
.loadAndCreateLink("libboost_thread.so.1.72.0", "libboost_thread.so", false)
.loadAndCreateLink("libboost_system.so.1.72.0", "libboost_system.so", false)
.loadAndCreateLink("libboost_atomic.so.1.84.0", "libboost_atomic.so", false)
.loadAndCreateLink("libboost_thread.so.1.84.0", "libboost_thread.so", false)
.loadAndCreateLink("libboost_system.so.1.84.0", "libboost_system.so", false)
.loadAndCreateLink("libicudata.so.60", "libicudata.so", false)
.loadAndCreateLink("libicuuc.so.60", "libicuuc.so", false)
.loadAndCreateLink("libicui18n.so.60", "libicui18n.so", false)
.loadAndCreateLink("libboost_regex.so.1.72.0", "libboost_regex.so", false)
.loadAndCreateLink("libboost_program_options.so.1.72.0", "libboost_program_options.so", false)
.loadAndCreateLink("libboost_filesystem.so.1.72.0", "libboost_filesystem.so", false)
.loadAndCreateLink("libboost_context.so.1.72.0", "libboost_context.so", false)
.loadAndCreateLink("libboost_regex.so.1.84.0", "libboost_regex.so", false)
.loadAndCreateLink("libboost_program_options.so.1.84.0", "libboost_program_options.so", false)
.loadAndCreateLink("libboost_filesystem.so.1.84.0", "libboost_filesystem.so", false)
.loadAndCreateLink("libboost_context.so.1.84.0", "libboost_context.so", false)
.loadAndCreateLink("libdouble-conversion.so.3", "libdouble-conversion.so", false)
.loadAndCreateLink("libevent-2.1.so.6", "libevent-2.1.so", false)
.loadAndCreateLink("libgflags.so.2.2", "libgflags.so", false)
Expand Down
4 changes: 2 additions & 2 deletions cpp-ch/clickhouse.version
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
CH_ORG=Kyligence
CH_BRANCH=rebase_ch/20240314
CH_COMMIT=3ddade3ed79
CH_BRANCH=rebase_ch/20240315
CH_COMMIT=1240fda3f62
1 change: 1 addition & 0 deletions cpp/velox/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -296,6 +296,7 @@ set(VELOX_SRCS
compute/VeloxRuntime.cc
compute/WholeStageResultIterator.cc
compute/VeloxPlanConverter.cc
compute/iceberg/IcebergPlanConverter.cc
jni/VeloxJniWrapper.cc
jni/JniFileSystem.cc
jni/JniUdf.cc
Expand Down
4 changes: 4 additions & 0 deletions cpp/velox/compute/VeloxPlanConverter.cc
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@

#include "compute/ResultIterator.h"
#include "config/GlutenConfig.h"
#include "iceberg/IcebergPlanConverter.h"
#include "operators/plannodes/RowVectorStream.h"
#include "velox/common/file/FileSystems.h"

Expand Down Expand Up @@ -93,6 +94,9 @@ std::shared_ptr<SplitInfo> parseScanSplitInfo(
case SubstraitFileFormatCase::kText:
splitInfo->format = dwio::common::FileFormat::TEXT;
break;
case SubstraitFileFormatCase::kIceberg:
splitInfo = IcebergPlanConverter::parseIcebergSplitInfo(file, std::move(splitInfo));
break;
default:
splitInfo->format = dwio::common::FileFormat::UNKNOWN;
break;
Expand Down
44 changes: 31 additions & 13 deletions cpp/velox/compute/WholeStageResultIterator.cc
Original file line number Diff line number Diff line change
Expand Up @@ -154,19 +154,37 @@ WholeStageResultIterator::WholeStageResultIterator(
auto metadataColumn = metadataColumns[idx];
std::unordered_map<std::string, std::optional<std::string>> partitionKeys;
constructPartitionColumns(partitionKeys, partitionColumn);
auto split = std::make_shared<velox::connector::hive::HiveConnectorSplit>(
kHiveConnectorId,
paths[idx],
format,
starts[idx],
lengths[idx],
partitionKeys,
std::nullopt,
std::unordered_map<std::string, std::string>(),
nullptr,
std::unordered_map<std::string, std::string>(),
0,
metadataColumn);
std::shared_ptr<velox::connector::ConnectorSplit> split;
if (auto icebergSplitInfo = std::dynamic_pointer_cast<IcebergSplitInfo>(scanInfo)) {
// Set Iceberg split.
std::unordered_map<std::string, std::string> customSplitInfo{{"table_format", "hive-iceberg"}};
auto deleteFiles = icebergSplitInfo->deleteFilesVec[idx];
split = std::make_shared<velox::connector::hive::iceberg::HiveIcebergSplit>(
kHiveConnectorId,
paths[idx],
format,
starts[idx],
lengths[idx],
partitionKeys,
std::nullopt,
customSplitInfo,
nullptr,
deleteFiles);
} else {
split = std::make_shared<velox::connector::hive::HiveConnectorSplit>(
kHiveConnectorId,
paths[idx],
format,
starts[idx],
lengths[idx],
partitionKeys,
std::nullopt,
std::unordered_map<std::string, std::string>(),
nullptr,
std::unordered_map<std::string, std::string>(),
0,
metadataColumn);
}
connectorSplits.emplace_back(split);
}

Expand Down
2 changes: 2 additions & 0 deletions cpp/velox/compute/WholeStageResultIterator.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,13 @@
#pragma once

#include "compute/Runtime.h"
#include "iceberg/IcebergPlanConverter.h"
#include "memory/ColumnarBatchIterator.h"
#include "memory/VeloxColumnarBatch.h"
#include "substrait/SubstraitToVeloxPlan.h"
#include "substrait/plan.pb.h"
#include "utils/metrics.h"
#include "velox/connectors/hive/iceberg/IcebergSplit.h"
#include "velox/core/Config.h"
#include "velox/core/PlanNode.h"
#include "velox/exec/Task.h"
Expand Down
84 changes: 84 additions & 0 deletions cpp/velox/compute/iceberg/IcebergPlanConverter.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#include "IcebergPlanConverter.h"

namespace gluten {

// Converts the Iceberg-specific read options of a single Substrait data file into an
// IcebergSplitInfo: resolves the data-file format and collects any associated delete
// files (positional or equality deletes).
//
// Invoked once per data file of a scan; the (possibly already upgraded) SplitInfo is
// threaded through so that all files of the scan accumulate into one IcebergSplitInfo.
std::shared_ptr<IcebergSplitInfo> IcebergPlanConverter::parseIcebergSplitInfo(
    substrait::ReadRel_LocalFiles_FileOrFiles file,
    std::shared_ptr<SplitInfo> splitInfo) {
  using SubstraitFileFormatCase = ::substrait::ReadRel_LocalFiles_FileOrFiles::IcebergReadOptions::FileFormatCase;
  using SubstraitDeleteFileFormatCase =
      ::substrait::ReadRel_LocalFiles_FileOrFiles::IcebergReadOptions::DeleteFile::FileFormatCase;
  // Reuse the split info when it is already Iceberg-typed (subsequent files of the same
  // scan); otherwise upgrade the plain SplitInfo. Cast once instead of twice.
  auto icebergSplitInfo = std::dynamic_pointer_cast<IcebergSplitInfo>(splitInfo);
  if (!icebergSplitInfo) {
    icebergSplitInfo = std::make_shared<IcebergSplitInfo>(*splitInfo);
  }
  // Bind by const reference to avoid copying the protobuf message.
  const auto& icebergReadOption = file.iceberg();
  switch (icebergReadOption.file_format_case()) {
    case SubstraitFileFormatCase::kParquet:
      icebergSplitInfo->format = dwio::common::FileFormat::PARQUET;
      break;
    case SubstraitFileFormatCase::kOrc:
      icebergSplitInfo->format = dwio::common::FileFormat::ORC;
      break;
    default:
      icebergSplitInfo->format = dwio::common::FileFormat::UNKNOWN;
      break;
  }
  if (icebergReadOption.delete_files_size() > 0) {
    std::vector<IcebergDeleteFile> deletes;
    deletes.reserve(icebergReadOption.delete_files_size());
    for (const auto& deleteFile : icebergReadOption.delete_files()) {
      dwio::common::FileFormat format;
      FileContent fileContent;
      switch (deleteFile.file_format_case()) {
        case SubstraitDeleteFileFormatCase::kParquet:
          format = dwio::common::FileFormat::PARQUET;
          break;
        case SubstraitDeleteFileFormatCase::kOrc:
          format = dwio::common::FileFormat::ORC;
          break;
        default:
          format = dwio::common::FileFormat::UNKNOWN;
          break;
      }
      switch (deleteFile.filecontent()) {
        case ::substrait::ReadRel_LocalFiles_FileOrFiles_IcebergReadOptions_FileContent_POSITION_DELETES:
          fileContent = FileContent::kPositionalDeletes;
          break;
        case ::substrait::ReadRel_LocalFiles_FileOrFiles_IcebergReadOptions_FileContent_EQUALITY_DELETES:
          fileContent = FileContent::kEqualityDeletes;
          break;
        default:
          fileContent = FileContent::kData;
          break;
      }
      // Construct in place; no intermediate IcebergDeleteFile temporary.
      deletes.emplace_back(
          fileContent, deleteFile.filepath(), format, deleteFile.recordcount(), deleteFile.filesize());
    }
    icebergSplitInfo->deleteFilesVec.emplace_back(std::move(deletes));
  } else {
    // Keep deleteFilesVec aligned with the data files: one (possibly empty) entry per
    // data file, so that index idx in the caller matches paths[idx].
    icebergSplitInfo->deleteFilesVec.emplace_back();
  }

  return icebergSplitInfo;
}

} // namespace gluten
42 changes: 42 additions & 0 deletions cpp/velox/compute/iceberg/IcebergPlanConverter.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#pragma once

#include "substrait/SubstraitToVeloxPlan.h"
#include "velox/connectors/hive/iceberg/IcebergDeleteFile.h"

// NOTE(review): a `using namespace` at global scope in a header leaks
// facebook::velox::connector::hive::iceberg into every translation unit that
// includes this file. Consider qualifying the names instead (would also require
// updating IcebergPlanConverter.cc, which relies on this directive).
using namespace facebook::velox::connector::hive::iceberg;

namespace gluten {
// SplitInfo specialization for Iceberg table scans: in addition to the base
// per-file metadata it carries, for each data file, the list of Iceberg delete
// files (positional/equality deletes) that must be applied when reading it.
struct IcebergSplitInfo : SplitInfo {
  // One entry per data file, index-aligned with SplitInfo::paths. An empty inner
  // vector means the corresponding data file has no delete files.
  std::vector<std::vector<IcebergDeleteFile>> deleteFilesVec;

  IcebergSplitInfo(const SplitInfo& splitInfo) : SplitInfo(splitInfo) {
    // Pre-size to the expected number of data files. NOTE(review): this uses
    // paths.capacity(), not paths.size() — presumably because the caller reserves
    // paths up front before files are parsed; confirm, otherwise size() is the
    // "actual size" the comment intends.
    deleteFilesVec.reserve(splitInfo.paths.capacity());
  }
};

// Stateless converter from Substrait Iceberg read options to IcebergSplitInfo.
class IcebergPlanConverter {
 public:
  // Parses the Iceberg options of `file` (format + delete files) into `splitInfo`,
  // upgrading it to an IcebergSplitInfo if it is not one already.
  static std::shared_ptr<IcebergSplitInfo> parseIcebergSplitInfo(
      substrait::ReadRel_LocalFiles_FileOrFiles file,
      std::shared_ptr<SplitInfo> splitInfo);
};

} // namespace gluten
3 changes: 3 additions & 0 deletions cpp/velox/substrait/SubstraitToVeloxPlan.h
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,9 @@ struct SplitInfo {

/// The file format of the files to be scanned.
dwio::common::FileFormat format;

  /// Virtual destructor so SplitInfo can be safely deleted through a base pointer
  /// by subclasses (e.g. IcebergSplitInfo), making the type polymorphic.
virtual ~SplitInfo() = default;
};

/// This class is used to convert the Substrait plan into Velox plan.
Expand Down
4 changes: 2 additions & 2 deletions dev/package.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,13 +29,13 @@ function process_setup_ubuntu_2204 {

function process_setup_centos_8 {
cp /usr/lib64/{libre2.so.0,libdouble-conversion.so.3,libgflags.so.2.2,libglog.so.0,libevent-2.1.so.6,libdwarf.so.1,libgsasl.so.7,libicudata.so.60,libicui18n.so.60,libicuuc.so.60,libidn.so.11,libntlm.so.0,libsodium.so} $THIRDPARTY_LIB/
cp /usr/local/lib/{libhdfs3.so.1,libboost_context.so.1.72.0,libboost_filesystem.so.1.72.0,libboost_program_options.so.1.72.0,libboost_regex.so.1.72.0,libboost_system.so.1.72.0,libboost_thread.so.1.72.0,libprotobuf.so.32} $THIRDPARTY_LIB/
cp /usr/local/lib/{libhdfs3.so.1,libboost_context.so.1.84.0,libboost_filesystem.so.1.84.0,libboost_program_options.so.1.84.0,libboost_regex.so.1.84.0,libboost_system.so.1.84.0,libboost_thread.so.1.84.0,libboost_atomic.so.1.84.0,libprotobuf.so.32} $THIRDPARTY_LIB/
}

function process_setup_centos_7 {
cp /usr/local/lib64/{libgflags.so.2.2,libglog.so.0} $THIRDPARTY_LIB/
cp /usr/lib64/{libdouble-conversion.so.1,libevent-2.0.so.5,libzstd.so.1,libntlm.so.0,libgsasl.so.7,liblz4.so.1} $THIRDPARTY_LIB/
cp /usr/local/lib/{libre2.so.10,libhdfs3.so.1,libboost_context.so.1.72.0,libboost_filesystem.so.1.72.0,libboost_program_options.so.1.72.0,libboost_system.so.1.72.0,libboost_thread.so.1.72.0,libboost_regex.so.1.72.0,libprotobuf.so.32} $THIRDPARTY_LIB/
cp /usr/local/lib/{libre2.so.10,libhdfs3.so.1,libboost_context.so.1.84.0,libboost_filesystem.so.1.84.0,libboost_program_options.so.1.84.0,libboost_system.so.1.84.0,libboost_thread.so.1.84.0,libboost_regex.so.1.84.0,libboost_atomic.so.1.84.0,libprotobuf.so.32} $THIRDPARTY_LIB/
}

function process_setup_debian_11 {
Expand Down
1 change: 1 addition & 0 deletions docs/get-started/ClickHouse.md
Original file line number Diff line number Diff line change
Expand Up @@ -671,6 +671,7 @@ spark.dynamicAllocation.enabled false
```

#### Celeborn Columnar Shuffle Support
Currently, the supported Celeborn versions are `0.3.x` and `0.4.0`.
The native Celeborn support can be enabled by the following configuration
```
spark.shuffle.manager=org.apache.spark.shuffle.gluten.celeborn.CelebornShuffleManager
Expand Down
4 changes: 3 additions & 1 deletion docs/get-started/Velox.md
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,9 @@ Currently there are several ways to asscess S3 in Spark. Please refer [Velox S3]

## Celeborn support

Gluten with velox backend supports [Celeborn](https://github.com/apache/incubator-celeborn) as remote shuffle service. Below introduction is used to enable this feature
Gluten with velox backend supports [Celeborn](https://github.com/apache/incubator-celeborn) as remote shuffle service. Currently, the supported Celeborn versions are `0.3.x` and `0.4.0`.

The following instructions describe how to enable this feature.

First, refer to this URL (https://github.com/apache/incubator-celeborn) to set up a Celeborn cluster.

Expand Down
1 change: 0 additions & 1 deletion ep/build-velox/src/get_velox.sh
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,6 @@ function process_setup_centos7 {
# install gtest
sed -i '/^ run_and_time install_folly/a \ \ run_and_time install_gtest' scripts/setup-centos7.sh

sed -i "s/boostorg.jfrog.io\/artifactory\/main\/release\/1.72.0\/source\/boost_1_72_0.tar.gz/src.fedoraproject.org\/repo\/pkgs\/boost\/boost_1_72_0-snapshot.tar.gz\/sha512\/b91d96e0fd76cdfb2accadedde85a7d005d7f8ccdfde50c7f195bd5ea1f0c203520d5dac1fca33f38f20ff484f8400303226d6febb31f644ebb4d9809f91088a\/boost_1_72_0-snapshot.tar.gz/" scripts/setup-centos7.sh
if [ $ENABLE_HDFS = "ON" ]; then
sed -i '/^function install_protobuf.*/i function install_libhdfs3 {\n cd "\${DEPENDENCY_DIR}"\n github_checkout oap-project/libhdfs3 master \n cmake_install\n}\n' scripts/setup-centos7.sh
sed -i '/^ run_and_time install_folly/a \ \ run_and_time install_libhdfs3' scripts/setup-centos7.sh
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,14 @@ import java.util
import java.util.Locale

class CHCelebornHashBasedColumnarShuffleWriter[K, V](
shuffleId: Int,
handle: CelebornShuffleHandle[K, V, V],
context: TaskContext,
celebornConf: CelebornConf,
client: ShuffleClient,
writeMetrics: ShuffleWriteMetricsReporter)
extends CelebornHashBasedColumnarShuffleWriter[K, V](
shuffleId: Int,
handle,
context,
celebornConf,
Expand Down Expand Up @@ -90,7 +92,7 @@ class CHCelebornHashBasedColumnarShuffleWriter[K, V](
"allocations from make() to split()")
}
logInfo(s"Gluten shuffle writer: Trying to push $size bytes of data")
val spilled = jniWrapper.evict(nativeShuffleWriter);
val spilled = jniWrapper.evict(nativeShuffleWriter)
logInfo(s"Gluten shuffle writer: Spilled $spilled / $size bytes of data")
spilled
}
Expand Down
Loading

0 comments on commit f2a213f

Please sign in to comment.