From 2aa2e5f5f38f1118452783f190bf1ee5148eb5f5 Mon Sep 17 00:00:00 2001
From: Yue Deng
Date: Wed, 29 May 2024 14:20:02 +0800
Subject: [PATCH] fix: compatibility with datafusion

---
 .../writer/base_writer/equality_delete_writer.rs  | 15 +++++----------
 1 file changed, 5 insertions(+), 10 deletions(-)

diff --git a/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs b/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
index 6a7936e06..ca1238544 100644
--- a/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
+++ b/crates/iceberg/src/writer/base_writer/equality_delete_writer.rs
@@ -232,7 +232,6 @@ impl FieldProjector {
 mod test {
     use arrow_select::concat::concat_batches;
     use bytes::Bytes;
-    use futures::AsyncReadExt;
     use itertools::Itertools;
     use std::{collections::HashMap, sync::Arc};
 
@@ -266,15 +265,11 @@ mod test {
         assert_eq!(data_file.file_format, DataFileFormat::Parquet);
 
         // read the written file
-        let mut input_file = file_io
-            .new_input(data_file.file_path.clone())
-            .unwrap()
-            .reader()
-            .await
-            .unwrap();
-        let mut res = vec![];
-        let file_size = input_file.read_to_end(&mut res).await.unwrap();
-        let reader_builder = ParquetRecordBatchReaderBuilder::try_new(Bytes::from(res)).unwrap();
+        let input_file = file_io.new_input(data_file.file_path.clone()).unwrap();
+        // read the written file
+        let input_content = input_file.read().await.unwrap();
+        let reader_builder =
+            ParquetRecordBatchReaderBuilder::try_new(input_content.clone()).unwrap();
         let metadata = reader_builder.metadata().clone();
 
         // check data