forked from delta-io/delta
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[UniForm] Cast Iceberg TIME to Spark Long (delta-io#4093)
<!-- Thanks for sending a pull request! Here are some tips for you: 1. If this is your first time, please read our contributor guidelines: https://github.com/delta-io/delta/blob/master/CONTRIBUTING.md 2. If the PR is unfinished, add '[WIP]' in your PR title, e.g., '[WIP] Your PR title ...'. 3. Be sure to keep the PR description updated to reflect all changes. 4. Please write your PR title to summarize what this PR proposes. 5. If possible, provide a concise example to reproduce the issue for a faster review. 6. If applicable, include the corresponding issue number in the PR title and link it in the body. --> #### Which Delta project/connector is this regarding? <!-- Please add the component selected below to the beginning of the pull request title For example: [Spark] Title of my pull request --> - [x] Spark - [ ] Standalone - [ ] Flink - [ ] Kernel - [ ] Other (fill in here) ## Description This PR introduces the feature that allows UniForm to cast Iceberg TIME type to Spark Long data type. The result will be represented as the microseconds since midnight. <!-- - Describe what this PR changes. - Describe why we need the change. If this PR resolves an issue be sure to include "Resolves #XXX" to correctly link and close the issue upon merge. --> ## How was this patch tested? UT <!-- If tests were added, say they were added here. Please make sure to test the changes thoroughly including negative and positive cases if possible. If the changes were tested in any way other than unit tests, please clarify how you tested step by step (ideally copy and paste-able, so that other reviewers can test and check, and descendants can verify in the future). If the changes were not tested, please explain why. --> ## Does this PR introduce _any_ user-facing changes? No <!-- If yes, please clarify the previous behavior and the change this PR proposes - provide the console output, description and/or an example to show the behavior difference if possible. 
If possible, please also clarify if this is a user-facing change compared to the released Delta Lake versions or within the unreleased branches such as master. If no, write 'No'. -->
- Loading branch information
1 parent
0b7eed9
commit 2ffdbbf
Showing
10 changed files
with
330 additions
and
23 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
127 changes: 127 additions & 0 deletions
127
iceberg/src/main/scala/org/apache/spark/sql/delta/TypeToSparkTypeWithCustomCast.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,127 @@ | ||
/* | ||
* Copyright (2021) The Delta Lake Project Authors. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.sql.delta.commands.convert | ||
|
||
import java.util | ||
|
||
import scala.collection.JavaConverters._ | ||
|
||
import org.apache.iceberg.MetadataColumns | ||
import org.apache.iceberg.Schema | ||
import org.apache.iceberg.relocated.com.google.common.collect.Lists | ||
import org.apache.iceberg.types.Type | ||
import org.apache.iceberg.types.Type.TypeID._ | ||
import org.apache.iceberg.types.Types | ||
import org.apache.iceberg.types.TypeUtil | ||
|
||
import org.apache.spark.sql.types.ArrayType | ||
import org.apache.spark.sql.types.BinaryType | ||
import org.apache.spark.sql.types.BooleanType | ||
import org.apache.spark.sql.types.DataType | ||
import org.apache.spark.sql.types.DateType | ||
import org.apache.spark.sql.types.DecimalType | ||
import org.apache.spark.sql.types.DoubleType | ||
import org.apache.spark.sql.types.FloatType | ||
import org.apache.spark.sql.types.IntegerType | ||
import org.apache.spark.sql.types.LongType | ||
import org.apache.spark.sql.types.MapType | ||
import org.apache.spark.sql.types.Metadata | ||
import org.apache.spark.sql.types.MetadataBuilder | ||
import org.apache.spark.sql.types.StringType | ||
import org.apache.spark.sql.types.StructField | ||
import org.apache.spark.sql.types.StructType | ||
import org.apache.spark.sql.types.TimestampNTZType | ||
import org.apache.spark.sql.types.TimestampType | ||
|
||
/** | ||
* This class is copied from [[org.apache.iceberg.spark.TypeToSparkType]] to | ||
* add custom type casting. Currently, it supports the following casting | ||
* * Iceberg TIME -> Spark Long | ||
* | ||
*/ | ||
/**
 * A schema visitor copied from [[org.apache.iceberg.spark.TypeToSparkType]] in order to
 * add custom type casting. Currently it supports the following custom cast:
 *   - Iceberg TIME -> Spark Long (microseconds since midnight)
 */
class TypeToSparkTypeWithCustomCast extends TypeUtil.SchemaVisitor[DataType] {

  // Metadata key Spark uses to flag a column as a metadata column.
  val METADATA_COL_ATTR_KEY = "__metadata_col"

  override def schema(schema: Schema, structType: DataType): DataType = structType

  override def struct(struct: Types.StructType, fieldResults: util.List[DataType]): DataType = {
    val icebergFields = struct.fields()
    // Pre-sized list holding the converted Spark fields, one per visited result.
    val convertedFields: util.List[StructField] =
      Lists.newArrayListWithExpectedSize(fieldResults.size())
    (0 until icebergFields.size()).foreach { idx =>
      val icebergField = icebergFields.get(idx)
      val sparkType = fieldResults.get(idx)
      val metadata = fieldMetadata(icebergField.fieldId())
      val baseField =
        StructField(icebergField.name(), sparkType, icebergField.isOptional(), metadata)
      // Carry over the Iceberg field doc as a Spark column comment when present.
      val finalField = Option(icebergField.doc()).map(baseField.withComment).getOrElse(baseField)
      convertedFields.add(finalField)
    }

    StructType(convertedFields)
  }

  override def field(field: Types.NestedField, fieldResult: DataType): DataType = fieldResult

  override def list(list: Types.ListType, elementResult: DataType): DataType =
    ArrayType(elementResult, list.isElementOptional())

  override def map(map: Types.MapType, keyResult: DataType, valueResult: DataType): DataType =
    MapType(keyResult, valueResult, map.isValueOptional())

  override def primitive(primitive: Type.PrimitiveType): DataType =
    primitive.typeId() match {
      case BOOLEAN => BooleanType
      case INTEGER => IntegerType
      case LONG => LongType
      case FLOAT => FloatType
      case DOUBLE => DoubleType
      case DATE => DateType
      // Custom cast: Iceberg TIME becomes a Spark Long holding
      // the number of microseconds since midnight.
      case TIME => LongType
      case TIMESTAMP =>
        // Zone-adjusted Iceberg timestamps map to TimestampType;
        // local (non-adjusted) ones map to TimestampNTZType.
        if (primitive.asInstanceOf[Types.TimestampType].shouldAdjustToUTC()) {
          TimestampType
        } else {
          TimestampNTZType
        }
      // Spark has no UUID type, so represent it as a String.
      case UUID => StringType
      case FIXED | BINARY => BinaryType
      case DECIMAL =>
        val decimalType = primitive.asInstanceOf[Types.DecimalType]
        DecimalType(decimalType.precision(), decimalType.scale())
      case _ =>
        throw new UnsupportedOperationException(
          "Cannot convert unknown type to Spark: " + primitive)
    }

  /** Builds the Spark field metadata, marking Iceberg metadata columns accordingly. */
  private def fieldMetadata(fieldId: Int): Metadata =
    if (MetadataColumns.metadataFieldIds().contains(fieldId)) {
      new MetadataBuilder().putBoolean(METADATA_COL_ATTR_KEY, value = true).build()
    } else {
      Metadata.empty
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.