forked from apache/spark
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Start prototyping Java Row -> UnsafeRow converters
- Loading branch information
Showing
4 changed files
with
255 additions
and
4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
168 changes: 168 additions & 0 deletions
168
...atalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverter.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,168 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.sql.catalyst.expressions | ||
|
||
import org.apache.spark.sql.types._ | ||
import org.apache.spark.unsafe.PlatformDependent | ||
import org.apache.spark.unsafe.array.ByteArrayMethods | ||
|
||
/** Write a column into an UnsafeRow */
private abstract class UnsafeColumnWriter[T] {
  /**
   * Write a value into an UnsafeRow.
   *
   * @param value the value to write
   * @param columnNumber what column to write it to
   * @param row a pointer to the unsafe row
   * @param baseObject the object backing the row's memory region; passed through to the
   *                   unsafe memory operations that append variable-length data
   * @param baseOffset the offset within `baseObject` at which the row's storage begins
   * @param appendCursor the offset from the start of the unsafe row to the end of the row;
   *                     used for calculating where variable-length data should be written
   * @return the number of variable-length bytes written
   */
  def write(
      value: T,
      columnNumber: Int,
      row: UnsafeRow,
      baseObject: Object,
      baseOffset: Long,
      appendCursor: Int): Int

  /**
   * Return the number of bytes that are needed to write this variable-length value.
   */
  def getSize(value: T): Int
}
|
||
/** Factory that maps a Catalyst data type to the singleton writer that handles it. */
private object UnsafeColumnWriter {
  /**
   * Return the writer for the given data type.
   *
   * @throws UnsupportedOperationException if the type has no unsafe-row writer yet
   */
  def forType(dataType: DataType): UnsafeColumnWriter[_] = {
    dataType match {
      case IntegerType => IntUnsafeColumnWriter
      case LongType => LongUnsafeColumnWriter
      case StringType => StringUnsafeColumnWriter
      case _ =>
        // Name the offending type so conversion failures are diagnosable.
        throw new UnsupportedOperationException(
          s"Do not know how to write columns of type $dataType to unsafe rows")
    }
  }
}
|
||
/**
 * Writer for UTF8String columns. The string's bytes are appended to the row's
 * variable-length region (preceded by an 8-byte length prefix), and the column's
 * fixed-length slot stores the append-cursor offset pointing at that data.
 */
private class StringUnsafeColumnWriter private() extends UnsafeColumnWriter[UTF8String] {
  def getSize(value: UTF8String): Int = {
    // 8 bytes for the length prefix plus the payload rounded up to a whole word.
    val numBytes = value.getBytes.length
    8 + ByteArrayMethods.roundNumberOfBytesToNearestWord(numBytes)
  }

  override def write(
      value: UTF8String,
      columnNumber: Int,
      row: UnsafeRow,
      baseObject: Object,
      baseOffset: Long,
      appendCursor: Int): Int = {
    // Materialize the bytes once instead of calling getBytes three times.
    val bytes = value.getBytes
    val numBytes = bytes.length
    // Length prefix, then the raw string bytes, written at the append cursor.
    PlatformDependent.UNSAFE.putLong(baseObject, baseOffset + appendCursor, numBytes)
    PlatformDependent.copyMemory(
      bytes,
      PlatformDependent.BYTE_ARRAY_OFFSET,
      baseObject,
      baseOffset + appendCursor + 8,
      numBytes
    )
    // The fixed-length slot records where in the row the variable-length data lives.
    row.setLong(columnNumber, appendCursor)
    // Must return exactly what getSize reserved: reuse the shared word-rounding helper
    // rather than duplicating the arithmetic inline (the two previously hand-rolled the
    // same computation in different forms, a divergence waiting to corrupt the cursor).
    8 + ByteArrayMethods.roundNumberOfBytesToNearestWord(numBytes)
  }
}
private object StringUnsafeColumnWriter extends StringUnsafeColumnWriter
|
||
/**
 * Base class for writers of fixed-width primitive columns, which live entirely in the
 * row's fixed-length region and therefore consume no variable-length space.
 */
private abstract class PrimitiveUnsafeColumnWriter[T] extends UnsafeColumnWriter[T] {
  // Primitives never append variable-length data.
  def getSize(value: T): Int = 0
}
|
||
/** Writer for IntegerType columns: stores the value directly in its fixed-length slot. */
private class IntUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter[Int] {
  override def write(
      value: Int,
      columnNumber: Int,
      row: UnsafeRow,
      baseObject: Object,
      baseOffset: Long,
      appendCursor: Int): Int = {
    row.setInt(columnNumber, value)
    // No variable-length bytes were written.
    0
  }
}
private object IntUnsafeColumnWriter extends IntUnsafeColumnWriter
|
||
/** Writer for LongType columns: stores the value directly in its fixed-length slot. */
private class LongUnsafeColumnWriter private() extends PrimitiveUnsafeColumnWriter[Long] {
  override def write(
      value: Long,
      columnNumber: Int,
      row: UnsafeRow,
      baseObject: Object,
      baseOffset: Long,
      appendCursor: Int): Int = {
    row.setLong(columnNumber, value)
    // No variable-length bytes were written.
    0
  }
}
// Plain `object`, not `case object`, for consistency with the Int and String writer
// singletons; the case-class machinery adds nothing for a file-private singleton.
private object LongUnsafeColumnWriter extends LongUnsafeColumnWriter
|
||
|
||
/**
 * Converts Java-object-based rows into UnsafeRows laid out in a caller-supplied
 * memory region.
 *
 * @param fieldTypes the data types of the row's columns, in column order
 */
class UnsafeRowConverter(fieldTypes: Array[DataType]) {

  /** Per-column writers, resolved once from the schema rather than once per row. */
  private[this] val writers: Array[UnsafeColumnWriter[Any]] = {
    fieldTypes.map(t => UnsafeColumnWriter.forType(t).asInstanceOf[UnsafeColumnWriter[Any]])
  }

  /**
   * Size of the row's fixed-length region: the null bitset plus one 8-byte slot per
   * column. Hoisted to a val because both getSizeRequirement and writeRow previously
   * duplicated this expression inline, risking silent divergence.
   */
  private[this] val fixedLengthSize: Int =
    (8 * fieldTypes.length) + UnsafeRow.calculateBitSetWidthInBytes(fieldTypes.length)

  /**
   * Compute the number of bytes needed to convert the given row into an UnsafeRow.
   *
   * @param row the row to be converted
   * @return the fixed-length size plus variable-length space for all non-null fields
   */
  def getSizeRequirement(row: Row): Int = {
    var variableLengthFieldSize: Int = 0
    var fieldNumber = 0
    while (fieldNumber < writers.length) {
      // Null fields occupy only their fixed-length slot.
      if (!row.isNullAt(fieldNumber)) {
        variableLengthFieldSize += writers(fieldNumber).getSize(row.get(fieldNumber))
      }
      fieldNumber += 1
    }
    fixedLengthSize + variableLengthFieldSize
  }

  /**
   * Convert the given row into an UnsafeRow stored at the given base object/offset.
   *
   * @param row the row to convert
   * @param baseObject the object backing the destination memory region
   * @param baseOffset the offset within `baseObject` at which the row's storage begins
   * @return the number of bytes written, including all variable-length data
   */
  def writeRow(row: Row, baseObject: Object, baseOffset: Long): Long = {
    val unsafeRow = new UnsafeRow()
    unsafeRow.set(baseObject, baseOffset, writers.length, null) // TODO: schema?
    // Variable-length data is appended immediately after the fixed-length region.
    var appendCursor: Int = fixedLengthSize
    var fieldNumber = 0
    while (fieldNumber < writers.length) {
      if (row.isNullAt(fieldNumber)) {
        unsafeRow.setNullAt(fieldNumber)
        // TODO: type-specific null value writing?
      } else {
        appendCursor += writers(fieldNumber).write(
          row.get(fieldNumber),
          fieldNumber,
          unsafeRow,
          baseObject,
          baseOffset,
          appendCursor)
      }
      fieldNumber += 1
    }
    appendCursor
  }

}
67 changes: 67 additions & 0 deletions
67
...st/src/test/scala/org/apache/spark/sql/catalyst/expressions/UnsafeRowConverterSuite.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.sql.catalyst.expressions | ||
|
||
import org.apache.spark.sql.types.{StringType, DataType, LongType, IntegerType} | ||
import org.apache.spark.unsafe.PlatformDependent | ||
import org.apache.spark.unsafe.array.ByteArrayMethods | ||
import org.scalatest.{FunSuite, Matchers} | ||
|
||
|
||
/** Tests for converting Java-object rows into UnsafeRows via UnsafeRowConverter. */
class UnsafeRowConverterSuite extends FunSuite with Matchers {

  test("basic conversion with only primitive types") {
    val fieldTypes: Array[DataType] = Array(LongType, LongType, IntegerType)
    val row = new SpecificMutableRow(fieldTypes)
    row.setLong(0, 0)
    row.setLong(1, 1)
    row.setInt(2, 2)
    val converter = new UnsafeRowConverter(fieldTypes)
    val sizeRequired: Int = converter.getSizeRequirement(row)
    // One word of null bitset for three columns, plus one 8-byte slot per column.
    sizeRequired should be (8 + (3 * 8))
    val buffer: Array[Long] = new Array[Long](sizeRequired / 8)
    val numBytesWritten = converter.writeRow(row, buffer, PlatformDependent.LONG_ARRAY_OFFSET)
    numBytesWritten should be (sizeRequired)
    val unsafeRow = new UnsafeRow()
    unsafeRow.set(buffer, PlatformDependent.LONG_ARRAY_OFFSET, fieldTypes.length, null)
    unsafeRow.getLong(0) should be (0)
    unsafeRow.getLong(1) should be (1)
    unsafeRow.getInt(2) should be (2)
  }

  test("basic conversion with primitive and string types") {
    val fieldTypes: Array[DataType] = Array(LongType, StringType, StringType)
    val row = new SpecificMutableRow(fieldTypes)
    row.setLong(0, 0)
    row.setString(1, "Hello")
    row.setString(2, "World")
    val converter = new UnsafeRowConverter(fieldTypes)
    val sizeRequired: Int = converter.getSizeRequirement(row)
    // Each string costs an 8-byte length prefix plus its payload rounded up to a word.
    // The prefix belongs OUTSIDE the rounding, matching StringUnsafeColumnWriter.getSize;
    // the previous `round(len + 8)` form only coincidentally matched for 5-byte strings
    // and would disagree for payloads whose length is a multiple of 8.
    sizeRequired should be (8 + (8 * 3) +
      (8 + ByteArrayMethods.roundNumberOfBytesToNearestWord("Hello".getBytes.length)) +
      (8 + ByteArrayMethods.roundNumberOfBytesToNearestWord("World".getBytes.length)))
    val buffer: Array[Long] = new Array[Long](sizeRequired / 8)
    val numBytesWritten = converter.writeRow(row, buffer, PlatformDependent.LONG_ARRAY_OFFSET)
    numBytesWritten should be (sizeRequired)
    val unsafeRow = new UnsafeRow()
    unsafeRow.set(buffer, PlatformDependent.LONG_ARRAY_OFFSET, fieldTypes.length, null)
    unsafeRow.getLong(0) should be (0)
    unsafeRow.getString(1) should be ("Hello")
    unsafeRow.getString(2) should be ("World")
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters