
Commit

The change for map support.
viirya committed Feb 1, 2018
1 parent 0cd76f4 commit 35548e6
Showing 4 changed files with 27 additions and 3 deletions.

@@ -624,6 +624,7 @@ public final ColumnarArray getArray(int rowId) {
   // second child column vector, and puts the offsets and lengths in the current column vector.
   @Override
   public final ColumnarMap getMap(int rowId) {
+    if (isNullAt(rowId)) return null;
     return new ColumnarMap(getChild(0), getChild(1), getArrayOffset(rowId), getArrayLength(rowId));
   }
 
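With this check in place, a SQL NULL slot is reported by getMap itself, which is what lets the row accessor further down in this diff drop its own per-row isNullAt check. A minimal caller-side sketch of the new contract (an editorial illustration, not part of the commit; it assumes the OnHeapColumnVector implementation, e.g. in a spark-shell session):

import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector
import org.apache.spark.sql.types.{IntegerType, MapType}

val column = new OnHeapColumnVector(4, MapType(IntegerType, IntegerType, valueContainsNull = false))

// Mark row 0 as a SQL NULL map; getMap now short-circuits and returns null for it.
column.putNull(0)
assert(column.getMap(0) == null)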

@@ -233,13 +233,13 @@ public final ColumnarRow getStruct(int rowId) {
   public abstract ColumnarArray getArray(int rowId);
 
   /**
-   * Returns the map type value for rowId.
+   * Returns the map type value for rowId. If the slot for rowId is null, it should return null.
    *
    * In Spark, map type value is basically a key data array and a value data array. A key from the
    * key array with an index and a value from the value array with the same index contribute to
    * an entry of this map type value.
    *
-   * To support map type, implementations must construct an {@link ColumnarMap} and return it in
+   * To support map type, implementations must construct a {@link ColumnarMap} and return it in
    * this method. {@link ColumnarMap} requires a {@link ColumnVector} that stores the data of all
    * the keys of all the maps in this vector, and another {@link ColumnVector} that stores the data
    * of all the values of all the maps in this vector, and a pair of offset and length which
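As a concrete illustration of the layout described above (again an editorial sketch assuming the OnHeapColumnVector implementation, not code from this commit): the keys of every map in the vector live in the first child vector, the values in the second, and each row records an offset and length into those children.

import org.apache.spark.sql.execution.vectorized.OnHeapColumnVector
import org.apache.spark.sql.types.{IntegerType, MapType}

val column = new OnHeapColumnVector(2, MapType(IntegerType, IntegerType, valueContainsNull = false))

// Keys of all maps go into child 0, values into child 1.
val keys = column.getChild(0)
val values = column.getChild(1)
keys.putInt(0, 10); values.putInt(0, 100)   // entry 0
keys.putInt(1, 20); values.putInt(1, 200)   // entry 1

// Row 0 owns entries [0, 2) of the child vectors.
column.putArray(0, 0, 2)

val map = column.getMap(0)
assert(map.numElements() == 2)
assert(map.keyArray().getInt(1) == 20 && map.valueArray().getInt(1) == 200)

The new test at the bottom of this diff sets up its map column in exactly this way (putInt into the two child vectors, then putArray for the row's offset and length) before checking the null and non-null cases.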

@@ -150,7 +150,6 @@ public ColumnarArray getArray(int ordinal) {
 
   @Override
   public ColumnarMap getMap(int ordinal) {
-    if (data.getChild(ordinal).isNullAt(rowId)) return null;
     return data.getChild(ordinal).getMap(rowId);
   }
 

@@ -1373,4 +1373,28 @@ class ColumnarBatchSuite extends SparkFunSuite {
     column.putByteArray(idx, "Hello".getBytes(StandardCharsets.UTF_8))
     assert(column.getBinary(idx) != null)
   }
+
+  testVector("getMap should return null for null slot", 4,
+    new MapType(IntegerType, IntegerType, false)) { column =>
+    assert(column.numNulls() == 0)
+
+    var idx = 0
+    column.putNull(idx)
+    assert(column.getMap(idx) == null)
+    idx += 1
+    column.putNull(idx)
+    assert(column.getMap(idx) == null)
+    assert(column.numNulls() == 2)
+
+    idx += 1
+    val keyCol = column.getChild(0)
+    keyCol.putInt(0, 0)
+    keyCol.putInt(1, 1)
+    val valueCol = column.getChild(1)
+    valueCol.putInt(0, 0)
+    valueCol.putInt(1, 2)
+
+    column.putArray(idx, 0, 2)
+    assert(column.getMap(idx) != null)
+  }
 }

