From a3d80564ad4d09c3c2f95896ca2765904b1abe8f Mon Sep 17 00:00:00 2001
From: Michael Munday
Date: Tue, 28 Jul 2020 10:36:20 -0700
Subject: [PATCH] [SPARK-32458][SQL][TESTS] Fix incorrectly sized row value reads

### What changes were proposed in this pull request?

Updates to tests to use correctly sized `getInt` or `getLong` calls.

### Why are the changes needed?

The reads were incorrectly sized (i.e. `putLong` paired with `getInt` and `putInt` paired with `getLong`). This causes test failures on big-endian systems.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Tests were run on a big-endian system (s390x). This change is unlikely to have any practical effect on little-endian systems.

Closes #29258 from mundaym/fix-row.

Authored-by: Michael Munday
Signed-off-by: Dongjoon Hyun
---
 .../spark/sql/catalyst/encoders/RowEncoderSuite.scala   | 2 +-
 .../apache/spark/sql/catalyst/util/UnsafeMapSuite.scala | 8 ++++----
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala
index fd24f058f357c..d20a9ba3f0f68 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/RowEncoderSuite.scala
@@ -336,7 +336,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
     val encoder = RowEncoder(schema).resolveAndBind()
     val localDate = java.time.LocalDate.parse("2019-02-27")
     val row = toRow(encoder, Row(localDate))
-    assert(row.getLong(0) === DateTimeUtils.localDateToDays(localDate))
+    assert(row.getInt(0) === DateTimeUtils.localDateToDays(localDate))
     val readback = fromRow(encoder, row)
     assert(readback.get(0).equals(localDate))
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeMapSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeMapSuite.scala
index ebc88612be22a..443534fd0a06a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeMapSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeMapSuite.scala
@@ -48,8 +48,8 @@ class UnsafeMapSuite extends SparkFunSuite {
     val ser = new JavaSerializer(new SparkConf).newInstance()
     val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
     assert(mapDataSer.numElements() == 1)
-    assert(mapDataSer.keyArray().getInt(0) == 19285)
-    assert(mapDataSer.valueArray().getInt(0) == 19286)
+    assert(mapDataSer.keyArray().getLong(0) == 19285)
+    assert(mapDataSer.valueArray().getLong(0) == 19286)
     assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
   }

@@ -57,8 +57,8 @@ class UnsafeMapSuite extends SparkFunSuite {
     val ser = new KryoSerializer(new SparkConf).newInstance()
     val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
     assert(mapDataSer.numElements() == 1)
-    assert(mapDataSer.keyArray().getInt(0) == 19285)
-    assert(mapDataSer.valueArray().getInt(0) == 19286)
+    assert(mapDataSer.keyArray().getLong(0) == 19285)
+    assert(mapDataSer.valueArray().getLong(0) == 19286)
     assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
   }
 }
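
Note (not part of the patch): the failure mode described above is easy to reproduce outside Spark. Spark's unsafe rows and arrays store each fixed-width value in an 8-byte slot using the platform byte order, so pairing an 8-byte write with a 4-byte read at the same offset only happens to work on little-endian hardware. The sketch below is a minimal illustration using `java.nio.ByteBuffer` as a stand-in for the `sun.misc.Unsafe`-based accessors; the object and method names here are hypothetical and not from the Spark codebase.

```scala
import java.nio.{ByteBuffer, ByteOrder}

object SizedReadDemo extends App {
  // Write a long into an 8-byte slot, then read back only 4 bytes at the
  // same offset -- the get/put size mismatch the patch fixes in the tests.
  def readIntAfterPutLong(order: ByteOrder): Int = {
    val buf = ByteBuffer.allocate(8).order(order)
    buf.putLong(0, 19285L) // 8-byte write, like putLong in the tests
    buf.getInt(0)          // 4-byte read at the same offset
  }

  // Little-endian: the low-order bytes come first, so the narrow read
  // happens to return the expected value.
  println(readIntAfterPutLong(ByteOrder.LITTLE_ENDIAN)) // 19285
  // Big-endian (e.g. s390x): the first 4 bytes hold the high half of the
  // long, so the narrow read returns 0 and an assertion would fail.
  println(readIntAfterPutLong(ByteOrder.BIG_ENDIAN))    // 0
}
```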