From 07f4c824f48f6eeb03b2b44749a0ac0be1d35a6f Mon Sep 17 00:00:00 2001
From: Takeshi Yamamuro
Date: Wed, 11 Apr 2018 23:56:05 +0900
Subject: [PATCH] Fix

---
 .../expressions/objects/objects.scala         | 29 +++++++++++++++----
 .../expressions/ObjectExpressionsSuite.scala  |  1 -
 2 files changed, 24 insertions(+), 6 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
index 6eea60bbda415..56e6f4b1908e3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala
@@ -1009,16 +1009,35 @@ case class CatalystToExternalMap private(
   override def children: Seq[Expression] =
     keyLambdaFunction :: valueLambdaFunction :: inputData :: Nil
 
-  private lazy val toScalaValue: Any => Any = {
-    assert(inputData.dataType.isInstanceOf[MapType])
-    val mapType = inputData.dataType.asInstanceOf[MapType]
-    CatalystTypeConverters.createToScalaConverter(mapType)
+  private lazy val inputMapType = inputData.dataType.asInstanceOf[MapType]
+
+  private lazy val keyConverter =
+    CatalystTypeConverters.createToScalaConverter(inputMapType.keyType)
+  private lazy val valueConverter =
+    CatalystTypeConverters.createToScalaConverter(inputMapType.valueType)
+
+  private def newMapBuilder(): Builder[AnyRef, AnyRef] = {
+    val clazz = Utils.classForName(collClass.getCanonicalName + "$")
+    val module = clazz.getField("MODULE$").get(null)
+    val method = clazz.getMethod("newBuilder")
+    method.invoke(module).asInstanceOf[Builder[AnyRef, AnyRef]]
   }
 
   override def eval(input: InternalRow): Any = {
     val result = inputData.eval(input).asInstanceOf[MapData]
     if (result != null) {
-      toScalaValue(result)
+      val builder = newMapBuilder()
+      builder.sizeHint(result.numElements())
+      val keyArray = result.keyArray()
+      val valueArray = result.valueArray()
+      var i = 0
+      while (i < result.numElements()) {
+        val key = keyConverter(keyArray.get(i, inputMapType.keyType))
+        val value = valueConverter(valueArray.get(i, inputMapType.valueType))
+        builder += Tuple2(key, value)
+        i += 1
+      }
+      builder.result()
     } else {
       null
     }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
index 78b0dac1d1cd6..fd96b1aa14816 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala
@@ -386,7 +386,6 @@ class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
-<<<<<<< 0c94e48bc50717e1627c0d2acd5382d9adc73c97
   test("LambdaVariable should support interpreted execution") {
     def genSchema(dt: DataType): Seq[StructType] = {
       Seq(StructType(StructField("col_1", dt, nullable = false) :: Nil),