From 00f7ea3c45ceda4d6a2634fd16ee7d24424c4dc6 Mon Sep 17 00:00:00 2001
From: Hanumath Rao Maduri
Date: Tue, 3 Oct 2017 10:58:04 +0300
Subject: [PATCH] Branch spark 69 (#170)

* Fix the wrong type casting of Timestamp to OTimestamp when reading from a Spark DataFrame.

* SPARK-69: Fix the licensing problem that occurs when we try to read from JSON and write to MapR-DB
---
 .../main/scala/com/mapr/db/spark/utils/MapRDBUtils.scala  | 1 +
 .../test/scala/com/mapr/db/testCases/PredicateTests.scala | 8 +++++++-
 .../scala/com/mapr/db/testCases/SparkSqlAcessTests.scala  | 8 +++++++-
 3 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/external/maprdb/src/main/scala/com/mapr/db/spark/utils/MapRDBUtils.scala b/external/maprdb/src/main/scala/com/mapr/db/spark/utils/MapRDBUtils.scala
index c6a903a1e13c3..a2d02db4fad8b 100644
--- a/external/maprdb/src/main/scala/com/mapr/db/spark/utils/MapRDBUtils.scala
+++ b/external/maprdb/src/main/scala/com/mapr/db/spark/utils/MapRDBUtils.scala
@@ -27,6 +27,7 @@ private[spark] object MapRDBUtils {
     tabDesc.setAutoSplit(true)
     tabDesc.setPath(tableName)
     tabDesc.setBulkLoad(bulkMode)
+    tabDesc.setInsertionOrder(false)
     if (keys.isEmpty)
       DBClient().createTable(tabDesc)
     else
diff --git a/external/maprdb/src/test/scala/com/mapr/db/testCases/PredicateTests.scala b/external/maprdb/src/test/scala/com/mapr/db/testCases/PredicateTests.scala
index ebfa8391f37ff..c5935353608ab 100644
--- a/external/maprdb/src/test/scala/com/mapr/db/testCases/PredicateTests.scala
+++ b/external/maprdb/src/test/scala/com/mapr/db/testCases/PredicateTests.scala
@@ -10,6 +10,8 @@ import org.apache.spark.rdd.RDD
 import org.ojai.exceptions.TypeException
 import org.ojai.store.QueryCondition
 import com.mapr.db.MapRDB
+import com.mapr.db.spark.dbclient.DBClient
+
 
 object PredicateTests {
   val tableName = "/tmp/user_profiles_predicates"
@@ -29,7 +31,11 @@ object PredicateTests {
     if (MapRDB.tableExists(tableName))
       MapRDB.deleteTable(tableName)
     println("table successfully create :" + tableName)
-    MapRDB.createTable(tableName)
+    val tabDesc = DBClient().newTableDescriptor()
+    tabDesc.setAutoSplit(true)
+    tabDesc.setPath(tableName)
+    tabDesc.setInsertionOrder(false)
+    DBClient().createTable(tabDesc)
   }
 
   def runTests(sparkSession: SparkContext): Unit = {
diff --git a/external/maprdb/src/test/scala/com/mapr/db/testCases/SparkSqlAcessTests.scala b/external/maprdb/src/test/scala/com/mapr/db/testCases/SparkSqlAcessTests.scala
index 190ddf71e5d9b..0c78cb84ada25 100644
--- a/external/maprdb/src/test/scala/com/mapr/db/testCases/SparkSqlAcessTests.scala
+++ b/external/maprdb/src/test/scala/com/mapr/db/testCases/SparkSqlAcessTests.scala
@@ -12,6 +12,7 @@ import org.apache.spark.sql.SparkSession
 import org.ojai.types.{ODate, OTime, OTimestamp}
 import com.mapr.org.apache.hadoop.hbase.util.Bytes.ByteArrayComparator
 import com.mapr.db.MapRDB
+import com.mapr.db.spark.dbclient.DBClient
 
 object SparkSqlAccessTests {
   lazy val conf = new SparkConf()
@@ -31,7 +32,12 @@ object SparkSqlAccessTests {
   def tableInitialization(tableName: String): Unit = {
     if (MapRDB.tableExists(tableName))
       MapRDB.deleteTable(tableName)
-    val table = MapRDB.createTable(tableName)
+    val tabDesc = DBClient().newTableDescriptor()
+    tabDesc.setAutoSplit(true)
+    tabDesc.setPath(tableName)
+    tabDesc.setInsertionOrder(false)
+    DBClient().createTable(tabDesc)
+    val table = DBClient().getTable(tableName)
     table.insertOrReplace(getNullRecord())
     table.insertOrReplace(getBooleanRecord())
     table.insertOrReplace(getStringRecord())