Skip to content

Commit

Permalink
[SPARK-1570] Fix classloading in JavaSQLContext.applySchema
Browse files Browse the repository at this point in the history
I hit a class loading issue when running the JavaSparkSQL example with spark-submit in local mode: `Class.forName(className)` used the wrong classloader, so the user-supplied bean class could not be found on executors. The fix loads the class via Spark's context-or-Spark classloader instead.

Author: Kan Zhang <[email protected]>

Closes apache#484 from kanzhang/SPARK-1570, squashing the following commits:

feaaeba [Kan Zhang] [SPARK-1570] Fix classloading in JavaSQLContext.applySchema
  • Loading branch information
kanzhang authored and pwendell committed Apr 22, 2014
1 parent 0ea0b1a commit ea8cea8
Showing 1 changed file with 4 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRow
import org.apache.spark.sql.catalyst.types._
import org.apache.spark.sql.parquet.ParquetRelation
import org.apache.spark.sql.execution.{ExistingRdd, SparkLogicalPlan}
import org.apache.spark.util.Utils

/**
* The entry point for executing Spark SQL queries from a Java program.
Expand Down Expand Up @@ -84,10 +85,11 @@ class JavaSQLContext(sparkContext: JavaSparkContext) {
*/
def applySchema(rdd: JavaRDD[_], beanClass: Class[_]): JavaSchemaRDD = {
val schema = getSchema(beanClass)
val className = beanClass.getCanonicalName
val className = beanClass.getName
val rowRdd = rdd.rdd.mapPartitions { iter =>
// BeanInfo is not serializable so we must rediscover it remotely for each partition.
val localBeanInfo = Introspector.getBeanInfo(Class.forName(className))
val localBeanInfo = Introspector.getBeanInfo(
Class.forName(className, true, Utils.getContextOrSparkClassLoader))
val extractors =
localBeanInfo.getPropertyDescriptors.filterNot(_.getName == "class").map(_.getReadMethod)

Expand Down

0 comments on commit ea8cea8

Please sign in to comment.