diff --git a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
index 447deafff53cd..75fc02acd1bce 100644
--- a/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
@@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.lib.output.{FileOutputFormat => NewFileOutput
 import org.apache.hadoop.mapred.SparkHadoopWriter
 import org.apache.spark._
+import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.Partitioner.defaultPartitioner
 import org.apache.spark.SparkContext._
 import org.apache.spark.partial.{BoundedDouble, PartialResult}
@@ -723,6 +724,7 @@ class PairRDDFunctions[K: ClassTag, V: ClassTag](self: RDD[(K, V)])
     if (valueClass == null) {
       throw new SparkException("Output value class not set")
     }
+    SparkHadoopUtil.get.addCredentials(conf)
     logDebug("Saving as hadoop file of type (" + keyClass.getSimpleName + ", " +
       valueClass.getSimpleName + ")")
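
For context, a minimal sketch of a driver program that exercises the path this change touches. saveAsHadoopFile delegates to saveAsHadoopDataset, so the added SparkHadoopUtil.get.addCredentials(conf) call runs before the write; the intent is that on secured (Kerberos) clusters the current user's Hadoop credentials (e.g. HDFS delegation tokens) are merged into the JobConf. The object name, app name, and output path below are illustrative, not part of this patch.

import org.apache.hadoop.io.{IntWritable, Text}
import org.apache.hadoop.mapred.TextOutputFormat

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._

// Hypothetical example; class name, app name, and output path are illustrative.
object SaveWithCredentials {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("save-with-credentials"))

    // Old (mapred) API output formats expect Writable key/value types.
    val counts = sc.parallelize(Seq(("a", 1), ("b", 2)))
      .map { case (k, v) => (new Text(k), new IntWritable(v)) }

    // saveAsHadoopFile goes through saveAsHadoopDataset, so with this patch
    // SparkHadoopUtil.get.addCredentials(conf) runs before the write, which is
    // intended to make the user's credentials available to the Hadoop writer.
    counts.saveAsHadoopFile(
      "hdfs:///tmp/example-output",
      classOf[Text],
      classOf[IntWritable],
      classOf[TextOutputFormat[Text, IntWritable]])

    sc.stop()
  }
}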