Commit

remove foundry auth hack
jdcasale committed Jan 8, 2021
1 parent 527b597 commit ce51a2f
Showing 1 changed file with 0 additions and 19 deletions.
19 changes: 0 additions & 19 deletions core/src/main/scala/org/apache/spark/scheduler/DAGScheduler.scala
@@ -1003,8 +1003,6 @@ private[spark] class DAGScheduler(
listener: JobListener,
properties: Properties): Unit = {
var finalStage: ResultStage = null
// TODO(rshkv): Re-enable this
// forceFoundryAuthIfEnabled(properties)
try {
// New stage creation may throw an exception if, for example, jobs are run on a
// HadoopRDD whose underlying HDFS files have been deleted.
@@ -1063,23 +1061,6 @@
submitStage(finalStage)
}

// TODO(rshkv): Think about this
private def forceFoundryAuthIfEnabled(properties: Properties): Unit = {
import scala.collection.JavaConverters.asScalaSetConverter

val foundrySparkSessionPrefix = "foundry.spark.session"
val shouldCopyAuthTokensKey = foundrySparkSessionPrefix + ".shouldForceAuthorize"
if (properties != null
&& properties.containsKey(shouldCopyAuthTokensKey)
&& properties.getProperty(shouldCopyAuthTokensKey).equals("true")) {
val tokenKeys = properties.keySet.asScala.map((key: Any) => key.asInstanceOf[String])
.filter((key: String) => {
key.startsWith(foundrySparkSessionPrefix)
})
tokenKeys.foreach(key => sc.setLocalProperty(key, properties.getProperty(key)))
}
}

private[scheduler] def handleMapStageSubmitted(jobId: Int,
dependency: ShuffleDependency[_, _, _],
callSite: CallSite,
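
For reference, here is a minimal, self-contained sketch of the behaviour removed by this commit: when a job's properties carry the opt-in flag foundry.spark.session.shouldForceAuthorize=true, every property under the foundry.spark.session prefix is copied into the scheduler's thread-local properties. The object name FoundryAuthHackSketch, the method name copyFoundryAuthTokens, and the setLocalProperty function parameter (standing in for SparkContext.setLocalProperty) are illustrative assumptions, not part of the original code.

import java.util.Properties
import scala.collection.JavaConverters.asScalaSetConverter

object FoundryAuthHackSketch {
  private val Prefix = "foundry.spark.session"
  private val ForceAuthKey = Prefix + ".shouldForceAuthorize"

  // Mirrors the removed DAGScheduler helper: when the opt-in flag is "true",
  // copy every property whose key starts with the Foundry session prefix
  // into the scheduler's local properties via the supplied setter.
  def copyFoundryAuthTokens(properties: Properties,
                            setLocalProperty: (String, String) => Unit): Unit = {
    if (properties != null && "true".equals(properties.getProperty(ForceAuthKey))) {
      properties.keySet.asScala
        .collect { case key: String if key.startsWith(Prefix) => key }
        .foreach(key => setLocalProperty(key, properties.getProperty(key)))
    }
  }
}

Example usage, assuming an active SparkContext named sc: FoundryAuthHackSketch.copyFoundryAuthTokens(jobProperties, sc.setLocalProperty).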
