diff --git a/pom.xml b/pom.xml
index c85c5feeaf383..eaed47a27aef4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -192,7 +192,7 @@
       <id>central</id>
       <name>Maven Repository</name>
-      <url>https://repo1.maven.org/maven2</url>
+      <url>http://repo1.maven.org/maven2</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -203,7 +203,7 @@
       <id>apache-repo</id>
       <name>Apache Repository</name>
-      <url>https://repository.apache.org/content/repositories/releases</url>
+      <url>http://repository.apache.org/content/repositories/releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -214,7 +214,7 @@
       <id>jboss-repo</id>
       <name>JBoss Repository</name>
-      <url>https://repository.jboss.org/nexus/content/repositories/releases</url>
+      <url>http://repository.jboss.org/nexus/content/repositories/releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -225,7 +225,7 @@
       <id>mqtt-repo</id>
       <name>MQTT Repository</name>
-      <url>https://repo.eclipse.org/content/repositories/paho-releases</url>
+      <url>http://repo.eclipse.org/content/repositories/paho-releases</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -236,7 +236,7 @@
       <id>cloudera-repo</id>
       <name>Cloudera Repository</name>
-      <url>https://repository.cloudera.com/artifactory/cloudera-repos</url>
+      <url>http://repository.cloudera.com/artifactory/cloudera-repos</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -258,7 +258,7 @@
       <id>spring-releases</id>
       <name>Spring Release Repository</name>
-      <url>https://repo.spring.io/libs-release</url>
+      <url>http://repo.spring.io/libs-release</url>
       <releases>
         <enabled>true</enabled>
       </releases>
@@ -270,7 +270,7 @@
       <id>central</id>
-      <url>https://repo1.maven.org/maven2</url>
+      <url>http://repo1.maven.org/maven2</url>
       <releases>
         <enabled>true</enabled>
       </releases>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala
index bd47eee4fa094..19fd6942f3ff2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/CatalystConf.scala
@@ -36,19 +36,21 @@ private[spark] trait CatalystConf {
  * relations are already filled in and the analyser needs only to resolve attribute references.
  */
 object EmptyConf extends CatalystConf {
-  def setConf(key: String, value: String) : Unit = {
+  override def caseSensitiveAnalysis: Boolean = true
+
+  override def setConf(key: String, value: String) : Unit = {
     throw new UnsupportedOperationException
   }
 
-  def getConf(key: String) : String = {
+  override def getConf(key: String) : String = {
     throw new UnsupportedOperationException
   }
 
-  def getConf(key: String, defaultValue: String) : String = {
+  override def getConf(key: String, defaultValue: String) : String = {
     throw new UnsupportedOperationException
   }
 
-  def getAllConfs: immutable.Map[String, String] = {
+  override def getAllConfs: immutable.Map[String, String] = {
     throw new UnsupportedOperationException
   }
 }
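As a reference for this hunk, here is a minimal sketch of another conf implementing the new caseSensitiveAnalysis member, mirroring EmptyConf above. ExampleConf and its package placement are illustrative only (not part of this patch), and the sketch assumes the trait declares exactly the members that EmptyConf overrides here:

```scala
package org.apache.spark.sql.catalyst

import scala.collection.immutable

// Illustrative only: a throwaway conf that, like EmptyConf, hard-codes
// case-sensitive analysis and rejects all runtime configuration.
// Placed under an org.apache.spark package because CatalystConf is private[spark].
object ExampleConf extends CatalystConf {
  override def caseSensitiveAnalysis: Boolean = true

  override def setConf(key: String, value: String): Unit =
    throw new UnsupportedOperationException

  override def getConf(key: String): String =
    throw new UnsupportedOperationException

  override def getConf(key: String, defaultValue: String): String =
    throw new UnsupportedOperationException

  override def getAllConfs: immutable.Map[String, String] =
    throw new UnsupportedOperationException
}
```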
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index e5b8fb0ab5454..b5e50a109fbd8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -31,7 +31,7 @@ import org.apache.spark.sql.types._
  * when all relations are already filled in and the analyzer needs only to resolve attribute
  * references.
  */
-object SimpleAnalyzer extends Analyzer(EmptyCatalog, EmptyFunctionRegistry, new SimpleConf)
+object SimpleAnalyzer extends Analyzer(EmptyCatalog, EmptyFunctionRegistry, new SimpleConf(true))
 
 /**
  * Provides a logical query plan analyzer, which translates [[UnresolvedAttribute]]s and
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleConf.scala
index 6c10b46726dbf..b9d17d99343d4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/test/SimpleConf.scala
 import scala.collection.immutable
 import scala.collection.mutable
 
 /** A CatalystConf that can be used for local testing. */
-class SimpleConf(caseSensitiveAnalysis: Boolean) extends CatalystConf {
+class SimpleConf(caseSensitive: Boolean) extends CatalystConf {
   val map = mutable.Map[String, String]()
+  def caseSensitiveAnalysis: Boolean = caseSensitive
+
   def setConf(key: String, value: String) : Unit = {
     map.put(key, value)
   }
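A short usage sketch of the renamed constructor parameter, wiring SimpleConf into an Analyzer the same way the test suites below do. The object name, package placement, and conf key are illustrative only and not part of this patch:

```scala
package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.catalyst.test.SimpleConf

// Illustrative only: the boolean passed to SimpleConf surfaces as
// conf.caseSensitiveAnalysis, the flag an Analyzer built with this conf can consult.
object CaseSensitivityExample {
  def main(args: Array[String]): Unit = {
    val conf = new SimpleConf(true)    // resolve attribute names case-sensitively
    val catalog = new SimpleCatalog(conf)
    val analyzer = new Analyzer(catalog, EmptyFunctionRegistry, conf)

    // SimpleConf remains a mutable key/value store for other settings;
    // the key below is an arbitrary example, not a real Spark setting.
    conf.setConf("example.key", "example.value")
    assert(conf.getConf("example.key") == "example.value")
    assert(conf.caseSensitiveAnalysis)
    println(s"Built ${analyzer.getClass.getSimpleName} with caseSensitiveAnalysis=${conf.caseSensitiveAnalysis}")
  }
}
```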
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
index 390ab861f2af3..82ed3640b48bd 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DecimalPrecisionSuite.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.test.SimpleConf
 import org.scalatest.{BeforeAndAfter, FunSuite}
 
 class DecimalPrecisionSuite extends FunSuite with BeforeAndAfter {
-  val conf = new SimpleConf
+  val conf = new SimpleConf(true)
   val catalog = new SimpleCatalog(conf)
   val analyzer = new Analyzer(catalog, EmptyFunctionRegistry, conf)
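For contrast, a hypothetical suite wired with SimpleConf(false) to request case-insensitive analysis; the class name is illustrative and the suite is not part of this patch:

```scala
package org.apache.spark.sql.catalyst.analysis

import org.apache.spark.sql.catalyst.test.SimpleConf
import org.scalatest.FunSuite

// Illustrative only: the same wiring as DecimalPrecisionSuite above,
// but asking for case-insensitive analysis.
class CaseInsensitiveWiringSuite extends FunSuite {
  val conf = new SimpleConf(false)
  val catalog = new SimpleCatalog(conf)
  val analyzer = new Analyzer(catalog, EmptyFunctionRegistry, conf)

  test("conf reports case-insensitive analysis") {
    assert(!conf.caseSensitiveAnalysis)
  }
}
```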
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 50615e2bc61f4..46eacf7e8a792 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -21,13 +21,7 @@ import org.scalatest.BeforeAndAfterAll
 import org.apache.spark.sql.execution.GeneratedAggregate
 import org.apache.spark.sql.functions._
-<<<<<<< HEAD
 import org.apache.spark.sql.catalyst.CatalystConf
-import org.apache.spark.sql.catalyst.errors.TreeNodeException
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.types._
-=======
->>>>>>> 254e0509762937acc9c72b432d5d953bf72c3c52
 import org.apache.spark.sql.TestData._
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.test.TestSQLContext.{udf => _, _}