
Commit

Update Apache Spark to 2.3.0; resolves #218 (#219)
- Update tests to use the workaround for SPARK-2243 (see the setup sketch below)
- Comment out ExtractGraph test as per https://github.com/archivesunleashed/aut/pull/204/files#diff-4541b9834513985c360b64093fd45073
- Align Hadoop version with Apache Spark pom.xml https://github.com/apache/spark/blob/branch-2.3/pom.xml#L120
ruebot authored and ianmilligan1 committed May 14, 2018
1 parent b8a8a97 commit fc8f4bf
Showing 11 changed files with 27 additions and 15 deletions.
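
The change applied to every test suite below is the SPARK-2243 workaround from the first commit-message bullet: setting spark.driver.allowMultipleContexts tells Spark to tolerate more than one active SparkContext in the test JVM instead of failing, so suites that each build their own context can share a process. A minimal sketch of that setup pattern; "ExampleTest", the master string, and the app name are placeholders, and the real suites (ArcTest, WarcTest, RecordLoaderTest, ...) follow the same shape:

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfter, FunSuite}

// Minimal sketch of the setup pattern added to each test suite in this commit.
// "ExampleTest" and its constants are placeholders, not code from the repo.
class ExampleTest extends FunSuite with BeforeAndAfter {
  private val appName = "example-spark"
  private val master = "local[4]"
  private var sc: SparkContext = _

  before {
    val conf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)
    // SPARK-2243 workaround: allow more than one SparkContext in the same JVM
    // instead of throwing, so suites sharing a test JVM can each create one.
    conf.set("spark.driver.allowMultipleContexts", "true")
    sc = new SparkContext(conf)
  }

  test("spark context is usable") {
    assert(sc.parallelize(Seq(1, 2, 3)).count() == 3)
  }

  after {
    if (sc != null) {
      sc.stop()
    }
  }
}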
pom.xml (28 changes: 14 additions & 14 deletions)
@@ -21,8 +21,8 @@
<project_name>${project.artifactId}</project_name>
<project_organization>The Archives Unleashed Project</project_organization>
<scala.version>2.11.8</scala.version>
- <hadoop.version>2.7.3</hadoop.version>
- <spark.version>2.1.1</spark.version>
+ <hadoop.version>2.6.5</hadoop.version>
+ <spark.version>2.3.0</spark.version>
<github.global.server>github</github.global.server>
<checkstyle.plugin.version>2.17</checkstyle.plugin.version>
<license.plugin.version>3.0</license.plugin.version>
@@ -508,6 +508,16 @@
<artifactId>jackson-module-scala_2.11</artifactId>
<version>2.8.8</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapreduce-client-core</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>${hadoop.version}</version>
+ </dependency>
<dependency>
<groupId>org.scalanlp</groupId>
<artifactId>breeze_2.11</artifactId>
@@ -534,11 +544,6 @@
<artifactId>spark-graphx_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
- <dependency>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- <version>23.0</version>
- </dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
@@ -598,8 +603,8 @@
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
- <artifactId>commons-math3</artifactId>
- <version>3.1.1</version>
+ <artifactId>commons-compress</artifactId>
+ <version>1.16</version>
</dependency>
<dependency>
<groupId>net.java.dev.jets3t</groupId>
@@ -616,11 +621,6 @@
<artifactId>commons-net</artifactId>
<version>1.4.1</version>
</dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-lang3</artifactId>
- <version>3.3.1</version>
- </dependency>
<dependency>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
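
A quick way to confirm the versions that actually land on the classpath after the pom.xml bump is to print them at runtime. This is a hypothetical sanity check, not part of this commit; the object name and the local master are made up. After this commit it should print 2.3.0 for Spark and 2.6.5 for Hadoop.

import org.apache.hadoop.util.VersionInfo
import org.apache.spark.{SparkConf, SparkContext}

// Hypothetical check: print the Spark and Hadoop versions resolved from the
// updated pom.xml (expected 2.3.0 and 2.6.5 after this commit).
object VersionCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local[1]")
      .setAppName("version-check")
    val sc = new SparkContext(conf)
    println(s"Spark version:  ${sc.version}")
    println(s"Hadoop version: ${VersionInfo.getVersion}")
    sc.stop()
  }
}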
src/test/scala/io/archivesunleashed/ArcTest.scala (1 change: 1 addition & 0 deletions)
@@ -36,6 +36,7 @@ class ArcTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

ArchiveRecordTest.scala (1 change: 1 addition & 0 deletions)
@@ -35,6 +35,7 @@ class ArchiveRecordTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

src/test/scala/io/archivesunleashed/CountableRDDTest.scala (1 change: 1 addition & 0 deletions)
@@ -35,6 +35,7 @@ class CountableRDDTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

src/test/scala/io/archivesunleashed/RecordLoaderTest.scala (1 change: 1 addition & 0 deletions)
@@ -37,6 +37,7 @@ class RecordLoaderTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

src/test/scala/io/archivesunleashed/RecordRDDTest.scala (1 change: 1 addition & 0 deletions)
@@ -37,6 +37,7 @@ class RecordRDDTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

src/test/scala/io/archivesunleashed/WarcTest.scala (1 change: 1 addition & 0 deletions)
@@ -37,6 +37,7 @@ class WarcTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
records = RecordLoader.loadArchives(warcPath, sc)
}
ExtractGraphTest.scala (5 changes: 4 additions & 1 deletion)
@@ -30,7 +30,9 @@ import org.scalatest.{BeforeAndAfter, FunSuite}

import scala.util.Try

- @RunWith(classOf[JUnitRunner])
+ // TODO:
+ // See: https://github.com/archivesunleashed/aut/pull/204/files#diff-4541b9834513985c360b64093fd45073
+ //@RunWith(classOf[JUnitRunner])
class ExtractGraphTest extends FunSuite with BeforeAndAfter {
private val arcPath = Resources.getResource("arc/example.arc.gz").getPath
private var sc: SparkContext = _
@@ -43,6 +45,7 @@ import scala.util.Try
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

ExtractPopularImagesTest.scala (1 change: 1 addition & 0 deletions)
@@ -36,6 +36,7 @@ class ExtractPopularImagesTest extends FunSuite with BeforeAndAfter {
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

WriteGEXFTest.scala (1 change: 1 addition & 0 deletions)
@@ -40,6 +40,7 @@ class WriteGEXFTest extends FunSuite with BeforeAndAfter{
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

WriteGraphMLTest.scala (1 change: 1 addition & 0 deletions)
@@ -40,6 +40,7 @@ class WriteGraphMLTest extends FunSuite with BeforeAndAfter{
val conf = new SparkConf()
.setMaster(master)
.setAppName(appName)
+ conf.set("spark.driver.allowMultipleContexts", "true");
sc = new SparkContext(conf)
}

