Skip to content

Commit

Permalink
Merge pull request #2017 from pomadchin/fix/spark-persistent
Browse files Browse the repository at this point in the history
Loggers serialization safety
  • Loading branch information
lossyrob authored Feb 21, 2017
2 parents 17b6647 + 5cb43cb commit 5513313
Show file tree
Hide file tree
Showing 42 changed files with 50 additions and 61 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.merge._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import spray.json._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.merge._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import spray.json._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ import geotrellis.geotools._
import geotrellis.spark._
import geotrellis.util.annotations.experimental
import geotrellis.vector._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.mapreduce.InputFormatBase
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@ import geotrellis.geomesa.geotools._
import geotrellis.spark._
import geotrellis.util.annotations.experimental
import geotrellis.vector._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkContext
import org.geotools.data.Transaction
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@
package geotrellis.spark.io.geomesa

import geotrellis.spark.LayerId
import geotrellis.util.LazyLogging
import geotrellis.util.annotations.experimental

import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.DataStoreFinder
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,10 @@ import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.accumulo.AccumuloAttributeStore
import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent

import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom._
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.ingest._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent

import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom._
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.ingest._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ import geotrellis.util._
import geotrellis.util.annotations.experimental
import geotrellis.vector.Extent

import com.typesafe.scalalogging.LazyLogging
import mil.nga.giat.geowave.adapter.raster.adapter.merge.RasterTileRowTransform
import mil.nga.giat.geowave.adapter.raster.adapter.RasterDataAdapter
import mil.nga.giat.geowave.core.geotime.index.dimension._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import geotrellis.spark.merge._
import geotrellis.util._

import org.apache.spark.SparkContext
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import spray.json._

Expand Down
1 change: 0 additions & 1 deletion raster/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ libraryDependencies ++= Seq(
typesafeConfig,
jts,
spire,
logging,
monocleCore,
monocleMacro,
openCSV)
Expand Down
3 changes: 1 addition & 2 deletions raster/src/main/scala/geotrellis/raster/Tile.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,12 @@
package geotrellis.raster

import spire.syntax.cfor._
import com.typesafe.scalalogging._
import geotrellis.util._

import java.util.Locale
import scala.collection.mutable.ArrayBuffer
import scala.math.BigDecimal


/**
* Base trait for a Tile.
*/
Expand Down
1 change: 0 additions & 1 deletion s3-testkit/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,5 @@ libraryDependencies ++= Seq(
sparkCore % "provided",
awsSdkS3,
spire,
logging,
scalatest
)
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,16 @@
package geotrellis.spark.io.s3.testkit

import geotrellis.spark.io.s3._
import java.io.ByteArrayInputStream
import geotrellis.util.LazyLogging

import com.amazonaws.services.s3.model._
import java.util.concurrent.ConcurrentHashMap
import com.amazonaws.services.s3.internal.AmazonS3ExceptionBuilder
import scala.collection.immutable.TreeMap
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.IOUtils

import java.io.ByteArrayInputStream
import java.util.concurrent.ConcurrentHashMap

import scala.collection.immutable.TreeMap
import scala.collection.JavaConverters._

class MockS3Client() extends S3Client with LazyLogging {
Expand Down
5 changes: 0 additions & 5 deletions s3/src/main/scala/geotrellis/spark/io/s3/AmazonS3Client.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,10 @@ package geotrellis.spark.io.s3

import com.amazonaws.ClientConfiguration
import com.amazonaws.auth._
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion
import com.amazonaws.services.s3.{AmazonS3Client => AWSAmazonS3Client}
import com.amazonaws.retry.PredefinedRetryPolicies
import com.amazonaws.services.s3.model._
import org.apache.commons.io.IOUtils
import com.typesafe.scalalogging.LazyLogging

import java.io.{InputStream, ByteArrayInputStream}
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable

Expand Down
7 changes: 2 additions & 5 deletions s3/src/main/scala/geotrellis/spark/io/s3/S3Client.scala
Original file line number Diff line number Diff line change
Expand Up @@ -16,19 +16,16 @@

package geotrellis.spark.io.s3

import com.amazonaws.ClientConfiguration
import geotrellis.util.LazyLogging

import com.amazonaws.auth._
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion
import com.amazonaws.services.s3.{AmazonS3Client => AWSAmazonS3Client}
import com.amazonaws.retry.PredefinedRetryPolicies
import com.amazonaws.services.s3.model._
import org.apache.commons.io.IOUtils
import com.typesafe.scalalogging.LazyLogging

import java.io.{InputStream, ByteArrayInputStream}
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable

trait S3Client extends LazyLogging {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,13 @@ package geotrellis.spark.io.s3

import geotrellis.proj4.CRS
import geotrellis.spark.io.hadoop._
import geotrellis.util.LazyLogging

import com.amazonaws.services.s3.model.{ListObjectsRequest, ObjectListing}
import com.amazonaws.auth._
import com.amazonaws.regions._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.mapreduce.{InputFormat, Job, JobContext}
import com.typesafe.scalalogging.LazyLogging

import scala.util.matching.Regex

Expand Down
6 changes: 4 additions & 2 deletions s3/src/main/scala/geotrellis/spark/io/s3/S3InputSplit.scala
Original file line number Diff line number Diff line change
Expand Up @@ -16,12 +16,14 @@

package geotrellis.spark.io.s3

import java.io.{DataOutput, DataInput}
import geotrellis.util.LazyLogging

import com.amazonaws.services.s3.model.{S3ObjectSummary, ObjectListing}
import org.apache.hadoop.io.Writable
import org.apache.hadoop.mapreduce.InputSplit
import com.amazonaws.auth.{AWSCredentials, BasicAWSCredentials, AnonymousAWSCredentials, BasicSessionCredentials}
import com.typesafe.scalalogging.LazyLogging

import java.io.{DataOutput, DataInput}

/**
* Represents a batch of keys to be read from an S3 bucket.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import geotrellis.util._

import org.apache.spark.SparkContext
import spray.json.JsonFormat
import com.typesafe.scalalogging.LazyLogging

import scala.reflect.ClassTag

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import geotrellis.spark.io.index._
import geotrellis.spark.merge._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.avro.Schema
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ import geotrellis.spark.io.index._
import geotrellis.util._

import com.amazonaws.services.s3.model.PutObjectRequest
import com.typesafe.scalalogging.LazyLogging
import org.apache.spark.rdd.RDD
import spray.json._

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ import geotrellis.util._

import com.amazonaws.auth.AWSCredentials
import com.amazonaws.services.s3.model.GetObjectRequest
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.mapreduce.{InputSplit, TaskAttemptContext, RecordReader}
import org.apache.commons.io.IOUtils

Expand Down
1 change: 0 additions & 1 deletion spark-etl/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ name := "geotrellis-spark-etl"
libraryDependencies ++= Seq(
"com.github.fge" % "json-schema-validator" % "2.2.6",
sparkCore % "provided",
logging,
scalatest % "test")

test in assembly := {}
Expand Down
1 change: 0 additions & 1 deletion spark-etl/src/main/scala/geotrellis/spark/etl/Etl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ import geotrellis.spark.etl.config._

import org.apache.spark._
import org.apache.spark.rdd.RDD
import com.typesafe.scalalogging.LazyLogging

import scala.reflect._
import scala.reflect.runtime.universe._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ package geotrellis.spark.etl.accumulo
import geotrellis.spark.etl.OutputPlugin
import geotrellis.spark.etl.config.{AccumuloProfile, BackendProfile, EtlConf}
import geotrellis.spark.io.accumulo.{AccumuloAttributeStore, AccumuloWriteStrategy, HdfsWriteStrategy, SocketWriteStrategy}
import com.typesafe.scalalogging.LazyLogging
import geotrellis.util.LazyLogging

trait AccumuloOutput[K, V, M] extends OutputPlugin[K, V, M] with LazyLogging {
val name = "accumulo"
Expand Down
1 change: 0 additions & 1 deletion spark/build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ libraryDependencies ++= Seq(
sparkCore % "provided",
hadoopClient % "provided",
"com.google.uzaygezen" % "uzaygezen-core" % "0.2",
logging,
avro,
spire,
monocleCore, monocleMacro,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import geotrellis.spark.io.index._
import geotrellis.util._

import spray.json.JsonFormat
import com.typesafe.scalalogging.LazyLogging

import scala.reflect.ClassTag

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import geotrellis.util._

import org.apache.spark.SparkContext
import spray.json.JsonFormat
import com.typesafe.scalalogging.LazyLogging

import scala.reflect.ClassTag

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ import geotrellis.spark.io.json._
import geotrellis.spark.merge._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.avro.Schema
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ import geotrellis.raster._
import geotrellis.util._

import org.apache.spark.rdd.RDD
import com.typesafe.scalalogging.LazyLogging
import spray.json._

import scala.reflect._
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ import geotrellis.spark.io._
import geotrellis.spark.io.avro._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ import geotrellis.spark.io.index.KeyIndex
import geotrellis.spark.io.json._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.avro.Schema
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ import geotrellis.spark.merge._
import geotrellis.spark.util._
import geotrellis.util._

import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ import geotrellis.spark.io.avro._
import geotrellis.spark.io.avro.codecs._
import geotrellis.spark.io.hadoop.formats._
import geotrellis.spark.util.KryoWrapper
import geotrellis.util.LazyLogging

import com.typesafe.scalalogging.LazyLogging
import org.apache.avro.Schema
import org.apache.hadoop.io._
import org.apache.hadoop.fs.Path
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,13 @@ package geotrellis.spark.io.hadoop
import geotrellis.spark._
import geotrellis.spark.util._
import geotrellis.spark.partition._
import geotrellis.spark.io.hadoop.formats._
import geotrellis.spark.io.index._
import geotrellis.spark.io.avro._
import geotrellis.spark.io.avro.codecs._
import geotrellis.util.LazyLogging

import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io._
import org.apache.hadoop.mapreduce.lib.output._
import org.apache.hadoop.conf.Configuration
import org.apache.spark.rdd._

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,20 +16,19 @@

package geotrellis.spark.io.hadoop

import geotrellis.spark.io.hadoop.formats._
import geotrellis.util.LazyLogging

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.io._

import java.io._
import java.util.Scanner
import com.typesafe.scalalogging.LazyLogging

import scala.collection.mutable.ListBuffer
import scala.util.Random
import scala.reflect._
import java.io._

abstract class LineScanner extends Iterator[String] with java.io.Closeable

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ import geotrellis.util._

import org.apache.spark.Partitioner
import org.apache.spark.rdd._
import com.typesafe.scalalogging.LazyLogging

import scala.reflect.ClassTag

Expand Down
4 changes: 3 additions & 1 deletion spark/src/main/scala/geotrellis/spark/util/SparkUtils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,12 @@

package geotrellis.spark.util

import com.typesafe.scalalogging.LazyLogging
import geotrellis.util.LazyLogging

import org.apache.hadoop.conf.Configuration
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

import java.io.File

object SparkUtils extends LazyLogging {
Expand Down
Loading

0 comments on commit 5513313

Please sign in to comment.