Skip to content

Commit

Permalink
Implemented @squito's suggestion for SparkEnv
Browse files Browse the repository at this point in the history
  • Loading branch information
Joseph Batchik committed Jul 16, 2015
1 parent dd71efe commit c0cf329
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,20 +20,17 @@ package org.apache.spark.serializer
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import java.nio.ByteBuffer

import org.apache.spark.SparkConf
import org.apache.spark.io.CompressionCodec

import scala.collection.mutable

import org.apache.commons.io.IOUtils
import com.esotericsoftware.kryo.{Kryo, Serializer => KSerializer}
import com.esotericsoftware.kryo.io.{Input => KryoInput, Output => KryoOutput}
import org.xerial.snappy.{SnappyInputStream, SnappyOutputStream}

import org.apache.avro.{Schema, SchemaNormalization}
import org.apache.avro.generic.{GenericData, GenericRecord}
import org.apache.avro.io._
import org.apache.commons.io.IOUtils

import org.apache.spark.SparkEnv
import org.apache.spark.io.CompressionCodec

/**
* Custom serializer used for generic Avro records. If the user registers the schemas
Expand All @@ -57,7 +54,8 @@ private[serializer] class GenericAvroSerializer(schemas: Map[Long, String])
private val fingerprintCache = new mutable.HashMap[Schema, Long]()
private val schemaCache = new mutable.HashMap[Long, Schema]()

private val codec = CompressionCodec.createCodec(new SparkConf())
/** This needs to be lazy because SparkEnv is sometimes not yet initialized when this is called. */
private lazy val codec = CompressionCodec.createCodec(SparkEnv.get.conf)

/**
* Used to compress Schemas when they are being sent over the wire.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@ import java.nio.ByteBuffer
import com.esotericsoftware.kryo.io.{Output, Input}
import org.apache.avro.{SchemaBuilder, Schema}
import org.apache.avro.generic.GenericData.Record
import org.apache.spark.io.CompressionCodec

import org.apache.spark.{SparkFunSuite, SharedSparkContext}

Expand Down

0 comments on commit c0cf329

Please sign in to comment.