Commit 090544a

Privatize methods

andrewor14 committed Feb 5, 2014
1 parent 13920c9 commit 090544a
Showing 1 changed file with 6 additions and 6 deletions.
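The change itself is mechanical: each helper below gains a `private` modifier, restricting it to the enclosing class rather than the package-level `private[spark]` scope the class itself carries. As a minimal standalone sketch of what `private` buys (class and member names here are hypothetical, not from this diff):

    class Wallet {
      // Callable only from inside Wallet; invisible even to the same package.
      private def balanceCents: Int = 1000

      // The public API delegates to the private helper.
      def balanceDollars: Double = balanceCents / 100.0
    }

    object Main extends App {
      println(new Wallet().balanceDollars) // OK: public member
      // println(new Wallet().balanceCents) // does not compile: private
    }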
@@ -242,7 +242,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
    * In the event of key hash collisions, this ensures no pairs are hidden from being merged.
    * Assume the given iterator is in sorted order.
    */
-  def getMorePairs(it: Iterator[(K, C)]): ArrayBuffer[(K, C)] = {
+  private def getMorePairs(it: Iterator[(K, C)]): ArrayBuffer[(K, C)] = {
     val kcPairs = new ArrayBuffer[(K, C)]
     if (it.hasNext) {
       var kc = it.next()
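getMorePairs collects every pair whose key shares the iterator's current minimum hash; because the spilled stream is sorted by key hash, equal-hash pairs are contiguous, and taking the whole run is what keeps colliding keys from being skipped during the merge. A simplified standalone sketch of that idea (names are illustrative, not the real implementation):

    import scala.collection.mutable.ArrayBuffer

    // Collect the leading run of pairs whose key hash equals the first
    // (minimum) hash in a hash-sorted iterator. BufferedIterator.head peeks
    // without consuming, so the run ends exactly at the first differing hash.
    def pairsWithMinHash[K, C](it: BufferedIterator[(K, C)]): ArrayBuffer[(K, C)] = {
      val pairs = new ArrayBuffer[(K, C)]
      if (it.hasNext) {
        val minHash = it.head._1.hashCode()
        while (it.hasNext && it.head._1.hashCode() == minHash) {
          pairs += it.next()
        }
      }
      pairs
    }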
@@ -260,7 +260,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
    * If the given buffer contains a value for the given key, merge that value into
    * baseCombiner and remove the corresponding (K, C) pair from the buffer
    */
-  def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
+  private def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
     var i = 0
     while (i < buffer.pairs.size) {
       val (k, c) = buffer.pairs(i)
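The linear scan walks a buffer's pairs; on a key match it folds the stored combiner into the base value and deletes the pair so it cannot be merged twice. A hedged sketch of the same contract, with the map's combiner-merging function passed in explicitly (mergeCombiners is an assumed parameter, not part of this diff):

    import scala.collection.mutable.ArrayBuffer

    // If `pairs` holds a combiner for `key`, fold it into `base` and drop the
    // pair; otherwise return `base` unchanged.
    def mergeIfKeyExists[K, C](key: K, base: C, pairs: ArrayBuffer[(K, C)],
                               mergeCombiners: (C, C) => C): C = {
      var i = 0
      while (i < pairs.size) {
        val (k, c) = pairs(i)
        if (k == key) {
          pairs.remove(i)
          return mergeCombiners(base, c)
        }
        i += 1
      }
      base
    }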
@@ -320,7 +320,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
    *
    * StreamBuffers are ordered by the minimum key hash found across all of their own pairs.
    */
-  case class StreamBuffer(iterator: Iterator[(K, C)], pairs: ArrayBuffer[(K, C)])
+  private case class StreamBuffer(iterator: Iterator[(K, C)], pairs: ArrayBuffer[(K, C)])
     extends Comparable[StreamBuffer] {

     def minKeyHash: Int = {
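Ordering StreamBuffers by their minimum key hash lets a min-priority queue always surface the buffer holding the globally smallest unmerged hash. A simplified sketch of that Comparable contract, with concrete types standing in for K and C:

    import scala.collection.mutable.ArrayBuffer

    // Buffers compare by the smallest key hash they contain, so a min-heap
    // (e.g. java.util.PriorityQueue) pops the buffer with the next hash to merge.
    case class Buf(pairs: ArrayBuffer[(String, Int)]) extends Comparable[Buf] {
      def minKeyHash: Int = pairs.map(_._1.hashCode()).min
      override def compareTo(other: Buf): Int =
        Integer.compare(minKeyHash, other.minKeyHash)
    }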
@@ -358,7 +358,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
     /**
      * Construct a stream that reads only from the next batch
      */
-    def nextBatchStream(): InputStream = {
+    private def nextBatchStream(): InputStream = {
       if (batchSizes.length > 0) {
         ByteStreams.limit(bufferedStream, batchSizes.remove(0))
       } else {
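ByteStreams.limit is Guava's wrapper that caps how many bytes may be read from a stream, so consuming one recorded size from the front of batchSizes yields a view over exactly the next spilled batch. A self-contained sketch of the pattern (the two 4-byte batches are made-up data):

    import java.io.{ByteArrayInputStream, InputStream}
    import scala.collection.mutable.ArrayBuffer
    import com.google.common.io.ByteStreams

    object BatchDemo {
      // Batches are written back-to-back; batchSizes records each batch's byte
      // length. Limiting the shared stream to the next length isolates one batch.
      val batchSizes = ArrayBuffer[Long](4L, 4L)
      val underlying: InputStream = new ByteArrayInputStream("aaaabbbb".getBytes)

      def nextBatchStream(): InputStream =
        if (batchSizes.nonEmpty) ByteStreams.limit(underlying, batchSizes.remove(0))
        else underlying
    }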
@@ -373,7 +373,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
      * If the current batch is drained, construct a stream for the next batch and read from it.
      * If no more pairs are left, return null.
      */
-    def readNextItem(): (K, C) = {
+    private def readNextItem(): (K, C) = {
       try {
         val item = deserializeStream.readObject().asInstanceOf[(K, C)]
         objectsRead += 1
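readNextItem's contract is to deserialize one (K, C) pair, count it, and signal exhaustion with null (the surrounding code, not shown in this hunk, then advances to the next batch). A stripped-down sketch of the end-of-stream handling, assuming a plain ObjectInputStream in place of Spark's serializer:

    import java.io.{EOFException, ObjectInputStream}

    // Read one pair off the stream; null signals that this batch is drained.
    def readNextItem[K, C](deserializeStream: ObjectInputStream): (K, C) = {
      try {
        deserializeStream.readObject().asInstanceOf[(K, C)]
      } catch {
        case _: EOFException => null
      }
    }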
@@ -408,7 +408,7 @@ private[spark] class ExternalAppendOnlyMap[K, V, C](
     }

     // TODO: Ensure this gets called even if the iterator isn't drained.
-    def cleanup() {
+    private def cleanup() {
       deserializeStream.close()
       file.delete()
     }
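The TODO above flags a real hazard: if a caller abandons the iterator early, the deserialization stream stays open and the spill file is never deleted. One conventional remedy, sketched here with a hypothetical wrapper that is not part of this commit, is to expose the cleanup through Closeable so callers can release resources in a try/finally:

    import java.io.{Closeable, File, ObjectInputStream}

    // Hypothetical handle: exposing cleanup via close() lets callers guarantee
    // it runs, drained iterator or not.
    class SpillReader(deserializeStream: ObjectInputStream, file: File) extends Closeable {
      override def close(): Unit = {
        deserializeStream.close() // release the open stream
        file.delete()             // remove the on-disk spill file
      }
    }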
