Skip to content

Commit

Permalink
Add Hive and small follow-up
Browse files Browse the repository at this point in the history
  • Loading branch information
hvanhovell committed Aug 15, 2016
1 parent 3b638d4 commit 7ba8abb
Show file tree
Hide file tree
Showing 4 changed files with 3 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ case class RowDataSourceScanExec(
override val metastoreTableIdentifier: Option[TableIdentifier])
extends DataSourceScanExec with CodegenSupport {

private[sql] override lazy val metrics =
override lazy val metrics =
Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))

val outputUnsafeRows = relation match {
Expand Down Expand Up @@ -231,7 +231,7 @@ case class BatchedDataSourceScanExec(
override val metastoreTableIdentifier: Option[TableIdentifier])
extends DataSourceScanExec with CodegenSupport {

private[sql] override lazy val metrics =
override lazy val metrics =
Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ import org.apache.spark.sql.hive.MetastoreRelation
 * @param ignoreIfExists whether to continue without error if the table already exists;
 *                       otherwise an exception is raised
*/
private[hive]
case class CreateHiveTableAsSelectCommand(
tableDesc: CatalogTable,
query: LogicalPlan,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,6 @@ import org.apache.spark.util.{CircularBuffer, RedirectThread, SerializableConfig
* @param script the command that should be executed.
* @param output the attributes that are produced by the script.
*/
private[hive]
case class ScriptTransformation(
input: Seq[Expression],
script: String,
Expand Down Expand Up @@ -336,7 +335,6 @@ private class ScriptTransformationWriterThread(
}
}

private[hive]
object HiveScriptIOSchema {
def apply(input: ScriptInputOutputSchema): HiveScriptIOSchema = {
HiveScriptIOSchema(
Expand All @@ -355,7 +353,6 @@ object HiveScriptIOSchema {
/**
* The wrapper class of Hive input and output schema properties
*/
private[hive]
case class HiveScriptIOSchema (
inputRowFormat: Seq[(String, String)],
outputRowFormat: Seq[(String, String)],
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ import org.apache.spark.util.SerializableConfiguration
* [[FileFormat]] for reading ORC files. If this is moved or renamed, please update
* [[DataSource]]'s backwardCompatibilityMap.
*/
private[sql] class OrcFileFormat
class OrcFileFormat
extends FileFormat with DataSourceRegister with Serializable {

override def shortName(): String = "orc"
Expand Down

0 comments on commit 7ba8abb

Please sign in to comment.