Skip to content

Commit

Permalink
address comments
Browse files Browse the repository at this point in the history
  • Loading branch information
scwf committed Dec 30, 2014
1 parent 02a662c commit 445b57b
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 13 deletions.
11 changes: 7 additions & 4 deletions sql/core/src/main/scala/org/apache/spark/sql/sources/ddl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -209,12 +209,15 @@ private[sql] case class CreateTableUsing(
sys.error(s"Failed to load class for data source: $provider")
}
}
val dataSource =
clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]
val relation = if(tableCols.isEmpty) {
dataSource.createRelation(
sqlContext, new CaseInsensitiveMap(options))
val dataSource =
clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.RelationProvider]

dataSource.createRelation(sqlContext, new CaseInsensitiveMap(options))
} else {
val dataSource =
clazz.newInstance().asInstanceOf[org.apache.spark.sql.sources.SchemaRelationProvider]

dataSource.createRelation(
sqlContext, new CaseInsensitiveMap(options), Some(StructType(tableCols)))
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,10 @@ import scala.language.existentials

import org.apache.spark.sql._

/**
 * Test data-source provider that creates a `SimpleFilteredScan` relation.
 *
 * The diff render had left both the old (`SchemaRelationProvider`, with an
 * `Option[StructType]` schema parameter) and new declarations in place; only
 * the post-commit `RelationProvider` form is kept here.
 */
class FilteredScanSource extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // "from" and "to" are required options; a missing key or non-numeric value
    // fails fast here (NoSuchElementException / NumberFormatException).
    SimpleFilteredScan(parameters("from").toInt, parameters("to").toInt)(sqlContext)
  }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,10 @@ package org.apache.spark.sql.sources

import org.apache.spark.sql._

/**
 * Test data-source provider that creates a `SimplePrunedScan` relation.
 *
 * The diff render had left both the old (`SchemaRelationProvider`, with an
 * `Option[StructType]` schema parameter) and new declarations in place; only
 * the post-commit `RelationProvider` form is kept here.
 */
class PrunedScanSource extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // "from" and "to" are required options; a missing key or non-numeric value
    // fails fast here (NoSuchElementException / NumberFormatException).
    SimplePrunedScan(parameters("from").toInt, parameters("to").toInt)(sqlContext)
  }
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,10 @@ import org.apache.spark.sql._

// Conventional `DefaultSource` entry point for this test package — presumably
// resolved by class name when the data source is specified by package alone;
// TODO confirm against the DDL provider-lookup logic.
class DefaultSource extends SimpleScanSource

/**
 * Test data-source provider that creates a `SimpleScan` relation.
 *
 * The diff render had left both the old (`SchemaRelationProvider`, with an
 * `Option[StructType]` schema parameter) and new declarations in place; only
 * the post-commit `RelationProvider` form is kept here.
 */
class SimpleScanSource extends RelationProvider {
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // NOTE: the upper-case "TO" key is deliberate in the original — presumably
    // exercising case-insensitive option handling; preserved byte-for-byte.
    SimpleScan(parameters("from").toInt, parameters("TO").toInt)(sqlContext)
  }
}
Expand Down

0 comments on commit 445b57b

Please sign in to comment.