Skip to content

Commit

Permalink
[SQL] Make dataframe more tolerant of being serialized
Browse files Browse the repository at this point in the history
Eases use in the spark-shell.

Author: Michael Armbrust <michael@databricks.com>

Closes apache#4545 from marmbrus/serialization and squashes the following commits:

04748e6 [Michael Armbrust] @scala.annotation.varargs
b36e219 [Michael Armbrust] moreFixes
  • Loading branch information
marmbrus committed Feb 12, 2015
1 parent d931b01 commit a38e23c
Show file tree
Hide file tree
Showing 4 changed files with 15 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ private[sql] object DataFrame {
*/
// TODO: Improve documentation.
@Experimental
trait DataFrame extends RDDApi[Row] {
trait DataFrame extends RDDApi[Row] with Serializable {

val sqlContext: SQLContext

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ import org.apache.spark.sql.types.{NumericType, StructType}
* Internal implementation of [[DataFrame]]. Users of the API should use [[DataFrame]] directly.
*/
private[sql] class DataFrameImpl protected[sql](
override val sqlContext: SQLContext,
val queryExecution: SQLContext#QueryExecution)
@transient override val sqlContext: SQLContext,
@transient val queryExecution: SQLContext#QueryExecution)
extends DataFrame {

/**
Expand Down
9 changes: 9 additions & 0 deletions sql/core/src/main/scala/org/apache/spark/sql/Dsl.scala
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,15 @@ object Dsl {
//////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////

/**
 * Returns the first column that is not null, checking the given columns from left to right.
 * {{{
 *   df.select(coalesce(df("a"), df("b")))
 * }}}
 *
 * @param e the candidate columns, evaluated in order; the result is the first non-null
 *          value (presumably null when all inputs are null, per SQL COALESCE — confirm
 *          against the `Coalesce` expression's semantics)
 * @return a [[Column]] wrapping a `Coalesce` expression over the inputs' expressions
 */
// @scala.annotation.varargs generates a Java-friendly overload so Java callers can
// pass Column... varargs directly.
@scala.annotation.varargs
def coalesce(e: Column*): Column = Coalesce(e.map(_.expr))

/**
* Unary minus, i.e. negate the expression.
* {{{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -129,9 +129,11 @@ class SQLContext(@transient val sparkContext: SparkContext)
* A collection of methods that are considered experimental, but can be used to hook into
* the query planner for advanced functionalities.
*/
// NOTE(review): @transient keeps the planner hooks out of the serialized form of
// SQLContext — per this commit's intent of tolerating serialization (e.g. when the
// context is captured in a spark-shell closure); the field is simply absent after
// deserialization rather than causing a NotSerializableException.
@transient
val experimental: ExperimentalMethods = new ExperimentalMethods(this)

/** Returns a [[DataFrame]] with no rows or columns. */
// NOTE(review): @transient + lazy means the value is not serialized with the context;
// after deserialization the lazy val is recomputed on first access instead.
@transient
lazy val emptyDataFrame = DataFrame(this, NoRelation)

/**
Expand Down Expand Up @@ -178,7 +180,7 @@ class SQLContext(@transient val sparkContext: SparkContext)
* (Scala-specific)
* Implicit methods available in Scala for converting common Scala objects into [[DataFrame]]s.
*/
object implicits {
object implicits extends Serializable {
// scalastyle:on

/** Creates a DataFrame from an RDD of case classes or tuples. */
Expand Down

0 comments on commit a38e23c

Please sign in to comment.