Commit
Move Row extractor to catalyst.
marmbrus committed Apr 4, 2014
1 parent 208bf5e commit 3997dc9
Showing 2 changed files with 16 additions and 14 deletions.
@@ -19,6 +19,21 @@ package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst.types.NativeType

object Row {
  /**
   * This method can be used to extract fields from a [[Row]] object in a pattern match. Example:
   * {{{
   * import org.apache.spark.sql._
   *
   * val pairs = sql("SELECT key, value FROM src").rdd.map {
   *   case Row(key: Int, value: String) =>
   *     key -> value
   * }
   * }}}
   */
  def unapplySeq(row: Row): Some[Seq[Any]] = Some(row)
}
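
Aside, as a hedged illustration: the extractor object added above is what makes patterns like case Row(key: Int, value: String) possible. Pattern matching desugars into a call to unapplySeq, and the returned Seq[Any] is matched positionally; Some(row) type-checks on the assumption that catalyst's Row can be viewed as a Seq[Any]. The standalone Scala sketch below shows the same mechanism using hypothetical names (RowExtractorSketch, FakeRow) that do not appear in the commit.

// Standalone sketch of the extractor pattern used above (hypothetical names).
object RowExtractorSketch {
  // Stand-in for catalyst's Row; assumed to expose its fields as a Seq[Any].
  class FakeRow(val values: Seq[Any])

  // Same shape as the extractor added in this commit: unapplySeq returns the
  // fields as a Seq, enabling positional, typed pattern matches.
  object FakeRow {
    def unapplySeq(row: FakeRow): Some[Seq[Any]] = Some(row.values)
  }

  val rows = Seq(new FakeRow(Seq(1, "one")), new FakeRow(Seq(2, "two")))

  // Each case binds and type-tests fields by position, mirroring
  // case Row(key: Int, value: String) from the scaladoc example.
  val pairs: Seq[(Int, String)] =
    rows.map { case FakeRow(key: Int, value: String) => key -> value }
}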

/**
 * Represents one row of output from a relational operator. Allows both generic access by ordinal,
 * which will incur boxing overhead for primitives, as well as native primitive access.
15 changes: 1 addition & 14 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/package.scala
@@ -32,18 +32,5 @@ package object sql {

  type Row = catalyst.expressions.Row

  object Row {
    /**
     * This method can be used to extract fields from a [[Row]] object in a pattern match. Example:
     * {{{
     * import org.apache.spark.sql._
     *
     * val pairs = sql("SELECT key, value FROM src").rdd.map {
     *   case Row(key: Int, value: String) =>
     *     key -> value
     * }
     * }}}
     */
    def unapplySeq(row: Row): Some[Seq[Any]] = Some(row)
  }
  val Row = catalyst.expressions.Row
}
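
The package object now re-exports the catalyst definitions instead of duplicating them: type Row aliases the type, and val Row aliases the extractor object. Because a val bound to a singleton object is a stable identifier, code that imports org.apache.spark.sql._ can keep pattern matching on Row unchanged. A hedged, self-contained sketch of just this aliasing mechanism follows, using hypothetical names (AliasSketch, inner, api, Thing) that are not part of the commit.

// Standalone sketch of aliasing an object through a val (hypothetical names).
object AliasSketch {
  object inner {
    object Thing {
      def unapplySeq(s: String): Some[Seq[Char]] = Some(s.toSeq)
    }
  }

  object api {
    type Thing = String      // mirrors: type Row = catalyst.expressions.Row
    val Thing = inner.Thing  // mirrors: val Row = catalyst.expressions.Row
  }

  import api._

  // The pattern resolves through the val alias to inner.Thing.unapplySeq,
  // just as case Row(...) resolves to catalyst.expressions.Row.unapplySeq.
  val firstTwo: (Char, Char) = "hi there" match {
    case Thing(a, b, _*) => (a, b)
  }
}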
