diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 5eb52d5350f55..2b8fbdcde9d37 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -64,7 +64,8 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] {
     // struct fields.
     val options = children.flatMap(_.output).flatMap { option =>
       // If the first part of the desired name matches a qualifier for this possible match, drop it.
-      val remainingParts = if (option.qualifiers contains parts.head) parts.drop(1) else parts
+      val remainingParts =
+        if (option.qualifiers.contains(parts.head) && parts.size > 1) parts.drop(1) else parts
       if (option.name == remainingParts.head) (option, remainingParts.tail.toList) :: Nil else Nil
     }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index e2ad3915d3134..aa0c426f6fcb3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -28,6 +28,12 @@ class SQLQuerySuite extends QueryTest {
   // Make sure the tables are loaded.
   TestData
 
+  test("SPARK-2041 column name equals tablename") {
+    checkAnswer(
+      sql("SELECT tableName FROM tableName"),
+      "test")
+  }
+
   test("index into array") {
     checkAnswer(
       sql("SELECT data, data[0], data[0] + data[1], data[0 + 1] FROM arrayData"),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
index 876bd1636aab3..05de736bbce1b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TestData.scala
@@ -114,4 +114,7 @@ object TestData {
       NullStrings(2, "ABC") ::
       NullStrings(3, null) :: Nil)
   nullStrings.registerAsTable("nullStrings")
+
+  case class TableName(tableName: String)
+  TestSQLContext.sparkContext.parallelize(TableName("test") :: Nil).registerAsTable("tableName")
 }