From 6d530a919c2f61e69d970625f77b99df5c93b019 Mon Sep 17 00:00:00 2001
From: Andrew Or
Date: Tue, 15 Mar 2016 14:38:50 -0700
Subject: [PATCH] Fix tests

---
 .../spark/sql/catalyst/catalog/CatalogTestCases.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
index 59d531a810886..d7df7fb32e0bc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/CatalogTestCases.scala
@@ -78,7 +78,7 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
     catalog.createTable("db2", newTable("tbl1", "db2"), ignoreIfExists = false)
     catalog.createTable("db2", newTable("tbl2", "db2"), ignoreIfExists = false)
     catalog.createPartitions("db2", "tbl2", Seq(part1, part2), ignoreIfExists = false)
-    catalog.createFunction("db2", newFunc("func1"))
+    catalog.createFunction("db2", newFunc("func1", Some("db2")))
     catalog
   }
 
@@ -97,8 +97,8 @@ abstract class CatalogTestCases extends SparkFunSuite with BeforeAndAfterEach {
       partitionColumns = Seq(CatalogColumn("a", "int"), CatalogColumn("b", "string")))
   }
 
-  private def newFunc(name: String): CatalogFunction = {
-    CatalogFunction(FunctionIdentifier(name, database = None), funcClass)
+  private def newFunc(name: String, database: Option[String] = None): CatalogFunction = {
+    CatalogFunction(FunctionIdentifier(name, database), funcClass)
   }
 
   /**
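
Note: the sketch below is a minimal, standalone illustration of the change, not the actual Spark code. FunctionIdentifier and CatalogFunction are simplified stand-ins for the catalyst classes, and the funcClass value is a hypothetical placeholder for the suite's fixture. It shows the intent of the patch: newFunc now threads an optional database name through to FunctionIdentifier, so the test function created in "db2" carries its database instead of always defaulting to None.

object NewFuncSketch {
  // Simplified stand-ins for the real catalyst classes; field names are an
  // approximation, not the exact Spark API.
  case class FunctionIdentifier(funcName: String, database: Option[String])
  case class CatalogFunction(identifier: FunctionIdentifier, className: String)

  // Hypothetical placeholder for the suite's funcClass fixture.
  val funcClass: String = "org.apache.spark.myFuncClass"

  // Updated helper from the patch: callers may now supply the database,
  // which defaults to None so existing call sites keep compiling.
  def newFunc(name: String, database: Option[String] = None): CatalogFunction =
    CatalogFunction(FunctionIdentifier(name, database), funcClass)

  def main(args: Array[String]): Unit = {
    // Mirrors the updated call site: the function registered in "db2"
    // now records its database.
    val func1 = newFunc("func1", Some("db2"))
    assert(func1.identifier.database.contains("db2"))

    // The previous behaviour (no database) is still reachable via the default.
    assert(newFunc("func2").identifier.database.isEmpty)
  }
}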