Skip to content

Commit

Permalink
[HUDI-4103] TestCreateTable failed CTAS when indicating hoodie.database.name in table properties
Browse files Browse the repository at this point in the history
  • Loading branch information
dongkelun committed May 16, 2022
1 parent 61030d8 commit 9c37bde
Showing 1 changed file with 72 additions and 66 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -383,80 +383,86 @@ class TestCreateTable extends HoodieSparkSqlTestBase {
}

test("Test Create Table As Select With Tblproperties For Filter Props") {
  // Verifies that CTAS filters out Hudi-internal table properties
  // (hoodie.database.name, hoodie.table.name, write operation) supplied via
  // tblproperties: they must NOT leak into the catalog metadata, and the
  // on-disk hoodie.properties must record the real database ("default") and
  // the real table name instead of the user-supplied dummy values.
  withTempDir { tmp =>
    Seq("cow", "mor").foreach { tableType =>
      val tableName = generateTableName
      // Explicit location under the temp dir so each iteration writes to an
      // isolated base path that is cleaned up with the temp dir.
      spark.sql(
        s"""
           | create table $tableName using hudi
           | partitioned by (dt)
           | tblproperties(
           |  hoodie.database.name = "databaseName",
           |  hoodie.table.name = "tableName",
           |  primaryKey = 'id',
           |  preCombineField = 'ts',
           |  hoodie.datasource.write.operation = 'upsert',
           |  type = '$tableType'
           | )
           | location '${tmp.getCanonicalPath}/$tableName'
           | AS
           | select 1 as id, 'a1' as name, 10 as price, '2021-04-01' as dt, 1000 as ts
       """.stripMargin
      )
      checkAnswer(s"select id, name, price, dt from $tableName")(
        Seq(1, "a1", 10, "2021-04-01")
      )
      // The filtered props must not appear in the Spark catalog metadata.
      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
      assertFalse(table.properties.contains(HoodieTableConfig.DATABASE_NAME.key()))
      assertFalse(table.properties.contains(HoodieTableConfig.NAME.key()))
      assertFalse(table.properties.contains(OPERATION.key()))

      // The on-disk table config must carry the actual database/table names,
      // not the dummy values passed through tblproperties.
      val tablePath = table.storage.properties("path")
      val metaClient = HoodieTableMetaClient.builder()
        .setBasePath(tablePath)
        .setConf(spark.sessionState.newHadoopConf())
        .build()
      val tableConfig = metaClient.getTableConfig.getProps.asScala.toMap
      assertResult("default")(tableConfig(HoodieTableConfig.DATABASE_NAME.key()))
      assertResult(tableName)(tableConfig(HoodieTableConfig.NAME.key()))
      assertFalse(tableConfig.contains(OPERATION.key()))
    }
  }
}

test("Test Create Table As Select With Options For Filter Props") {
  // Same contract as the tblproperties variant, but the Hudi-internal props
  // are supplied through the OPTIONS clause: they must be filtered from the
  // catalog metadata, while hoodie.properties on disk records the actual
  // database ("default") and table name.
  withTempDir { tmp =>
    Seq("cow", "mor").foreach { tableType =>
      val tableName = generateTableName
      // Explicit location under the temp dir keeps each iteration's base
      // path isolated and auto-cleaned.
      spark.sql(
        s"""
           | create table $tableName using hudi
           | partitioned by (dt)
           | options(
           |  hoodie.database.name = "databaseName",
           |  hoodie.table.name = "tableName",
           |  primaryKey = 'id',
           |  preCombineField = 'ts',
           |  hoodie.datasource.write.operation = 'upsert',
           |  type = '$tableType'
           | )
           | location '${tmp.getCanonicalPath}/$tableName'
           | AS
           | select 1 as id, 'a1' as name, 10 as price, '2021-04-01' as dt, 1000 as ts
       """.stripMargin
      )
      checkAnswer(s"select id, name, price, dt from $tableName")(
        Seq(1, "a1", 10, "2021-04-01")
      )
      // Internal props must not leak into the Spark catalog metadata.
      val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
      assertFalse(table.properties.contains(HoodieTableConfig.DATABASE_NAME.key()))
      assertFalse(table.properties.contains(HoodieTableConfig.NAME.key()))
      assertFalse(table.properties.contains(OPERATION.key()))

      // On-disk config must reflect the real database/table identity, not
      // the dummy option values.
      val tablePath = table.storage.properties("path")
      val metaClient = HoodieTableMetaClient.builder()
        .setBasePath(tablePath)
        .setConf(spark.sessionState.newHadoopConf())
        .build()
      val tableConfig = metaClient.getTableConfig.getProps.asScala.toMap
      assertResult("default")(tableConfig(HoodieTableConfig.DATABASE_NAME.key()))
      assertResult(tableName)(tableConfig(HoodieTableConfig.NAME.key()))
      assertFalse(tableConfig.contains(OPERATION.key()))
    }
  }
}

Expand Down

0 comments on commit 9c37bde

Please sign in to comment.