Skip to content

Commit

Permalink
Fix 'schemas' table not being properly created when Delta Lake is used for bookkeeping.
Browse files Browse the repository at this point in the history
  • Loading branch information
yruslan committed Aug 29, 2024
1 parent 065f214 commit 63c330a
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -100,8 +100,8 @@ class BookkeeperDeltaTable(database: Option[String],
}

private def initSchemasDirectory(): Unit = {
if (!spark.catalog.tableExists(schemasTable)) {
writeEmptyDataset[TableSchemaJson](schemasTable)
if (!spark.catalog.tableExists(schemasFullTableName)) {
writeEmptyDataset[TableSchemaJson](schemasFullTableName)
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,13 @@ class BookkeeperDeltaTableLongSuite extends BookkeeperCommonSuite with SparkTest
val rndInt = Math.abs(Random.nextInt())
getBookkeeper(s"tbl${rndInt}_")
}

"test tables are created properly" in {
getBookkeeper(s"tbl0000_")

assert(spark.catalog.tableExists("tbl0000_bookkeeping"))
assert(spark.catalog.tableExists("tbl0000_schemas"))
}
}

private def cleanUpWarehouse(): Unit = {
Expand Down

0 comments on commit 63c330a

Please sign in to comment.