Skip to content
This repository has been archived by the owner on Aug 25, 2023. It is now read-only.

Commit

Permalink
fix flaky test
Browse files Browse the repository at this point in the history
  • Loading branch information
Simon Heisterkamp committed Nov 10, 2022
1 parent 855cc45 commit 8da4438
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 11 deletions.
3 changes: 2 additions & 1 deletion src/atc/delta/db_handle.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,5 +50,6 @@ def drop_cascade(self) -> None:
def create(self) -> None:
sql = f"CREATE DATABASE IF NOT EXISTS {self._name} "
if self._location:
sql += f" LOCATION '{self._location}'"
sql += f' LOCATION "{self._location}"'
print("execute sql:", sql)
Spark.get().sql(sql)
11 changes: 9 additions & 2 deletions src/atc/delta/delta_handle.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,13 @@ def append(self, df: DataFrame, mergeSchema: bool = None) -> None:
return self.write_or_append(df, "append", mergeSchema=mergeSchema)

def truncate(self) -> None:
Spark.get().sql(f"TRUNCATE TABLE {self._name};")
if self._path:
Spark.get().sql(f"TRUNCATE TABLE delta.`{self._path}`;")
else:
Spark.get().sql(f"TRUNCATE TABLE {self._name};")
# if the name also does not exit, this will give a useful error like
# pyspark.sql.utils.AnalysisException:
# Table not found for 'TRUNCATE TABLE': TestDb.TestTbl;

def drop(self) -> None:
    """Drop the table from the metastore if it exists."""
    drop_stmt = f"DROP TABLE IF EXISTS {self._name};"
    Spark.get().sql(drop_stmt)
Expand All @@ -105,7 +111,8 @@ def drop_and_delete(self) -> None:
def create_hive_table(self) -> None:
sql = f"CREATE TABLE IF NOT EXISTS {self._name} "
if self._location:
sql += f" USING DELTA LOCATION '{self._location}'"
sql += f' USING DELTA LOCATION "{self._location}"'
print("execute sql:", sql)
Spark.get().sql(sql)

def recreate_hive_table(self):
Expand Down
17 changes: 9 additions & 8 deletions tests/cluster/delta/test_delta_class.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,15 +69,16 @@ def test_02_write(self):

dh.append(df, mergeSchema=True)

# @unittest.skip("Flaky test")
def test_03_create(self):
print(Configurator().get_all_details())
print(
{
k: v[:-15] + v[-12:]
for k, v in Spark.get().sparkContext.getConf().getAll()
if k.startswith("fs.azure.account")
}
)
# print(Configurator().get_all_details())
# print(
# {
# k: v[:-15] + v[-12:]
# for k, v in Spark.get().sparkContext.getConf().getAll()
# if k.startswith("fs.azure.account")
# }
# )

db = DbHandle.from_tc("MyDb")
db.create()
Expand Down

0 comments on commit 8da4438

Please sign in to comment.