diff --git a/src/atc/delta/delta_handle.py b/src/atc/delta/delta_handle.py
index 2aa8f8bb..79db959a 100644
--- a/src/atc/delta/delta_handle.py
+++ b/src/atc/delta/delta_handle.py
@@ -92,11 +92,11 @@
     def append(self, df: DataFrame, mergeSchema: bool = None) -> None:
         return self.write_or_append(df, "append", mergeSchema=mergeSchema)

     def truncate(self) -> None:
-        if self._path:
-            Spark.get().sql(f"TRUNCATE TABLE delta.`{self._path}`;")
+        if self._location:
+            Spark.get().sql(f"TRUNCATE TABLE delta.`{self._location}`;")
         else:
             Spark.get().sql(f"TRUNCATE TABLE {self._name};")
-        # if the name also does not exit, this will give a useful error like
+        # if the hive table does not exist, this will give a useful error like
        # pyspark.sql.utils.AnalysisException:
        # Table not found for 'TRUNCATE TABLE': TestDb.TestTbl;
diff --git a/tests/cluster/delta/test_delta_class.py b/tests/cluster/delta/test_delta_class.py
index 89b2e5aa..c9efb5d4 100644
--- a/tests/cluster/delta/test_delta_class.py
+++ b/tests/cluster/delta/test_delta_class.py
@@ -1,5 +1,7 @@
+import time
 import unittest

+from py4j.protocol import Py4JJavaError
 from pyspark.sql.utils import AnalysisException

 from atc import Configurator
@@ -84,7 +86,17 @@
     def test_03_create(self):
         db.create()
         dh = DeltaHandle.from_tc("MyTbl")
-        dh.create_hive_table()
+        for i in range(10, 0, -1):
+            try:
+                dh.create_hive_table()
+                break
+            except (AnalysisException, Py4JJavaError) as e:
+                if i > 1:
+                    print(e)
+                    print("trying again in 10 seconds")
+                    time.sleep(10)
+                else:
+                    raise e

         # test hive access:
         df = Spark.get().table("TestDb.TestTbl")
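
The test change above wraps `dh.create_hive_table()` in a retry loop, presumably to tolerate transient metastore errors (`AnalysisException` or `Py4JJavaError`) shortly after the database is created. If this pattern is needed in more tests, it could be factored out. The sketch below is a hypothetical `retry` helper (not part of this diff or the atc codebase) that mirrors the loop's behaviour: it retries the call a fixed number of times and re-raises the last exception once the attempts are exhausted.

```python
# Hypothetical helper, shown only to illustrate the retry pattern used in test_03_create.
import time
from typing import Callable, Tuple, Type, TypeVar

T = TypeVar("T")


def retry(
    action: Callable[[], T],
    exceptions: Tuple[Type[BaseException], ...],
    attempts: int = 10,
    delay_seconds: float = 10.0,
) -> T:
    """Call ``action``, retrying on the given exceptions.

    The last exception is re-raised once all attempts are exhausted.
    """
    for attempt in range(1, attempts + 1):
        try:
            return action()
        except exceptions as e:
            if attempt == attempts:
                # out of attempts: propagate the failure to the caller
                raise
            print(e)
            print(f"trying again in {delay_seconds} seconds")
            time.sleep(delay_seconds)
    raise AssertionError("unreachable: loop always returns or raises")
```

With such a helper, the body of the test could reduce to something like `retry(dh.create_hive_table, (AnalysisException, Py4JJavaError))`, keeping the retry policy in one place.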