diff --git a/src/databricks/labs/lsql/backends.py b/src/databricks/labs/lsql/backends.py
index cad73f1d..25e28fcc 100644
--- a/src/databricks/labs/lsql/backends.py
+++ b/src/databricks/labs/lsql/backends.py
@@ -207,6 +207,7 @@ def fetch(self, sql: str, *, catalog: str | None = None, schema: str | None = No
                 self._spark.sql(f"USE CATALOG {catalog}")
             if schema:
                 self._spark.sql(f"USE SCHEMA {schema}")
+            # TODO: pyspark.sql.Row is being returned instead of databricks.labs.lsql.core.Row
             return iter(self._spark.sql(sql).collect())
         except Exception as e:
             error_message = str(e)
diff --git a/src/databricks/labs/lsql/core.py b/src/databricks/labs/lsql/core.py
index 541c411d..38e27dd7 100644
--- a/src/databricks/labs/lsql/core.py
+++ b/src/databricks/labs/lsql/core.py
@@ -66,6 +66,10 @@ def as_dict(self) -> dict[str, Any]:
         """Convert the row to a dictionary with the same conventions as Databricks SDK."""
         return dict(zip(self.__columns__, self, strict=True))
 
+    # PySpark's compatibility
+    def asDict(self, recursive: bool = False) -> dict[str, Any]:
+        return self.as_dict()
+
     def __eq__(self, other):
         """Check if the rows are equal."""
         if not isinstance(other, Row):
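
For context, a minimal sketch (not part of the diff) of what the asDict() shim enables: code written against PySpark's Row.asDict() keeps working when it receives an lsql Row instead. The keyword-argument construction of Row and the column names below are illustrative assumptions, not taken from the patch.

from databricks.labs.lsql.core import Row

# Build an lsql Row; keyword construction is assumed here for illustration.
row = Row(catalog="main", database="default", name="my_table")

# PySpark-style access via the compatibility method added in this diff.
assert row.asDict() == {"catalog": "main", "database": "default", "name": "my_table"}

# The native lsql spelling returns the same mapping.
assert row.as_dict() == row.asDict()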