Changes to work with Databricks SDK v0.38.0 #350

Merged: 7 commits, Nov 19, 2024
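
Databricks SDK v0.38.0 changes the Lakeview API so that `lakeview.create` and `lakeview.update` accept the `Dashboard` dataclass directly instead of a serialized dictionary, which is why the diff below drops the `.as_dict()` calls and the accompanying `# type: ignore` comments. A minimal sketch of the new call shape, assuming a configured `WorkspaceClient` and a placeholder warehouse id:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard

ws = WorkspaceClient()  # assumes auth via environment variables or ~/.databrickscfg

# databricks-sdk ~=0.38: pass the Dashboard dataclass itself, not dashboard.as_dict().
dashboard = Dashboard(display_name="lsql-example")
created = ws.lakeview.create(dashboard=dashboard)

assert created.dashboard_id is not None
ws.lakeview.publish(created.dashboard_id, warehouse_id="<warehouse-id>")  # placeholder id
```
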
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -21,7 +21,7 @@ classifiers = [
]
dependencies = [
"databricks-labs-blueprint[yaml]>=0.4.2",
"databricks-sdk~=0.37",
"databricks-sdk~=0.38",
"sqlglot>=22.3.1"
]

4 changes: 2 additions & 2 deletions src/databricks/labs/lsql/dashboards.py
@@ -1133,9 +1133,9 @@ def create_dashboard(
warehouse_id=warehouse_id,
)
if dashboard_id is not None:
- sdk_dashboard = self._ws.lakeview.update(dashboard_id, dashboard=dashboard_to_create.as_dict())  # type: ignore
+ sdk_dashboard = self._ws.lakeview.update(dashboard_id, dashboard=dashboard_to_create)
else:
- sdk_dashboard = self._ws.lakeview.create(dashboard=dashboard_to_create.as_dict())  # type: ignore
+ sdk_dashboard = self._ws.lakeview.create(dashboard=dashboard_to_create)
if publish:
assert sdk_dashboard.dashboard_id is not None
self._ws.lakeview.publish(sdk_dashboard.dashboard_id, warehouse_id=warehouse_id)
78 changes: 37 additions & 41 deletions tests/integration/test_core.py
@@ -17,49 +17,44 @@ def test_sql_execution_chunked(ws, disposition):
assert total == 1999999000000


- def test_sql_execution(ws, env_or_skip):
- results = []
+ NYC_TAXI_TRIPS_LIMITED = """
+ WITH zipcodes AS (
+ SELECT DISTINCT pickup_zip, dropoff_zip
+ FROM samples.nyctaxi.trips
+ WHERE pickup_zip = 10282 AND dropoff_zip <= 10005
+ )
+
+ SELECT
+ trips.pickup_zip,
+ trips.dropoff_zip,
+ trips.tpep_pickup_datetime,
+ trips.tpep_dropoff_datetime
+ FROM
+ zipcodes
+ JOIN
+ samples.nyctaxi.trips AS trips
+ ON zipcodes.pickup_zip = trips.pickup_zip AND zipcodes.dropoff_zip = trips.dropoff_zip
+ ORDER BY trips.dropoff_zip, trips.tpep_pickup_datetime, trips.tpep_dropoff_datetime
+ """
+
+
+ def test_sql_execution(ws, env_or_skip) -> None:
see = StatementExecutionExt(ws, warehouse_id=env_or_skip("TEST_DEFAULT_WAREHOUSE_ID"))
- for pickup_zip, dropoff_zip in see.fetch_all(
- "SELECT pickup_zip, dropoff_zip FROM nyctaxi.trips LIMIT 10", catalog="samples"
- ):
- results.append((pickup_zip, dropoff_zip))
- assert results == [
- (10282, 10171),
- (10110, 10110),
- (10103, 10023),
- (10022, 10017),
- (10110, 10282),
- (10009, 10065),
- (10153, 10199),
- (10112, 10069),
- (10023, 10153),
- (10012, 10003),
- ]
-
-
- def test_sql_execution_partial(ws, env_or_skip):
- results = []
+
+ records = see.fetch_all(NYC_TAXI_TRIPS_LIMITED, catalog="samples")
+
+ assert len([True for _ in records]) > 1
+
+
+ def test_sql_execution_as_iterator(ws, env_or_skip) -> None:
+ number_of_records = 0
see = StatementExecutionExt(ws, warehouse_id=env_or_skip("TEST_DEFAULT_WAREHOUSE_ID"), catalog="samples")
for row in see("SELECT * FROM nyctaxi.trips LIMIT 10"):
pickup_time, dropoff_time = row[0], row[1]
pickup_zip = row.pickup_zip
dropoff_zip = row["dropoff_zip"]
for row in see(NYC_TAXI_TRIPS_LIMITED):
pickup_zip, dropoff_zip, pickup_time, dropoff_time = row[0], row[1], row[2], row[3]
all_fields = row.asDict()
logger.info(f"{pickup_zip}@{pickup_time} -> {dropoff_zip}@{dropoff_time}: {all_fields}")
- results.append((pickup_zip, dropoff_zip))
- assert results == [
- (10282, 10171),
- (10110, 10110),
- (10103, 10023),
- (10022, 10017),
- (10110, 10282),
- (10009, 10065),
- (10153, 10199),
- (10112, 10069),
- (10023, 10153),
- (10012, 10003),
- ]
+ number_of_records += 1
+ assert number_of_records > 1


def test_fetch_one(ws):
@@ -73,9 +68,10 @@ def test_fetch_one_fails_if_limit_is_bigger(ws):
see.fetch_one("SELECT * FROM samples.nyctaxi.trips LIMIT 100")


- def test_fetch_one_works(ws):
+ def test_fetch_one_works(ws) -> None:
see = StatementExecutionExt(ws)
row = see.fetch_one("SELECT * FROM samples.nyctaxi.trips LIMIT 1")
row = see.fetch_one("SELECT pickup_zip FROM samples.nyctaxi.trips WHERE pickup_zip == 10282 LIMIT 1")
assert row is not None
assert row.pickup_zip == 10282
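
These tests exercise the `StatementExecutionExt` helper rather than anything SDK-version specific. A minimal sketch of the same calls outside the test harness, assuming the helper is importable from `databricks.labs.lsql.core`, standard authentication, and a placeholder SQL warehouse id:

```python
from databricks.sdk import WorkspaceClient
from databricks.labs.lsql.core import StatementExecutionExt

ws = WorkspaceClient()  # assumes auth via environment variables or ~/.databrickscfg
see = StatementExecutionExt(ws, warehouse_id="<warehouse-id>", catalog="samples")

# fetch_one returns a single Row (or None); as tested above, it refuses queries
# that can return more than one row.
row = see.fetch_one("SELECT pickup_zip FROM nyctaxi.trips WHERE pickup_zip == 10282 LIMIT 1")
assert row is not None and row.pickup_zip == 10282

# Calling the object itself iterates rows; fields are reachable by index,
# attribute, key, or via asDict().
for row in see("SELECT pickup_zip, dropoff_zip FROM nyctaxi.trips LIMIT 10"):
    print(row.pickup_zip, row["dropoff_zip"], row.asDict())
```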


4 changes: 2 additions & 2 deletions tests/integration/test_dashboards.py
@@ -58,7 +58,7 @@ def create(*, display_name: str = "") -> SDKDashboard:
display_name = f"created_by_lsql_{make_random()}"
else:
display_name = f"{display_name} ({make_random()})"
- dashboard = ws.lakeview.create(dashboard=SDKDashboard(display_name=display_name).as_dict())
+ dashboard = ws.lakeview.create(dashboard=SDKDashboard(display_name=display_name))
if is_in_debug():
dashboard_url = f"{ws.config.host}/sql/dashboardsv3/{dashboard.dashboard_id}"
webbrowser.open(dashboard_url)
@@ -117,7 +117,7 @@ def test_dashboards_creates_exported_dashboard_definition(ws, make_dashboard) ->
dashboard_content = (Path(__file__).parent / "dashboards" / "dashboard.lvdash.json").read_text()

dashboard_to_create = dataclasses.replace(sdk_dashboard, serialized_dashboard=dashboard_content)
- ws.lakeview.update(sdk_dashboard.dashboard_id, dashboard=dashboard_to_create.as_dict())
+ ws.lakeview.update(sdk_dashboard.dashboard_id, dashboard=dashboard_to_create)
lakeview_dashboard = Dashboard.from_dict(json.loads(dashboard_content))
new_dashboard = dashboards.get_dashboard(sdk_dashboard.path)
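
This test builds the update payload by copying the SDK dataclass with `dataclasses.replace` and passing the copy straight to `lakeview.update`, which is the shape the v0.38.0 API expects. A condensed, self-contained sketch of that flow (the display name and JSON path are illustrative):

```python
import dataclasses
from pathlib import Path

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard as SDKDashboard

ws = WorkspaceClient()  # assumes standard auth

# Create an empty dashboard, then swap in a serialized Lakeview definition.
sdk_dashboard = ws.lakeview.create(dashboard=SDKDashboard(display_name="created_by_lsql_example"))
dashboard_content = Path("dashboards/dashboard.lvdash.json").read_text()

dashboard_to_update = dataclasses.replace(sdk_dashboard, serialized_dashboard=dashboard_content)
ws.lakeview.update(sdk_dashboard.dashboard_id, dashboard=dashboard_to_update)
```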

4 changes: 2 additions & 2 deletions tests/unit/test_dashboards.py
@@ -1478,7 +1478,7 @@ def test_dashboards_calls_create_without_dashboard_id() -> None:

dashboards.create_dashboard(dashboard_metadata, parent_path="/non/existing/path", warehouse_id="warehouse")

- ws.lakeview.create.assert_called_with(dashboard=sdk_dashboard.as_dict())
+ ws.lakeview.create.assert_called_with(dashboard=sdk_dashboard)
ws.lakeview.update.assert_not_called()
ws.lakeview.publish.assert_not_called()

@@ -1498,7 +1498,7 @@ def test_dashboards_calls_update_with_dashboard_id() -> None:
dashboards.create_dashboard(dashboard_metadata, dashboard_id="id", warehouse_id="warehouse")

ws.lakeview.create.assert_not_called()
- ws.lakeview.update.assert_called_with("id", dashboard=sdk_dashboard.as_dict())
+ ws.lakeview.update.assert_called_with("id", dashboard=sdk_dashboard)
ws.lakeview.publish.assert_not_called()
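
The unit tests now assert that the mocked Lakeview client receives the dataclass itself rather than its dict form. The fixture setup is not part of this diff, so the `create_autospec(WorkspaceClient)` construction below is an assumption about how `ws` is built; it is only meant to show the assertion pattern:

```python
from unittest.mock import create_autospec

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard as SDKDashboard

ws = create_autospec(WorkspaceClient)  # assumption: the tests stub the client roughly like this
sdk_dashboard = SDKDashboard(display_name="example")

# Code under test would make this call; here it is made directly to show the assertions.
ws.lakeview.create(dashboard=sdk_dashboard)

ws.lakeview.create.assert_called_with(dashboard=sdk_dashboard)
ws.lakeview.update.assert_not_called()
ws.lakeview.publish.assert_not_called()
```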

