Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

draft: check storage with lxc script + wait_for_idle() #625

Closed
wants to merge 5 commits into the base branch from the source branch
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions .github/workflows/integrate.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,10 @@ jobs:
juju add-model kubeflow
sg snap_microk8s -c "tox -e bundle-integration-${{ matrix.sdk }} -- --model kubeflow --bundle=./tests/integration/bundles/kfp_latest_edge.yaml.j2" --charmcraft-clean

- name: Display df
run: df -h
if: always()

- name: Get all
run: kubectl get all -A
if: failure()
Expand Down
1 change: 1 addition & 0 deletions requirements-integration-v1.in
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@ pytest-operator
pyyaml
tenacity
sh
jq
4 changes: 2 additions & 2 deletions requirements-integration-v1.txt
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@ aiosignal==1.3.1
# via aiohttp
anyio==4.4.0
# via httpx
appnope==0.1.4
# via ipython
asttokens==2.4.1
# via stack-data
async-timeout==4.0.3
Expand Down Expand Up @@ -130,6 +128,8 @@ jedi==0.19.1
# via ipython
jinja2==3.1.4
# via pytest-operator
jq==1.8.0
# via -r requirements-integration-v1.in
jsonschema==4.17.3
# via
# -r requirements-integration-v1.in
Expand Down
1 change: 1 addition & 0 deletions requirements-integration-v2.in
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@ pytest-operator
pyyaml
sh
tenacity
jq
2 changes: 2 additions & 0 deletions requirements-integration-v2.txt
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,8 @@ jedi==0.19.1
# via ipython
jinja2==3.1.4
# via pytest-operator
jq==1.8.0
# via -r requirements-integration-v2.in
jsonschema==4.17.3
# via -r requirements-integration-v2.in
juju==3.5.2.0
Expand Down
1 change: 1 addition & 0 deletions tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ This directory has the following structure:
│   ├── charmcraft.py
│   ├── k8s_resources.py
│   └── localize_bundle.py
│   └── lxc.py
├── kfp_globals.py
├── pipelines/
│   └── ... # Sample pipelines
Expand Down
8 changes: 7 additions & 1 deletion tests/integration/bundles/kfp_1.8_stable_install.yaml.j2
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ applications:
argo-controller: { charm: ch:argo-controller, channel: 3.3.10/stable, scale: 1, trust: true }
metacontroller-operator: { charm: ch:metacontroller-operator, channel: 3.0/stable, scale: 1, trust: true }
minio: { charm: ch:minio, channel: ckf-1.8/stable, scale: 1 }
kfp-db: { charm: ch:mysql-k8s, channel: 8.0/stable, scale: 1, constraints: mem=2G, trust: true }
mlmd: { charm: ch:mlmd, channel: 1.14/stable, scale: 1 }
envoy: { charm: ch:envoy, channel: 2.0/stable, scale: 1 }
kubeflow-profiles: { charm: ch:kubeflow-profiles, channel: 1.8/stable, scale: 1, trust: true }
Expand All @@ -22,6 +21,13 @@ applications:
options:
default-gateway: kubeflow-gateway
trust: true
kfp-db:
charm: mysql-k8s
channel: 8.0/stable
scale: 1
options:
profile: testing
trust: true
kubeflow-roles:
charm: kubeflow-roles
channel: 1.8/stable
Expand Down
8 changes: 7 additions & 1 deletion tests/integration/bundles/kfp_latest_edge.yaml.j2
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ applications:
argo-controller: { charm: ch:argo-controller, channel: latest/edge, scale: 1, trust: true }
metacontroller-operator: { charm: ch:metacontroller-operator, channel: latest/edge, scale: 1, trust: true }
minio: { charm: ch:minio, channel: latest/edge, scale: 1 }
kfp-db: { charm: ch:mysql-k8s, channel: 8.0/stable, scale: 1, constraints: mem=2G, trust: true }
mlmd: { charm: ch:mlmd, channel: latest/edge, scale: 1, trust: true}
envoy: { charm: ch:envoy, channel: latest/edge, scale: 1 }
kubeflow-profiles: { charm: ch:kubeflow-profiles, channel: latest/edge, scale: 1, trust: true }
Expand All @@ -22,6 +21,13 @@ applications:
options:
default-gateway: kubeflow-gateway
trust: true
kfp-db:
charm: mysql-k8s
channel: 8.0/stable
scale: 1
options:
profile: testing
trust: true
kubeflow-roles:
charm: kubeflow-roles
channel: latest/edge
Expand Down
14 changes: 14 additions & 0 deletions tests/integration/helpers/lxc.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
import json

import sh


def clean_charmcraft_lxc_instances() -> None:
    """Delete lxc instances in project "charmcraft" that are prefixed with "charmcraft-".

    Based on https://discourse.charmhub.io/t/how-to-quickly-clean-unused-lxd-instances-from-charmcraft-pack/15975

    Side effects: shells out to ``lxc list`` and ``lxc delete`` via the ``sh``
    package; prints one line per deleted instance.
    """
    # `lxc list --format json` emits a JSON array of instance objects; parse it
    # with the stdlib instead of pulling in the third-party `jq` binding.
    raw = sh.lxc.list(project="charmcraft", format="json")
    instances = json.loads(str(raw))
    # Only instances created by charmcraft builds carry the "charmcraft-" prefix;
    # leave any other instances in the project untouched.
    for name in (inst["name"] for inst in instances if inst["name"].startswith("charmcraft-")):
        print(f"Deleting lxc instance '{name}'")
        sh.lxc.delete(name, project="charmcraft")
20 changes: 13 additions & 7 deletions tests/integration/test_kfp_functional_v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from helpers.k8s_resources import apply_manifests, fetch_response
from helpers.localize_bundle import get_resources_from_charm_file
from helpers.charmcraft import charmcraft_clean
from helpers.lxc import clean_charmcraft_lxc_instances
from kfp_globals import (
CHARM_PATH_TEMPLATE,
KFP_CHARMS,
Expand All @@ -19,7 +20,6 @@
SAMPLE_VIEWER,
)

import sh
import kfp
import lightkube
import pytest
Expand Down Expand Up @@ -97,7 +97,8 @@ async def test_build_and_deploy(ops_test: OpsTest, request, lightkube_client):
context.update([(f"{charm.replace('-', '_')}", charm_file)])

if charmcraft_clean_flag == True:
charmcraft_clean(charms_to_build)
# charmcraft_clean(charms_to_build)
clean_charmcraft_lxc_instances()

# Render kfp-operators bundle file with locally built charms and their resources
rendered_bundle = render_bundle(
Expand All @@ -107,11 +108,16 @@ async def test_build_and_deploy(ops_test: OpsTest, request, lightkube_client):
# Deploy the kfp-operators bundle from the rendered bundle file
await deploy_bundle(ops_test, bundle_path=rendered_bundle, trust=True)

# Use `juju wait-for` instead of `wait_for_idle()`
# due to https://github.com/canonical/kfp-operators/issues/601
# and https://github.com/juju/python-libjuju/issues/1204
log.info("Waiting on model applications to be active")
sh.juju("wait-for","model","kubeflow", query="forEach(applications, app => app.status == 'active')", timeout="30m")
# Wait for everything to be up. Note, at time of writing these charms would naturally go
# into blocked during deploy while waiting for each other to satisfy relations, so we don't
# raise_on_blocked.
await ops_test.model.wait_for_idle(
status="active",
raise_on_blocked=False, # These apps block while waiting for each other to deploy/relate
raise_on_error=True,
timeout=3600,
idle_period=30,
)


# ---- KFP API Server focused test cases
Expand Down
20 changes: 14 additions & 6 deletions tests/integration/test_kfp_functional_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from helpers.k8s_resources import apply_manifests, fetch_response
from helpers.localize_bundle import get_resources_from_charm_file
from helpers.charmcraft import charmcraft_clean
from helpers.lxc import clean_charmcraft_lxc_instances
from kfp_globals import (
CHARM_PATH_TEMPLATE,
KFP_CHARMS,
Expand Down Expand Up @@ -99,7 +100,8 @@ async def test_build_and_deploy(ops_test: OpsTest, request, lightkube_client):
context.update([(f"{charm.replace('-', '_')}", charm_file)])

if charmcraft_clean_flag == True:
charmcraft_clean(charms_to_build)
# charmcraft_clean(charms_to_build)
clean_charmcraft_lxc_instances()

# Render kfp-operators bundle file with locally built charms and their resources
rendered_bundle = render_bundle(
Expand All @@ -109,11 +111,17 @@ async def test_build_and_deploy(ops_test: OpsTest, request, lightkube_client):
# Deploy the kfp-operators bundle from the rendered bundle file
await deploy_bundle(ops_test, bundle_path=rendered_bundle, trust=True)

# Use `juju wait-for` instead of `wait_for_idle()`
# due to https://github.com/canonical/kfp-operators/issues/601
# and https://github.com/juju/python-libjuju/issues/1204
log.info("Waiting on model applications to be active")
sh.juju("wait-for","model","kubeflow", query="forEach(applications, app => app.status == 'active')", timeout="30m")
# Wait for everything to be up. Note, at time of writing these charms would naturally go
# into blocked during deploy while waiting for each other to satisfy relations, so we don't
# raise_on_blocked.
await ops_test.model.wait_for_idle(
status="active",
raise_on_blocked=False, # These apps block while waiting for each other to deploy/relate
raise_on_error=True,
timeout=3600,
idle_period=30,
)


# ---- KFP API Server focused test cases
async def test_upload_pipeline(kfp_client):
Expand Down
Loading