Skip to content

Commit

Permalink
Merge branch 'master' into feature/ODS-2600_Validate_DSC_creates_all_…
Browse files Browse the repository at this point in the history
…Serverless_CRs
  • Loading branch information
asanzgom authored Apr 1, 2024
2 parents 9c34b0d + bc04b46 commit b46accf
Show file tree
Hide file tree
Showing 122 changed files with 5,210 additions and 3,031 deletions.
58 changes: 10 additions & 48 deletions .github/workflows/code_quality.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,34 +2,11 @@ name: Lint
on: [push, pull_request]

jobs:
run_lint:
name: black formatter
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0

- name: Get changed files
id: changed-files
uses: tj-actions/[email protected]

- name: Install black formater
run: pip install black

- name: List all changed files
run: |
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
echo "$file was changed"
if [[ $file == *.py ]]; then
black $file --check --diff
fi
done
run_shellcheck:
name: shellcheck linter
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- name: shellcheck_
uses: reviewdog/action-shellcheck@v1
with:
Expand All @@ -46,7 +23,7 @@ jobs:
continue-on-error: true
steps:
- name: Checkout repository
uses: actions/checkout@v3
uses: actions/checkout@v4
- name: Install dependencies
run: |
python -m pip install --upgrade pip
Expand All @@ -58,27 +35,6 @@ jobs:
with:
sarif_file: .sarif.json
category: robocop
isort_linter:
name: isort
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3

- name: Get changed files
id: changed-files
uses: tj-actions/[email protected]

- name: Install isort linter
run: pip install isort

- name: List all changed files
run: |
for file in ${{ steps.changed-files.outputs.all_changed_files }}; do
echo "$file was changed"
if [[ $file == *.py ]]; then
isort $file --check --diff
fi
done
ruff:
name: ruff
runs-on: ubuntu-latest
Expand All @@ -94,7 +50,7 @@ jobs:
id: cached-home-local
with:
path: ~/.local
key: ${{ runner.os }}-local-${{ env.poetry_version }}
key: "${{ runner.os }}-local-${{ env.poetry_version }}"

- name: Install poetry
if: steps.cached-home-local.outputs.cache-hit != 'true'
Expand All @@ -116,7 +72,13 @@ jobs:
- name: Install ruff
run: poetry install --sync

# run all linters in turn and don't fail fast

- run: poetry run ruff check ods_ci/
if: "!cancelled()"

- run: poetry run ruff format ods_ci/ --check --diff
if: "!cancelled()"

selftests:
name: selftests
Expand All @@ -133,7 +95,7 @@ jobs:
id: cached-home-local
with:
path: ~/.local
key: ${{ runner.os }}-local-${{ env.poetry_version }}
key: "${{ runner.os }}-local-${{ env.poetry_version }}"

- name: Install poetry
if: steps.cached-home-local.outputs.cache-hit != 'true'
Expand Down
187 changes: 30 additions & 157 deletions ods_ci/libs/DataSciencePipelinesAPI.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,53 +14,15 @@ class DataSciencePipelinesAPI:
def __init__(self):
self.route = ""
self.sa_token = None

@keyword
def wait_until_openshift_pipelines_operator_is_deployed(self):
"""
when creating at the first time, it can take like 1 minute to have the pods ready
"""
deployment_count = 0
count = 0
while deployment_count != 1 and count < 30:
deployments = []
response, _ = self.run_oc("oc get deployment -n openshift-operators openshift-pipelines-operator -o json")
try:
response = json.loads(response)
if (
response["metadata"]["name"] == "openshift-pipelines-operator"
and "readyReplicas" in response["status"]
and response["status"]["readyReplicas"] == 1
):
deployments.append(response)
except JSONDecodeError:
pass
deployment_count = len(deployments)
time.sleep(1)
count += 1
pipeline_run_crd_count = 0
count = 0
while pipeline_run_crd_count < 1 and count < 60:
# https://github.com/opendatahub-io/odh-dashboard/issues/1673
# It is possible to start the Pipeline Server without pipelineruns.tekton.dev CRD
pipeline_run_crd_count = self.count_pods("oc get crd pipelineruns.tekton.dev", 1)
time.sleep(1)
count += 1
assert pipeline_run_crd_count == 1
return self.count_running_pods(
"oc get pods -n openshift-operators -l name=openshift-pipelines-operator -o json",
"openshift-pipelines-operator",
"Running",
1,
)
self.sleep_time = 45

@keyword
def login_and_wait_dsp_route(
self,
user,
pwd,
project,
route_name="ds-pipeline-pipelines-definition",
route_name="ds-pipeline-dspa",
timeout=120,
):
print("Fetch token")
Expand Down Expand Up @@ -89,7 +51,7 @@ def login_and_wait_dsp_route(

assert self.route != "", "Route must not be empty"
print(f"Waiting for Data Science Pipeline route to be ready to avoid firing false alerts: {self.route}")
time.sleep(45)
time.sleep(self.sleep_time)
status = -1
count = 0
while status != 200 and count < timeout:
Expand All @@ -102,8 +64,8 @@ def login_and_wait_dsp_route(
# if you need to debug, try to print also the response
print(f"({count}): Data Science Pipeline HTTP Status: {status}")
if status != 200:
time.sleep(30)
count += 30
time.sleep(self.sleep_time)
count += self.sleep_time
return status

@keyword
Expand All @@ -121,112 +83,6 @@ def remove_pipeline_project(self, project):
time.sleep(1)
count += 1

@keyword
def create_pipeline(self, url_test_pipeline_run_yaml):
print("Creating a pipeline from data science pipelines stack")
test_pipeline_run_yaml, _ = self.do_get(url_test_pipeline_run_yaml)
filename = "test_pipeline_run_yaml.yaml"
with open(filename, "w", encoding="utf-8") as f:
f.write(test_pipeline_run_yaml)
with open(filename, "rb") as f:
response, _ = self.do_upload(
f"https://{self.route}/apis/v1beta1/pipelines/upload",
files={"uploadfile": f},
headers={"Authorization": f"Bearer {self.sa_token}"},
)
os.remove(filename)
pipeline_json = json.loads(response)
pipeline_id = pipeline_json["id"]
response, status = self.do_get(
f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 200
assert json.loads(response)["name"] == filename
return pipeline_id

@keyword
def create_run(self, pipeline_id):
print("Creating the run from uploaded pipeline")
response, status = self.do_post(
f"https://{self.route}/apis/v1beta1/runs",
headers={
"Authorization": f"Bearer {self.sa_token}",
"Content-Type": "application/json",
},
json={
"name": "test-pipeline-run",
"pipeline_spec": {"pipeline_id": f"{pipeline_id}"},
},
)
assert status == 200
run_json = json.loads(response)
run_id = run_json["run"]["id"]

response, status = self.do_get(
f"https://{self.route}/apis/v1beta1/runs/{run_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 200

return run_id

@keyword
def check_run_status(self, run_id, timeout=160):
run_status = None
count = 0
run_finished_ok = False
while not run_finished_ok and count < timeout:
response, status = self.do_get(
f"https://{self.route}/apis/v1beta1/runs/{run_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
try:
run_json = json.loads(response)
if "run" in run_json and "status" in run_json["run"]:
run_status = run_json["run"]["status"]
except JSONDecodeError:
print(response, status)
print(f"Checking run status: {run_status}")
if run_status == "Failed":
break
# https://github.com/tektoncd/pipeline/blob/main/docs/pipelineruns.md#monitoring-execution-status
if run_status in ("Completed", "Succeeded"):
run_finished_ok = True
break
time.sleep(1)
count += 1
return run_finished_ok

@keyword
def delete_runs(self, run_id):
print("Deleting the runs")

response, status = self.do_delete(
f"https://{self.route}/apis/v1beta1/runs/{run_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 200
response, status = self.do_get(
f"https://{self.route}/apis/v1beta1/runs/{run_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 404

@keyword
def delete_pipeline(self, pipeline_id):
print("Deleting the pipeline")
response, status = self.do_delete(
f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 200
response, status = self.do_get(
f"https://{self.route}/apis/v1beta1/pipelines/{pipeline_id}",
headers={"Authorization": f"Bearer {self.sa_token}"},
)
assert status == 404

@keyword
def add_role_to_user(self, name, user, project):
output, error = self.run_oc(f"oc policy add-role-to-user {name} {user} -n {project} --role-namespace={project}")
Expand All @@ -236,9 +92,7 @@ def add_role_to_user(self, name, user, project):
def do_http_request(self, url):
assert self.route != "", "Login First"
response = requests.get(
f"http://{self.route}/{url}",
headers={"Authorization": f"Bearer {self.sa_token}"},
verify=False,
f"http://{self.route}/{url}", headers={"Authorization": f"Bearer {self.sa_token}"}, verify=self.get_cert()
)
assert response.status_code == 200
return response.url
Expand Down Expand Up @@ -309,21 +163,40 @@ def run_oc(self, command):
output, error = process.communicate()
return self.byte_to_str(output), error

def do_get(self, url, headers=None):
response = requests.get(url, headers=headers, verify=False)
def do_get(self, url, headers=None, skip_ssl=False):
if skip_ssl:
response = requests.get(url, headers=headers, verify=False)
else:
response = requests.get(url, headers=headers, verify=self.get_cert())
return self.byte_to_str(response.content), response.status_code

def do_post(self, url, headers, json):
response = requests.post(url, headers=headers, json=json, verify=False)
response = requests.post(url, headers=headers, json=json, verify=self.get_cert())
return self.byte_to_str(response.content), response.status_code

def do_upload(self, url, files, headers=None):
response = requests.post(url, headers=headers, files=files, verify=False)
response = requests.post(url, headers=headers, files=files, verify=self.get_cert())
return self.byte_to_str(response.content), response.status_code

def do_delete(self, url, headers):
response = requests.delete(url, headers=headers, verify=False)
response = requests.delete(url, headers=headers, verify=self.get_cert())
return self.byte_to_str(response.content), response.status_code

def byte_to_str(self, content):
    """Decode raw HTTP/process output bytes to text, silently dropping any
    byte sequences that are not valid UTF-8."""
    return str(content, encoding="utf-8", errors="ignore")

def get_secret(self, project, name):
secret_json, _ = self.run_oc(f"oc get secret -n {project} {name} -o json")
assert len(secret_json) > 0
return json.loads(secret_json)

def get_cert(self):
cert_json = self.get_secret("openshift-ingress-operator", "router-ca")
cert = cert_json["data"]["tls.crt"]
decoded_cert = base64.b64decode(cert).decode("utf-8")

file_name = "/tmp/kfp-cert"
cert_file = open(file_name, "w")
cert_file.write(decoded_cert)
cert_file.close()
return file_name
Loading

0 comments on commit b46accf

Please sign in to comment.