diff --git a/ods_ci/tests/Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource b/ods_ci/tests/Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource new file mode 100644 index 000000000..fe1b63b88 --- /dev/null +++ b/ods_ci/tests/Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource @@ -0,0 +1,115 @@ +*** Settings *** +Documentation Collection of keywords to interact with Data Science Pipelines via CLI +Library OperatingSystem +Resource ../../../Resources/OCP.resource + + +*** Variables *** +${DSPA_PATH}= tests/Resources/Files/pipeline-samples/v2/dspa + + +*** Keywords *** +Create Pipeline Server + [Documentation] Creates a pipeline server providing object storage and database information + ... Note: currently, only some of the parameters are used. In the future this keyword will be + ... enhanced to use them all + [Arguments] ${namespace} + ... ${object_storage_access_key} ${object_storage_secret_key} +# ... ${object_storage_endpoint} ${object_storage_region} +# ... ${object_storage_bucket_name} +# ... ${database_host}=${EMPTY} ${database_port}=3306 +# ... ${database_username}=${EMPTY} ${database_password}=${EMPTY} +# ... ${database_db_name}=${EMPTY} + ... ${dsp_version}=v2 + + Create Secret With Pipelines Object Storage Information namespace=${namespace} + ... object_storage_access_key=${object_storage_access_key} + ... 
object_storage_secret_key=${object_storage_secret_key} + + # Process DSPA Template to create pipeline server + ${TEMPLATE_PARAMETERS}= Set Variable -p DSP_VERSION=${dsp_version} + Run oc process -f ${DSPA_PATH}/dspa-template.yaml ${TEMPLATE_PARAMETERS} | oc apply -n ${namespace} -f - + +# robocop: disable:line-too-long +Create PipelineServer Using Custom DSPA + [Documentation] Install and verifies that DataSciencePipelinesApplication CRD is installed and working + [Arguments] ${namespace} ${dspa_file}=data-science-pipelines-sample.yaml ${assert_install}=True + + Run oc apply -f "${DSPA_PATH}/${dspa_file}" -n ${namespace} + IF ${assert_install}==True + ${generation_value} Run oc get datasciencepipelinesapplications -n ${namespace} -o json | jq '.items[0].metadata.generation' + Should Be True ${generation_value} == 2 DataSciencePipelinesApplication created + END + +Verify Pipeline Server Deployments # robocop: disable + [Documentation] Verifies the correct deployment of DS Pipelines in the rhods namespace + [Arguments] ${namespace} + + @{all_pods}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=component=data-science-pipelines + Run Keyword And Continue On Failure Length Should Be ${all_pods} 7 + + @{pipeline_api_server}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=ds-pipeline-dspa + ${containerNames}= Create List oauth-proxy ds-pipeline-api-server + Verify Deployment ${pipeline_api_server} 1 2 ${containerNames} + + @{pipeline_metadata_envoy}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=ds-pipeline-metadata-envoy-dspa + ${containerNames}= Create List container oauth-proxy + Verify Deployment ${pipeline_metadata_envoy} 1 2 ${containerNames} + + @{pipeline_metadata_grpc}= Oc Get kind=Pod namespace=${namespace} + ... 
label_selector=app=ds-pipeline-metadata-grpc-dspa + ${containerNames}= Create List container + Verify Deployment ${pipeline_metadata_grpc} 1 1 ${containerNames} + + @{pipeline_persistenceagent}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=ds-pipeline-persistenceagent-dspa + ${containerNames}= Create List ds-pipeline-persistenceagent + Verify Deployment ${pipeline_persistenceagent} 1 1 ${containerNames} + + @{pipeline_scheduledworkflow}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=ds-pipeline-scheduledworkflow-dspa + ${containerNames}= Create List ds-pipeline-scheduledworkflow + Verify Deployment ${pipeline_scheduledworkflow} 1 1 ${containerNames} + + @{pipeline_workflow_controller}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=ds-pipeline-workflow-controller-dspa + ${containerNames}= Create List ds-pipeline-workflow-controller + Verify Deployment ${pipeline_workflow_controller} 1 1 ${containerNames} + + @{mariadb}= Oc Get kind=Pod namespace=${namespace} + ... label_selector=app=mariadb-dspa + ${containerNames}= Create List mariadb + Verify Deployment ${mariadb} 1 1 ${containerNames} + +Wait Until Pipeline Server Is Deployed + [Documentation] Waits until all the expected pods of the pipeline server + ... are running + [Arguments] ${namespace} + Wait Until Keyword Succeeds 10 times 10s + ... Verify Pipeline Server Deployments namespace=${namespace} + +Wait Until Pipeline Server Is Deleted + [Documentation] Waits until all pipeline server pods are deleted + [Arguments] ${namespace} + # robocop: off=expression-can-be-simplified + FOR ${_} IN RANGE 0 30 + ${pod_count}= Run oc get pods -n ${namespace} -l component=data-science-pipelines | wc -l + IF ${pod_count}==0 BREAK + Sleep 1s + END + +# robocop: disable:line-too-long +Create Pipelines ConfigMap With Custom Pip Index Url And Trusted Host + [Documentation] Creates a Configmap (ds-pipeline-custom-env-vars) in the project, + ... 
storing the values for pip_index_url and pip_trusted_host + [Arguments] ${namespace} + Run oc create configmap ds-pipeline-custom-env-vars -n ${namespace} --from-literal=pip_index_url=${PIP_INDEX_URL} --from-literal=pip_trusted_host=${PIP_TRUSTED_HOST} + +Create Secret With Pipelines Object Storage Information + [Documentation] Creates a secret needed to create a pipeline server containing the object storage credentials + [Arguments] ${namespace} ${object_storage_access_key} ${object_storage_secret_key} + Run oc create secret generic dashboard-dspa-secret -n ${namespace} --from-literal=AWS_ACCESS_KEY_ID=${object_storage_access_key} --from-literal=AWS_SECRET_ACCESS_KEY=${object_storage_secret_key} + Run oc label secret dashboard-dspa-secret -n ${namespace} opendatahub.io/dashboard=true diff --git a/ods_ci/tests/Resources/CLI/ModelServing/llm.resource b/ods_ci/tests/Resources/CLI/ModelServing/llm.resource index 8c77a1e27..ee3d830aa 100644 --- a/ods_ci/tests/Resources/CLI/ModelServing/llm.resource +++ b/ods_ci/tests/Resources/CLI/ModelServing/llm.resource @@ -361,7 +361,11 @@ Query Model Multiple Times ... inference_type=${inference_type} model_name=${model_name} body_params=${body_params} ... 
query_text=${EXP_RESPONSES}[queries][${query_idx}][query_text] IF "${token}" != "${None}" - Set To Dictionary ${header} Authorization Bearer ${token} + IF "${protocol}" == "grpc" + ${header}= Set Variable "Authorization: Bearer ${token}" -H ${header} + ELSE + Set To Dictionary ${header} Authorization Bearer ${token} + END END ${runtime_details}= Set Variable ${RUNTIME_FORMATS}[${runtime}][endpoints][${inference_type}][${protocol}] ${endpoint}= Set Variable ${runtime_details}[endpoint] diff --git a/ods_ci/tests/Resources/CLI/MustGather/MustGather.resource b/ods_ci/tests/Resources/CLI/MustGather/MustGather.resource index d36c35d17..50f506a6f 100644 --- a/ods_ci/tests/Resources/CLI/MustGather/MustGather.resource +++ b/ods_ci/tests/Resources/CLI/MustGather/MustGather.resource @@ -12,21 +12,21 @@ Get must-gather Logs ${output}= Run process tests/Resources/CLI/MustGather/get-must-gather-logs.sh shell=yes Should Be Equal As Integers ${output.rc} 0 Should Not Contain ${output.stdout} FAIL - ${must-gather-dir}= Run ls -d must-gather.local.* - ${namespaces-log-dir}= Run ls -d ${must-gather-dir}/quay-io-modh-must-gather-sha256-*/namespaces - Set Suite Variable ${must-gather-dir} - Set Suite Variable ${namespaces-log-dir} - Directory Should Exist ${must-gather-dir} - Directory Should Not Be Empty ${must-gather-dir} + ${must_gather_dir}= Run ls -d must-gather.local.* + ${namespaces_log_dir}= Run ls -d ${must_gather_dir}/quay-io-modh-must-gather-sha256-*/namespaces + Set Suite Variable ${must_gather_dir} + Set Suite Variable ${namespaces_log_dir} + Directory Should Exist ${must_gather_dir} + Directory Should Not Be Empty ${must_gather_dir} Verify Logs For ${namespace} [Documentation] Verifies the must-gather logs related to a namespace - Directory Should Exist ${namespaces-log-dir}/${namespace} - Directory Should Not Be Empty ${namespaces-log-dir}/${namespace} - Directory Should Not Be Empty ${namespaces-log-dir}/${namespace}/pods - ${log-files}= Run find 
${namespaces-log-dir}/${namespace}/pods -type f -name "*.log" - Should Not Be Equal ${log-files} ${EMPTY} + Directory Should Exist ${namespaces_log_dir}/${namespace} + Directory Should Not Be Empty ${namespaces_log_dir}/${namespace} + Directory Should Not Be Empty ${namespaces_log_dir}/${namespace}/pods + ${log_files}= Run find ${namespaces_log_dir}/${namespace}/pods -type f -name "*.log" + Should Not Be Equal ${log_files} ${EMPTY} Cleanup must-gather Logs [Documentation] Deletes the folder with the must-gather logs - Remove Directory ${must-gather-dir} recursive=True + Run Keyword If "${must_gather_dir}" != "${EMPTY}" Remove Directory ${must_gather_dir} recursive=True diff --git a/ods_ci/tests/Resources/CLI/MustGather/get-must-gather-logs.sh b/ods_ci/tests/Resources/CLI/MustGather/get-must-gather-logs.sh index 3ac31b297..5de212074 100755 --- a/ods_ci/tests/Resources/CLI/MustGather/get-must-gather-logs.sh +++ b/ods_ci/tests/Resources/CLI/MustGather/get-must-gather-logs.sh @@ -1,7 +1,7 @@ #!/bin/sh # Redirecting stdout/stderr of must-gather to a file, as it fills up the # process buffer and prevents the script from running further. -oc adm must-gather --image=quay.io/modh/must-gather@sha256:1bd8735d715b624c1eaf484454b0d6d400a334d8cbba47f99883626f36e96657 &> must-gather-results.txt +oc adm must-gather --image=quay.io/modh/must-gather@sha256:9d5988f45c3b00ec7fbbe7a8a86cc149a2768c9c47e207694fdb6e87ef44adf3 &> must-gather-results.txt if [ $? 
-eq 0 ] then diff --git a/ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/data-science-pipelines-reconciliation.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/data-science-pipelines-reconciliation.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/data-science-pipelines-reconciliation.yaml diff --git a/ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/data-science-pipelines-sample.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/data-science-pipelines-sample.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/data-science-pipelines-sample.yaml diff --git a/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/dspa-template.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/dspa-template.yaml new file mode 100644 index 000000000..e131b6c5a --- /dev/null +++ b/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/dspa-template.yaml @@ -0,0 +1,39 @@ +kind: Template +apiVersion: template.openshift.io/v1 +metadata: + name: dspa-template +objects: + - apiVersion: datasciencepipelinesapplications.opendatahub.io/v1alpha1 + kind: DataSciencePipelinesApplication + metadata: + name: dspa + spec: + dspVersion: ${DSP_VERSION} + objectStorage: + disableHealthCheck: false + enableExternalRoute: false + externalStorage: + basePath: '' + bucket: ${OBJECT_STORAGE_BUCKET} + host: ${OBJECT_STORAGE_HOST} + port: '' + region: ${OBJECT_STORAGE_REGION} + s3CredentialsSecret: + accessKey: AWS_ACCESS_KEY_ID + secretKey: AWS_SECRET_ACCESS_KEY + secretName: dashboard-dspa-secret + scheme: https + podToPodTLS: true +parameters: +- description: Kubeflow Pipelines Version + value: "v2" + name: DSP_VERSION +- description: Object Storage Bucket Name + value: "ods-ci-ds-pipelines" + name: OBJECT_STORAGE_BUCKET +- description: Object Storage Host + value: 
"s3.amazonaws.com" + name: OBJECT_STORAGE_HOST +- description: Object Storage Region + value: "us-east-1" + name: OBJECT_STORAGE_REGION diff --git a/ods_ci/tests/Resources/Files/dummy-storage-creds.yaml b/ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/dummy-storage-creds.yaml similarity index 100% rename from ods_ci/tests/Resources/Files/dummy-storage-creds.yaml rename to ods_ci/tests/Resources/Files/pipeline-samples/v2/dspa/dummy-storage-creds.yaml diff --git a/ods_ci/tests/Resources/Page/DistributedWorkloads/DistributedWorkloads.resource b/ods_ci/tests/Resources/Page/DistributedWorkloads/DistributedWorkloads.resource index 4aa7411e6..354824fa2 100644 --- a/ods_ci/tests/Resources/Page/DistributedWorkloads/DistributedWorkloads.resource +++ b/ods_ci/tests/Resources/Page/DistributedWorkloads/DistributedWorkloads.resource @@ -10,7 +10,7 @@ ${CODEFLARE-SDK-API_URL} %{CODEFLARE-SDK-API_URL=https://api.git ${CODEFLARE-SDK_DIR} codeflare-sdk ${CODEFLARE-SDK_REPO_URL} %{CODEFLARE-SDK_REPO_URL=https://github.com/project-codeflare/codeflare-sdk.git} ${DISTRIBUTED_WORKLOADS_RELEASE_ASSETS} https://github.com/opendatahub-io/distributed-workloads/releases/latest/download -${FMS_HF_TUNING_IMAGE} quay.io/modh/fms-hf-tuning@sha256:2985c259c66e227417ed69365bb23ab92ed5022650672771e56070326b21d5f4 +${FMS_HF_TUNING_IMAGE} quay.io/modh/fms-hf-tuning@sha256:8edea6f0f9c4c631cdca1e1c10abf0d4b994738fde78c40d48eda216fdd382f5 ${KFTO_CORE_BINARY_NAME} kfto ${KFTO_UPGRADE_BINARY_NAME} kfto-upgrade diff --git a/ods_ci/tests/Resources/Page/DistributedWorkloads/WorkloadMetricsUI.resource b/ods_ci/tests/Resources/Page/DistributedWorkloads/WorkloadMetricsUI.resource index ac2af4be8..1504fb885 100644 --- a/ods_ci/tests/Resources/Page/DistributedWorkloads/WorkloadMetricsUI.resource +++ b/ods_ci/tests/Resources/Page/DistributedWorkloads/WorkloadMetricsUI.resource @@ -61,7 +61,7 @@ Select Refresh Interval [Arguments] ${refresh_interval} Wait Until Element Is Visible ${REFRESH_INTERVAL_XP} 
timeout=20 Click Element ${REFRESH_INTERNAL_MENU_XP} - Click Element xpath=//button[text()="${refresh_interval}"] + Click Element xpath=//button[@role="option" and contains(., "${refresh_interval}")] Get Current CPU Usage [Documentation] Returns value of current cpu usage diff --git a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource index 1f1159e35..c88eecd4c 100644 --- a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource +++ b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource @@ -5,12 +5,12 @@ Resource ../../../ODS.robot Resource ../../../Common.robot Resource ../../../Page/ODH/ODHDashboard/ODHDashboard.robot Library DateTime +Resource ../../../CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource Library ../../../../../libs/DataSciencePipelinesAPI.py Resource ODHDataScienceProject/Pipelines.resource *** Variables *** -${DATA_SCIENCE_PIPELINES_APPLICATION_PATH}= tests/Resources/Files ${PIPELINES_IMPORT_BTN_FORM_XP}= xpath://*[@data-testid="import-button"] ${PIPELINE_NAME_INPUT_XP}= xpath://*[@data-testid="pipeline-name"] ${PIPELINE_DESC_INPUT_XP}= xpath://*[@data-testid="pipeline-description"] @@ -23,23 +23,6 @@ ${PIPELINE_EXPERIMENT_TABLE_XP}= xpath://*[@data-testid="experim *** Keywords *** -# robocop: disable:line-too-long -Install DataSciencePipelinesApplication CR - [Documentation] Install and verifies that DataSciencePipelinesApplication CRD is installed and working - [Arguments] ${project} ${dsp_file}=data-science-pipelines-sample.yaml ${assert_install}=True - Log ${project} - Oc Apply kind=DataSciencePipelinesApplication src=${DATA_SCIENCE_PIPELINES_APPLICATION_PATH}/${dsp_file} namespace=${project} - IF ${assert_install}==True - ${generation_value} Run oc get datasciencepipelinesapplications -n ${project} -o json | jq '.items[0].metadata.generation' - Should Be True ${generation_value} 
== 2 DataSciencePipelinesApplication created - END - -Create Pipelines ConfigMap With Custom Pip Index Url And Trusted Host - [Documentation] Creates a Configmap (ds-pipeline-custom-env-vars) in the project, - ... storing the values for pip_index_url and pip_trusted_host - [Arguments] ${project_title} - Run oc create configmap ds-pipeline-custom-env-vars --from-literal=pip_index_url=${PIP_INDEX_URL} --from-literal=pip_trusted_host=${PIP_TRUSTED_HOST} -n ${project_title} - Fill In Pipeline Import Form [Documentation] Compiles the form to create a pipeline. ... It works when you start server creation from either @@ -151,7 +134,7 @@ Delete Pipeline Server Click Element xpath://button/span/span[text()='Delete pipeline server'] Handle Deletion Confirmation Modal ${data_science_project_name} pipeline server pipeline server Wait Until Page Contains text=Configure pipeline server timeout=120s - Pipelines.Wait Until Pipeline Server Is Deleted ${data_science_project_name} + DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deleted ${data_science_project_name} Verify There Is No "Error Displaying Pipelines" After Creating Pipeline Server [Documentation] Verify me message "Error displaying Pipelines" after creating pipeline server diff --git a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource index aecca4e91..aa9488c56 100644 --- a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource +++ b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource @@ -71,7 +71,8 @@ Import Pipeline Click Button ${PIPELINES_IMPORT_BTN_FORM_XP} END Wait Until Generic Modal Disappears - Wait Until Project Is Open project_title=${project_title} + Maybe Wait For Dashboard Loading Spinner Page timeout=45s + Wait For Dashboard Page Title ${name} timeout=30s Create Pipeline Run [Documentation] Create a pipeline 
run from DS Project details page. @@ -277,69 +278,6 @@ Wait Until Page Contains Run Topology Page Run Keyword And Continue On Failure Wait Until Page Contains Element ... ${RUN_TOPOLOGY_XP} -Verify Pipeline Server Deployments # robocop: disable - [Documentation] Verifies the correct deployment of DS Pipelines in the rhods namespace - [Arguments] ${project_title} - - ${namespace}= Get Openshift Namespace From Data Science Project - ... project_title=${project_title} - - @{all_pods}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=component=data-science-pipelines - Run Keyword And Continue On Failure Length Should Be ${all_pods} 7 - - @{pipeline_api_server}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=ds-pipeline-dspa - ${containerNames}= Create List oauth-proxy ds-pipeline-api-server - Verify Deployment ${pipeline_api_server} 1 2 ${containerNames} - - @{pipeline_metadata_envoy}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=ds-pipeline-metadata-envoy-dspa - ${containerNames}= Create List container oauth-proxy - Verify Deployment ${pipeline_metadata_envoy} 1 2 ${containerNames} - - @{pipeline_metadata_grpc}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=ds-pipeline-metadata-grpc-dspa - ${containerNames}= Create List container - Verify Deployment ${pipeline_metadata_grpc} 1 1 ${containerNames} - - @{pipeline_persistenceagent}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=ds-pipeline-persistenceagent-dspa - ${containerNames}= Create List ds-pipeline-persistenceagent - Verify Deployment ${pipeline_persistenceagent} 1 1 ${containerNames} - - @{pipeline_scheduledworkflow}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=ds-pipeline-scheduledworkflow-dspa - ${containerNames}= Create List ds-pipeline-scheduledworkflow - Verify Deployment ${pipeline_scheduledworkflow} 1 1 ${containerNames} - - @{pipeline_workflow_controller}= Oc Get kind=Pod namespace=${namespace} - ... 
label_selector=app=ds-pipeline-workflow-controller-dspa - ${containerNames}= Create List ds-pipeline-workflow-controller - Verify Deployment ${pipeline_workflow_controller} 1 1 ${containerNames} - - @{mariadb}= Oc Get kind=Pod namespace=${namespace} - ... label_selector=app=mariadb-dspa - ${containerNames}= Create List mariadb - Verify Deployment ${mariadb} 1 1 ${containerNames} - -Wait Until Pipeline Server Is Deployed - [Documentation] Waits until all the expected pods of the pipeline server - ... are running - [Arguments] ${project_title} - Wait Until Keyword Succeeds 10 times 10s - ... Verify Pipeline Server Deployments project_title=${project_title} - -Wait Until Pipeline Server Is Deleted - [Documentation] Waits until all pipeline server pods are deleted - [Arguments] ${project_title} - # robocop: off=expression-can-be-simplified - FOR ${_} IN RANGE 0 30 - ${pod_count}= Run oc get pods -n ${project_title} -l component=data-science-pipelines | wc -l - IF ${pod_count}==0 BREAK - Sleep 1s - END - # TODO: we need to replace this keyword for a similar one checking in Data Science Pipelines > Runs # Verify Successful Pipeline Run Via Project UI # [Documentation] Validates that a given pipeline run in a given pipeline is in successful end state diff --git a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource index 70b09b4da..ff690c649 100644 --- a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource +++ b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource @@ -263,12 +263,13 @@ Get Model Inference ... model endpoint. If token authentication is needed for the model, ${token_auth} should be ... set to ${TRUE}. [Arguments] ${model_name} ${inference_input} ${token_auth}=${FALSE} ${project_title}=${NONE} - ... ${kserve_mode}=Serverless ${deployment_mode}='UI' ${service_port}=8888 ${end_point}=${NONE} + ... 
${kserve_mode}=Serverless ${deployment_mode}=UI ${service_port}=8888 ${end_point}=${NONE} ${curl_cmd}= Set Variable ${NONE} ${self_managed}= Is RHODS Self-Managed - IF ${deployment_mode} == 'UI' + IF $deployment_mode == 'UI' ${url}= Get Model Route Via UI ${model_name} - ${kserve}= Run Keyword And Return Status SeleniumLibrary.Page Should Contain Single-model serving enabled + ${kserve}= Run Keyword And Return Status SeleniumLibrary.Page Should Contain + ... Single-model serving enabled ${curl_cmd}= Set Variable curl -s ${url} -d ${inference_input} IF ${token_auth} IF "${project_title}" == "${NONE}" @@ -284,23 +285,22 @@ Get Model Inference Fetch Openshift CA Bundle ${curl_cmd}= Catenate ${curl_cmd} --cacert openshift_ca.crt END - END - IF ${deployment_mode} == 'Cli' + ELSE IF $deployment_mode == 'Cli' ${rc} ${cmd_op} Run And Return Rc And Output - ... oc get isvc ${model_name} -o jsonpath='{.metadata.annotations.serving\.kserve\.io/deploymentMode}' -n ${project_title} + ... oc get isvc ${model_name} -o jsonpath='{.metadata.annotations.serving\.kserve\.io/deploymentMode}' -n ${project_title} # robocop: disable Should Be Equal As Integers ${rc} 0 - IF "${cmd_op}" != "ModelMesh" + IF "${cmd_op}" != "ModelMesh" Fetch Knative CA Certificate filename=openshift_ca_istio_knative.crt ${cert}= Set Variable --cacert openshift_ca_istio_knative.crt - ELSE IF ${self_managed} + ELSE IF ${self_managed} Fetch Openshift CA Bundle ${cert}= Set Variable --cacert openshift_ca.crt END IF '${kserve_mode}' == 'Serverless' - ${rc} ${url}= Run And Return Rc And Output oc get ksvc ${model_name}-predictor -n ${project_title} -o jsonpath='{.status.url}' - Should Be Equal As Integers ${rc} 0 - ${curl_cmd}= Set Variable curl -s ${url}${end_point} -d ${inference_input} - + ${rc} ${url}= Run And Return Rc And Output + ... 
oc get ksvc ${model_name}-predictor -n ${project_title} -o jsonpath='{.status.url}' + Should Be Equal As Integers ${rc} 0 + ${curl_cmd}= Set Variable curl -s ${url}${end_point} -d ${inference_input} ELSE IF '${kserve_mode}' == 'RawDeployment' ${url}= Set Variable http://localhost:${service_port}${end_point} ${curl_cmd}= Set Variable curl -s ${url} -d ${inference_input} --cacert openshift_ca_istio_knative.crt @@ -308,8 +308,8 @@ Get Model Inference Log "Modelmesh CLI mode only" END IF ${token_auth} - ${token}= Create Inference Access Token ${project_title} ${DEFAULT_BUCKET_SA_NAME} - ${curl_cmd}= Catenate ${curl_cmd} -H "Authorization: Bearer ${token}" + ${token}= Create Inference Access Token ${project_title} ${DEFAULT_BUCKET_SA_NAME} + ${curl_cmd}= Catenate ${curl_cmd} -H "Authorization: Bearer ${token}" END END @@ -322,14 +322,15 @@ Get Model Inference Verify Model Inference [Documentation] Verifies that the inference result of a model is equal to an expected output [Arguments] ${model_name} ${inference_input} ${expected_inference_output} ${token_auth}=${FALSE} - ... ${project_title}=${NONE} ${deployment_mode}='UI' ${kserve_mode}=Serverless + ... ${project_title}=${NONE} ${deployment_mode}=UI ${kserve_mode}=Serverless ... ${service_port}=${NONE} ${end_point}=${NONE} - IF ${deployment_mode} == 'UI' + IF $deployment_mode == 'UI' Open Model Serving Home Page Switch Model Serving Project ${project_title} END - ${inference_output}= Get Model Inference model_name=${model_name} inference_input=${inference_input} token_auth=${token_auth} kserve_mode=${kserve_mode} - ... project_title=${project_title} deployment_mode=${deployment_mode} service_port=${service_port} end_point=${end_point} + ${inference_output}= Get Model Inference model_name=${model_name} inference_input=${inference_input} + ... token_auth=${token_auth} kserve_mode=${kserve_mode} project_title=${project_title} + ... 
deployment_mode=${deployment_mode} service_port=${service_port} end_point=${end_point} # robocop: disable ${result} ${list}= Inference Comparison ${expected_inference_output} ${inference_output} Log ${result} Log ${list} @@ -349,7 +350,7 @@ Verify Model Inference With Retries ... endpoint exposed. ... This is a temporary mitigation meanwhile we find a better way to check the model [Arguments] ${model_name} ${inference_input} ${expected_inference_output} ${token_auth}=${FALSE} - ... ${project_title}=${NONE} ${retries}=${5} ${deployment_mode}='UI' ${kserve_mode}=Serverless + ... ${project_title}=${NONE} ${retries}=${5} ${deployment_mode}=UI ${kserve_mode}=Serverless ... ${service_port}=${NONE} ${end_point}=${NONE} ${status}= Run Keyword And Return Status Verify Model Inference ... ${model_name} ${inference_input} ${expected_inference_output} ${token_auth} ${project_title} diff --git a/ods_ci/tests/Resources/RHOSi.resource b/ods_ci/tests/Resources/RHOSi.resource index 0473ae550..34109880f 100644 --- a/ods_ci/tests/Resources/RHOSi.resource +++ b/ods_ci/tests/Resources/RHOSi.resource @@ -6,6 +6,7 @@ Resource Page/ODH/Monitoring/Monitoring.resource Resource Page/OCPDashboard/InstalledOperators/InstalledOperators.robot Resource Page/OCPLogin/OCPLogin.resource Resource Common.robot +Resource OCP.resource *** Variables *** @@ -137,6 +138,7 @@ Set Expected Value For Release Name ... ODH: Open Data Hub ... RHOAI managed: OpenShift AI Cloud Service ... 
RHOAI selfmanaged: OpenShift AI Self-Managed + IF "${PRODUCT}" == "RHODS" IF ${IS_SELF_MANAGED} ${expected_release_name}= Set Variable ${RHOAI_SELFMANAGED_RELEASE_NAME} diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0101__installation/0108__operator.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0101__installation/0108__operator.robot index 91e4f703b..93e29037d 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0101__installation/0108__operator.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0101__installation/0108__operator.robot @@ -41,8 +41,7 @@ Verify That DSC And DSCI Release.Version Attribute matches the value in the subs Verify Odh-deployer Checks Cluster Platform Type [Documentation] Verifies if odh-deployer checks the platform type of the cluster before installing - [Tags] Sanity - ... Tier1 + [Tags] Tier1 ... ODS-1316 ... AutomationBug ... Operator @@ -68,8 +67,7 @@ Verify Odh-deployer Checks Cluster Platform Type Verify That The Operator Pod Does Not Get Stuck After Upgrade [Documentation] Verifies that the operator pod doesn't get stuck after an upgrade - [Tags] Sanity - ... Tier1 + [Tags] Tier1 ... ODS-818 ... Operator ${operator_pod_info}= Fetch operator Pod Info @@ -85,7 +83,6 @@ Verify Clean Up ODS Deployer Post-Migration [Documentation] Verifies that resources unused are cleaned up after migration [Tags] Tier1 ... ODS-1767 - ... Sanity ... AutomationBug ... Operator ${version_check} = Is RHODS Version Greater Or Equal Than 1.17.0 @@ -144,6 +141,8 @@ Verify Clean Up ODS Deployer Post-Migration Operator Setup [Documentation] Setup for the Operator tests RHOSi Setup + ${IS_SELF_MANAGED}= Is RHODS Self-Managed + Set Suite Variable ${IS_SELF_MANAGED} Gather Release Attributes From DSC And DSCI Set Expected Value For Release Name @@ -153,7 +152,7 @@ Fetch Odh-deployer Pod Info ... None ... Returns: ... 
odhdeployer_pod_info(dict): Dictionary containing the information of the odhdeployer pod - @{resources_info_list}= Oc Get kind=Pod api_version=v1 label_selector=name=rhods-operator + @{resources_info_list}= Oc Get kind=Pod api_version=v1 label_selector=${OPERATOR_LABEL_SELECTOR} &{odhdeployer_pod_info}= Set Variable ${resources_info_list}[0] RETURN &{odhdeployer_pod_info} @@ -176,7 +175,7 @@ Fetch Operator Pod Info ... None ... Returns: ... operator_pod_info(dict): Dictionary containing the information of the operator pod - @{operator_pod_info}= Oc Get kind=Pod api_version=v1 label_selector=name=rhods-operator + @{operator_pod_info}= Oc Get kind=Pod api_version=v1 label_selector=${OPERATOR_LABEL_SELECTOR} RETURN @{operator_pod_info} Verify Operator Pods Have CrashLoopBackOff Status After Upgrade diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0105__rhods_operator.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0105__rhods_operator.robot index ddfd80e45..76bbd1b32 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0105__rhods_operator.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0105__rhods_operator.robot @@ -18,7 +18,7 @@ ${commercial_url} https://www.redhat.com/en/technologies/cloud-comp Verify RHODS operator information [Documentation] This TC verfiy if the text present in ... 
RHODS opeartor Details section.ProductBug:RHODS-4993 - [Tags] ODS-498 ODS-624 Sanity ProductBug Tier1 + [Tags] ODS-498 ODS-624 Sanity Operator ProductBug Open Installed Operators Page #Select All Projects Wait Until Keyword Succeeds 10 times 5s Click On Searched Operator Red Hat OpenShift AI #robocop: disable diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0108__rhods_operator_logs_verification.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0108__rhods_operator_logs_verification.robot index 854ee2b70..63a532045 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0108__rhods_operator_logs_verification.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0108__rhods_operator_logs_verification.robot @@ -18,20 +18,20 @@ Suite Teardown RHOSi Teardown *** Variables *** -${namespace} ${OPERATOR_NAMESPACE} ${regex_pattern} level=([Ee]rror).*|([Ff]ailed) to list .* *** Test Cases *** Verify RHODS Operator log - [Tags] Sanity Tier1 + [Tags] Sanity ... ODS-1007 + ... 
Operator #Get the POD name - ${data} Run keyword Oc Get kind=Pod namespace=${namespace} label_selector=name=rhods-operator + ${data} Run Keyword Oc Get kind=Pod namespace=${OPERATOR_NAMESPACE} label_selector=${OPERATOR_LABEL_SELECTOR} #Capture the logs based on containers - ${val} Run oc logs --tail=1000000 ${data[0]['metadata']['name']} -n ${namespace} -c rhods-operator - #To check if command has been suessfully executed and the logs has been captured - IF len($val)==${0} or "error:" in $val FAIL Either OC command has not been executed sucessfully or Logs is not present + ${val} Run oc logs --tail=1000000 ${data[0]['metadata']['name']} -n ${OPERATOR_NAMESPACE} -c ${OPERATOR_POD_CONTAINER_NAME} + #To check if command has been successfully executed and the logs have been captured + IF len($val)==${0} or "error:" in $val FAIL Either OC command has not been executed successfully or Logs are not present #Filter the error msg from the log captured ${match_list} Get Regexp Matches ${val} ${regex_pattern} #Remove if any duplicate entry are present diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0109__rhods_operator_oom_kill_verification.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0109__rhods_operator_oom_kill_verification.robot index 565772111..2ae1d5078 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0109__rhods_operator_oom_kill_verification.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0109__rhods_operator_oom_kill_verification.robot @@ -27,7 +27,7 @@ Verify RHODS Operator OOM Kill Behaviour [Documentation] Create multiple namespace and verify ... if the rhods operator pod is running without any ... 
issue and perfrom some basic validation with RHODS - [Tags] ODS-1091 Tier3 + [Tags] ODS-1091 Tier3 Operator AutomationBug ${dfeault_np_count} Run oc get namespace | wc -l Create Namespace In Openshift Verify Operator Pod Status ${NAMESPACE} name=rhods-operator diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0110__service_mesh.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0110__service_mesh.robot index ce4c7f941..8e6c50047 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0110__service_mesh.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0110__service_mesh.robot @@ -51,7 +51,7 @@ Validate Service Mesh State Removed [Documentation] The purpose of this Test Case is to validate Service Mesh state 'Removed'. ... The operator will Delete the Service Mesh CR, when state is Removed. ... Test will fail until RHOAIENG-2209 is fixed - [Tags] ServiceMesh-Removed + [Tags] Operator Tier1 ODS-2526 ServiceMesh-Removed ProductBug Set Service Mesh Management State Removed ${OPERATOR_NS} Wait Until Keyword Succeeds 2 min 0 sec diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0113__dsc_components.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0113__dsc_components.robot index c7c10090d..a73cdb50c 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0113__dsc_components.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0113__dsc_components.robot @@ -139,7 +139,7 @@ Validate Dashboard Removed State Validate Datasciencepipelines Managed State [Documentation] Validate that the DSC Datasciencepipelines component Managed state creates the expected resources, ... 
check that Datasciencepipelines deployment is created and pod is in Ready state - [Tags] Operator Tier1 RHOAIENG-7298 datasciencepipelines-managed + [Tags] Operator Tier1 RHOAIENG-7298 operator-datasciencepipelines-managed Set DSC Component Managed State And Wait For Completion datasciencepipelines ${DATASCIENCEPIPELINES_DEPLOYMENT_NAME} ${DATASCIENCEPIPELINES_LABEL_SELECTOR} @@ -147,7 +147,7 @@ Validate Datasciencepipelines Managed State Validate Datasciencepipelines Removed State [Documentation] Validate that Datasciencepipelines management state Removed does remove relevant resources. - [Tags] Operator Tier1 RHOAIENG-7298 datasciencepipelines-removed + [Tags] Operator Tier1 RHOAIENG-7298 operator-datasciencepipelines-removed Set DSC Component Removed State And Wait For Completion datasciencepipelines ${DATASCIENCEPIPELINES_DEPLOYMENT_NAME} ${DATASCIENCEPIPELINES_LABEL_SELECTOR} diff --git a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0114__dsc_negative_dependant_operators_not_installed.robot b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0114__dsc_negative_dependant_operators_not_installed.robot index 78ffa8203..c972cc5c9 100644 --- a/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0114__dsc_negative_dependant_operators_not_installed.robot +++ b/ods_ci/tests/Tests/0100__platform/0101__deploy/0104__operators/0104__rhods_operator/0114__dsc_negative_dependant_operators_not_installed.robot @@ -143,7 +143,7 @@ Reinstall Service Mesh Operator And Recreate DSC And DSCI Remove DSC And DSCI Resources Install Service Mesh Operator Via Cli Apply DSCInitialization CustomResource dsci_name=${DSCI_NAME} - Wait For DSCInitialization CustomResource To Be Ready timeout=180 + Wait For DSCInitialization CustomResource To Be Ready timeout=600 Apply DataScienceCluster CustomResource dsc_name=${DSC_NAME} Wait For DataScienceCluster CustomResource To Be Ready timeout=600 
Set Service Mesh State To Managed And Wait For CR Ready @@ -154,7 +154,7 @@ Reinstall Serverless Operator And Recreate DSC And DSCI Remove DSC And DSCI Resources Install Serverless Operator Via Cli Apply DSCInitialization CustomResource dsci_name=${DSCI_NAME} - Wait For DSCInitialization CustomResource To Be Ready timeout=180 + Wait For DSCInitialization CustomResource To Be Ready timeout=600 Apply DataScienceCluster CustomResource dsc_name=${DSC_NAME} Wait For DataScienceCluster CustomResource To Be Ready timeout=600 Set Service Mesh State To Managed And Wait For CR Ready @@ -166,7 +166,7 @@ Reinstall Service Mesh And Serverless Operators And Recreate DSC And DSCI Install Serverless Operator Via Cli Install Service Mesh Operator Via Cli Apply DSCInitialization CustomResource dsci_name=${DSCI_NAME} - Wait For DSCInitialization CustomResource To Be Ready timeout=180 + Wait For DSCInitialization CustomResource To Be Ready timeout=600 Apply DataScienceCluster CustomResource dsc_name=${DSC_NAME} Wait For DataScienceCluster CustomResource To Be Ready timeout=600 Set Service Mesh State To Managed And Wait For CR Ready diff --git a/ods_ci/tests/Tests/0100__platform/0103__must_gather/test-must-gather-logs.robot b/ods_ci/tests/Tests/0100__platform/0103__must_gather/test-must-gather-logs.robot index 001f0b55a..f906df7dc 100644 --- a/ods_ci/tests/Tests/0100__platform/0103__must_gather/test-must-gather-logs.robot +++ b/ods_ci/tests/Tests/0100__platform/0103__must_gather/test-must-gather-logs.robot @@ -11,7 +11,6 @@ Resource ../../../Resources/CLI/MustGather/MustGather.resource Verify that the must-gather image provides RHODS logs and info [Documentation] Tests the must-gather image for ODH/RHOAI [Tags] Smoke - ... Tier1 ... ODS-505 ... Operator ... 
MustGather diff --git a/ods_ci/tests/Tests/0500__ide/0501__ide_jupyterhub/special-user-testing.robot b/ods_ci/tests/Tests/0500__ide/0501__ide_jupyterhub/special-user-testing.robot index e473820a0..02de4e1a5 100644 --- a/ods_ci/tests/Tests/0500__ide/0501__ide_jupyterhub/special-user-testing.robot +++ b/ods_ci/tests/Tests/0500__ide/0501__ide_jupyterhub/special-user-testing.robot @@ -1,5 +1,5 @@ *** Settings *** -Test Tags J../upyterHub +Test Tags JupyterHub Resource ../../../Resources/ODS.robot Resource ../../../Resources/Common.robot Resource ../../../Resources/Page/ODH/JupyterHub/JupyterHubSpawner.robot diff --git a/ods_ci/tests/Tests/0500__ide/0502__ide_elyra.robot b/ods_ci/tests/Tests/0500__ide/0502__ide_elyra.robot index 1363bb2a4..f4ae89898 100644 --- a/ods_ci/tests/Tests/0500__ide/0502__ide_elyra.robot +++ b/ods_ci/tests/Tests/0500__ide/0502__ide_elyra.robot @@ -11,12 +11,13 @@ Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Wor Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Storages.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource +Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource Library Screenshot Library String Library DebugLibrary Library JupyterLibrary -Test Tags DataSciencePipelines +Test Tags DataSciencePipelines-IDE Suite Setup Elyra Pipelines Suite Setup Suite Teardown Elyra Pipelines Suite Teardown @@ -39,8 +40,7 @@ ${DC_NAME} = elyra-s3 Verify Pipelines Integration With Elyra When Using Standard Data Science Image [Documentation] Verifies that a workbench using the Standard Data Science Image can be used to ... create and run a Data Science Pipeline - [Tags] Sanity Tier1 - ... 
ODS-2197 + [Tags] Sanity ODS-2197 [Timeout] 10m Verify Pipelines Integration With Elyra Running Hello World Pipeline Test ... img=Standard Data Science @@ -76,9 +76,9 @@ Elyra Pipelines Suite Setup # robocop: off=too-many-calls-in-keyword Create S3 Data Connection project_title=${PRJ_TITLE} dc_name=${DC_NAME} ... aws_access_key=${S3.AWS_ACCESS_KEY_ID} aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY} ... aws_bucket_name=ods-ci-ds-pipelines - Create Pipeline Server dc_name=${DC_NAME} + Pipelines.Create Pipeline Server dc_name=${DC_NAME} ... project_title=${PRJ_TITLE} - Wait Until Pipeline Server Is Deployed project_title=${PRJ_TITLE} + DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=${PRJ_TITLE} Sleep 15s reason=Wait until pipeline server is detected by dashboard Elyra Pipelines Suite Teardown diff --git a/ods_ci/tests/Tests/0600__distributed_workloads/test-distributed-workloads-metrics-ui.robot b/ods_ci/tests/Tests/0600__distributed_workloads/test-distributed-workloads-metrics-ui.robot index d307d7ba2..8d28ac604 100644 --- a/ods_ci/tests/Tests/0600__distributed_workloads/test-distributed-workloads-metrics-ui.robot +++ b/ods_ci/tests/Tests/0600__distributed_workloads/test-distributed-workloads-metrics-ui.robot @@ -39,7 +39,7 @@ Verify Workload Metrics Home page Contents Page Should Contain Element ${PROJECT_METRICS_TAB_XP} Page Should Contain Element ${WORKLOAD_STATUS_TAB_XP} Click Element ${REFRESH_INTERNAL_MENU_XP} - ${get_refresh_interval_list}= Get All Text Under Element xpath=//*[starts-with(@id, "select-option-")] + ${get_refresh_interval_list}= Get All Text Under Element xpath=//button[@role="option"] Lists Should Be Equal ${REFRESH_INTERNAL_LIST} ${get_refresh_interval_list} Verify Project Metrics Default Page contents @@ -114,6 +114,8 @@ Verify The Workload Metrics By Submitting Kueue Batch Workload Wait For Job With Status ${JOB_NAME_QUEUE} Succeeded 180 Select Refresh Interval 15 seconds Page Should Not Contain Element 
xpath=//*[text()="Running"] + Page Should Contain Element xpath=//*[text()="Succeeded"] + Select Refresh Interval 15 seconds Check Requested Resources ${PRJ_TITLE} ${CPU_SHARED_QUOTA} ${MEMEORY_SHARED_QUOTA} 0 0 Job Check Distributed Workload Resource Metrics Status ${JOB_NAME_QUEUE} Succeeded # Once fixed https://issues.redhat.com/browse/RHOAIENG-9092 update Job success message @@ -185,14 +187,14 @@ Verify Requested resources When Multiple Local Queue Exists ${memory_requested} = Evaluate ${memory_requested_1} + ${memory_requested_2} Check Requested Resources ${PRJ_TITLE} ${CPU_SHARED_QUOTA} ${MEMEORY_SHARED_QUOTA} ${cpu_requested} ${memory_requested} Job - Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingCPU-ChartLabel-title"] timeout=120 - Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingCPU-ChartLegend-ChartLabel-0"] timeout=120 + Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingCPU-ChartLabel-title"] timeout=180 + Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingCPU-ChartLegend-ChartLabel-0"] timeout=180 ${cpu_usage} = Get Current CPU Usage ${PRJ_TITLE} Job ${cpu_consuming} = Get Text xpath:(//*[@style[contains(., 'var(--pf-v5-chart-donut--label--title--Fill')]])[1] Check Resource Consuming Usage ${cpu_usage} ${cpu_consuming} CPU - Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingMemory-ChartLabel-title"] timeout=120 - Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingMemory-ChartLegend-ChartLabel-0"] timeout=120 + Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingMemory-ChartLabel-title"] timeout=180 + Wait Until Element Is Visible xpath=//*[@id="topResourceConsumingMemory-ChartLegend-ChartLabel-0"] timeout=180 ${memory_usage} = Get Current Memory Usage ${PRJ_TITLE} Job ${memory_consuming} = Get Text xpath:(//*[@style[contains(., 'var(--pf-v5-chart-donut--label--title--Fill')]])[2] Check Resource Consuming Usage ${memory_usage} ${memory_consuming} 
Memory diff --git a/ods_ci/tests/Tests/0600__distributed_workloads/test-smoke.robot b/ods_ci/tests/Tests/0600__distributed_workloads/test-smoke.robot index cc2991ea8..8e45cf7f8 100644 --- a/ods_ci/tests/Tests/0600__distributed_workloads/test-smoke.robot +++ b/ods_ci/tests/Tests/0600__distributed_workloads/test-smoke.robot @@ -27,6 +27,11 @@ Ray smoke test FAIL Can not find kuberay-operator service in ${APPLICATIONS_NAMESPACE} END Log To Console kuberay-operator service exists + Log To Console Verifying kuberay-operator's container image is referred from registry.redhat.io + ${pod} = Find First Pod By Name namespace=${APPLICATIONS_NAMESPACE} pod_regex=kuberay-operator- + Container Image Url Should Contain ${APPLICATIONS_NAMESPACE} ${pod} kuberay-operator + ... registry.redhat.io/rhoai/odh-kuberay-operator-controller + Log To Console kuberay-operator's container image is verified Codeflare smoke test [Documentation] Check that Codeflare deployment and its monitoring service are up and running @@ -47,6 +52,11 @@ Codeflare smoke test FAIL Can not find codeflare-operator-manager-metrics service in ${APPLICATIONS_NAMESPACE} END Log To Console codeflare-operator-manager-metrics service exists + Log To Console Verifying codeflare-operator-manager's container image is referred from registry.redhat.io + ${pod} = Find First Pod By Name namespace=${APPLICATIONS_NAMESPACE} pod_regex=codeflare-operator-manager- + Container Image Url Should Contain ${APPLICATIONS_NAMESPACE} ${pod} manager + ... 
registry.redhat.io/rhoai/odh-codeflare-operator + Log To Console codeflare-operator-manager's container image is verified Kueue smoke test [Documentation] Check that Kueue deployment and its service are up and running @@ -67,6 +77,11 @@ Kueue smoke test FAIL Can not find kueue-webhook-service service in ${APPLICATIONS_NAMESPACE} END Log To Console kueue-webhook-service service exists + Log To Console Verifying kueue-controller-manager's container image is referred from registry.redhat.io + ${pod} = Find First Pod By Name namespace=${APPLICATIONS_NAMESPACE} pod_regex=kueue-controller-manager- + Container Image Url Should Contain ${APPLICATIONS_NAMESPACE} ${pod} manager + ... registry.redhat.io/rhoai/odh-kueue-controller + Log To Console kueue-controller-manager's container image is verified Training operator smoke test [Documentation] Check that Training operator deployment is up and running @@ -79,6 +94,11 @@ Training operator smoke test IF ${result.rc} != 0 FAIL Timeout waiting for deployment/kubeflow-training-operator to be available in ${APPLICATIONS_NAMESPACE} END + Log To Console Verifying kubeflow-training-operator's container image is referred from registry.redhat.io + ${pod} = Find First Pod By Name namespace=${APPLICATIONS_NAMESPACE} pod_regex=kubeflow-training-operator- + Container Image Url Should Contain ${APPLICATIONS_NAMESPACE} ${pod} training-operator + ... registry.redhat.io/rhoai/odh-training-operator + Log To Console kubeflow-training-operator's container image is verified *** Keywords *** diff --git a/ods_ci/tests/Tests/1000__model_serving/1001__model_serving_modelmesh.robot b/ods_ci/tests/Tests/1000__model_serving/1001__model_serving_modelmesh.robot index d8302f8db..3cb11a6bd 100644 --- a/ods_ci/tests/Tests/1000__model_serving/1001__model_serving_modelmesh.robot +++ b/ods_ci/tests/Tests/1000__model_serving/1001__model_serving_modelmesh.robot @@ -122,7 +122,8 @@ Test Inference With Token Authentication [Tags] Sanity Tier1 ... 
ODS-1920 Open Data Science Projects Home Page - Create Data Science Project title=${SECOND_PROJECT} description=${PRJ_DESCRIPTION} existing_project=${FALSE} + Create Data Science Project title=${SECOND_PROJECT} description=${PRJ_DESCRIPTION} + ... existing_project=${FALSE} Recreate S3 Data Connection project_title=${SECOND_PROJECT} dc_name=model-serving-connection ... aws_access_key=${S3.AWS_ACCESS_KEY_ID} aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY} ... aws_bucket_name=ods-ci-s3 @@ -130,7 +131,6 @@ Test Inference With Token Authentication Serve Model project_name=${SECOND_PROJECT} model_name=${SECURED_MODEL} model_server=${SECURED_RUNTIME} ... existing_data_connection=${TRUE} data_connection_name=model-serving-connection existing_model=${TRUE} ... framework=onnx model_path=mnist-8.onnx - # Run Keyword And Continue On Failure Verify Model Inference ${SECURED_MODEL} ${INFERENCE_INPUT} ${EXPECTED_INFERENCE_SECURED_OUTPUT} token_auth=${TRUE} # robocop: disable Run Keyword And Continue On Failure Verify Model Inference With Retries ... ${SECURED_MODEL} ${INFERENCE_INPUT} ${EXPECTED_INFERENCE_SECURED_OUTPUT} token_auth=${TRUE} ... project_title=${SECOND_PROJECT} diff --git a/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm.robot b/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm.robot index be8675040..71fe75c30 100644 --- a/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm.robot +++ b/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm.robot @@ -571,16 +571,14 @@ Verify User Can Serve And Query A Model With Token Wait For Pods To Be Ready label_selector=serving.kserve.io/inferenceservice=${flan_model_name} ... 
namespace=${test_namespace} Create Role Binding For Authorino name=${DEFAULT_BUCKET_PREFIX} namespace=${test_namespace} - # TODO: The token created from this keyword does not work to query the model, it will result in a 401 Unauthorized - # error being sent back. Investigate and figure out why, fix the logic. ${inf_token}= Create Inference Access Token ${test_namespace} ${DEFAULT_BUCKET_SA_NAME} Sleep 600s Query Model Multiple Times model_name=${flan_model_name} ... inference_type=all-tokens n_times=1 - ... namespace=${test_namespace} token=${inf_token} protocol=http + ... namespace=${test_namespace} token=${inf_token} Query Model Multiple Times model_name=${flan_model_name} ... inference_type=streaming n_times=1 - ... namespace=${test_namespace} token=${inf_token} protocol=http + ... namespace=${test_namespace} token=${inf_token} [Teardown] Clean Up Test Project test_ns=${test_namespace} ... isvc_names=${models_names} wait_prj_deletion=${FALSE} diff --git a/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm_tgis.robot b/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm_tgis.robot index 22c172f0b..9250b90df 100644 --- a/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm_tgis.robot +++ b/ods_ci/tests/Tests/1000__model_serving/1007__model_serving_llm/1007__model_serving_llm_tgis.robot @@ -648,27 +648,25 @@ Verify User Can Serve And Query A Model With Token Wait For Pods To Be Ready label_selector=serving.kserve.io/inferenceservice=${flan_model_name} ... namespace=${test_namespace} Create Role Binding For Authorino name=${DEFAULT_BUCKET_PREFIX} namespace=tgis-standalone-cli - # TODO: The token created from this keyword does not work to query the model, it will result in a 401 Unauthorized - # error being sent back. Investigate and figure out why, fix the logic. 
${inf_token}= Create Inference Access Token ${test_namespace} ${DEFAULT_BUCKET_SA_NAME} ${pod_name}= Get Pod Name namespace=${test_namespace} ... label_selector=serving.kserve.io/inferenceservice=${flan_model_name} IF ${IS_KSERVE_RAW} Start Port-forwarding namespace=${test_namespace} pod_name=${pod_name} Query Model Multiple Times model_name=${flan_model_name} runtime=${TGIS_RUNTIME_NAME} - ... inference_type=all-tokens n_times=1 protocol=http + ... inference_type=all-tokens n_times=1 ... namespace=${test_namespace} port_forwarding=${IS_KSERVE_RAW} token=${inf_token} Query Model Multiple Times model_name=${flan_model_name} runtime=${TGIS_RUNTIME_NAME} ... inference_type=tokenize n_times=1 port_forwarding=${IS_KSERVE_RAW} ... namespace=${test_namespace} validate_response=${TRUE} string_check_only=${TRUE} - ... token=${inf_token} protocol=http + ... token=${inf_token} Query Model Multiple Times model_name=${flan_model_name} runtime=${TGIS_RUNTIME_NAME} ... inference_type=model-info n_times=1 port_forwarding=${IS_KSERVE_RAW} ... namespace=${test_namespace} validate_response=${TRUE} string_check_only=${TRUE} - ... token=${inf_token} protocol=http + ... token=${inf_token} Query Model Multiple Times model_name=${flan_model_name} runtime=${TGIS_RUNTIME_NAME} ... inference_type=streaming n_times=1 port_forwarding=${IS_KSERVE_RAW} ... namespace=${test_namespace} validate_response=${FALSE} - ... token=${inf_token} protocol=http + ... token=${inf_token} [Teardown] Run Keywords ... Clean Up Test Project test_ns=${test_namespace} ... isvc_names=${models_names} wait_prj_deletion=${FALSE} @@ -725,4 +723,4 @@ Wait For New Replica Set To Be Ready END Wait Until Keyword Succeeds 5 times 5s ... Wait For Model KServe Deployment To Be Ready label_selector=serving.kserve.io/inferenceservice=${model_name} - ... namespace=${test_namespace} runtime=${TGIS_RUNTIME_NAME} exp_replicas=${new_exp_replicas} + ... 
namespace=${namespace} runtime=${TGIS_RUNTIME_NAME} exp_replicas=${new_exp_replicas} diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-api.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-api.robot index 9c07f7461..394ee4972 100644 --- a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-api.robot +++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-api.robot @@ -4,11 +4,13 @@ Resource ../../Resources/RHOSi.resource Resource ../../Resources/ODS.robot Resource ../../Resources/Common.robot Resource ../../Resources/Page/ODH/ODHDashboard/ODHDashboard.robot +Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource +Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource Library DateTime Library ../../../libs/DataSciencePipelinesAPI.py Library ../../../libs/DataSciencePipelinesKfp.py -Test Tags DataSciencePipelines +Test Tags DataSciencePipelines-Backend Suite Setup Data Science Pipelines Suite Setup Suite Teardown RHOSi Teardown @@ -18,10 +20,21 @@ ${URL_TEST_PIPELINE_RUN_YAML}= https://raw.githubusercontent.com *** Test Cases *** +Verify Pipeline Server Creation With S3 Object Storage + [Documentation] Creates a pipeline server using S3 object storage and verifies that all components are running + [Tags] Smoke + Projects.Create Data Science Project From CLI name=dsp-s3 + DataSciencePipelinesBackend.Create Pipeline Server namespace=dsp-s3 + ... object_storage_access_key=${S3.AWS_ACCESS_KEY_ID} + ... object_storage_secret_key=${S3.AWS_SECRET_ACCESS_KEY} + ... 
dsp_version=v2 + DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=dsp-s3 + [Teardown] Projects.Delete Project Via CLI By Display Name dsp-s3 + Verify Admin Users Can Create And Run a Data Science Pipeline Using The Api [Documentation] Creates, runs pipelines with admin user. Double check the pipeline result and clean ... the pipeline resources. - [Tags] Sanity Tier1 ODS-2083 + [Tags] Sanity ODS-2083 End To End Pipeline Workflow Via Api ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD} pipelinesapi1 Verify Regular Users Can Create And Run a Data Science Pipeline Using The Api @@ -33,39 +46,49 @@ Verify Regular Users Can Create And Run a Data Science Pipeline Using The Api Verify Ods Users Can Do Http Request That Must Be Redirected to Https [Documentation] Verify Ods Users Can Do Http Request That Must Be Redirected to Https [Tags] Tier1 ODS-2234 - New Project project-redirect-http - Install DataSciencePipelinesApplication CR project-redirect-http + Projects.Create Data Science Project From CLI name=project-redirect-http + DataSciencePipelinesBackend.Create PipelineServer Using Custom DSPA project-redirect-http ${status} Login And Wait Dsp Route ${OCP_ADMIN_USER.USERNAME} ${OCP_ADMIN_USER.PASSWORD} ... 
project-redirect-http Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long ${url} Do Http Request apis/v2beta1/runs Should Start With ${url} https - [Teardown] Remove Pipeline Project project-redirect-http + [Teardown] Projects.Delete Project Via CLI By Display Name project-redirect-http Verify DSPO Operator Reconciliation Retry [Documentation] Verify DSPO Operator is able to recover from missing components during the initialization - [Tags] Sanity Tier1 ODS-2477 - ${local_project_name} = Set Variable recon-test - New Project ${local_project_name} - Install DataSciencePipelinesApplication CR ${local_project_name} data-science-pipelines-reconciliation.yaml False + [Tags] Sanity ODS-2477 + + ${local_project_name} = Set Variable dsp-reconciliation-test + Projects.Create Data Science Project From CLI name=${local_project_name} + + # Atempt to create a pipeline server with a custom DSPA. It should fail because there is a missing + # secret with storage credentials (that's why, after, we don't use "Wait Until Pipeline Server Is Deployed" + DataSciencePipelinesBackend.Create PipelineServer Using Custom DSPA + ... ${local_project_name} data-science-pipelines-reconciliation.yaml False Wait Until Keyword Succeeds 15 times 1s ... Double Check If DSPA Was Created ${local_project_name} - DSPA Should Reconcile - ${rc} ${out} = Run And Return Rc And Output oc apply -f tests/Resources/Files/dummy-storage-creds.yaml -n ${local_project_name} + Verify DSPO Logs Show Error Encountered When Parsing DSPA + + # Add the missing secret with storage credentials. 
The DSPO will reconcile and start the pipeline server pods + # Note: as the credentials are dummy, the DSPA status won't be ready, but it's ok because in this test + # we are just testing the DSPO reconciliation + ${rc} ${out} = Run And Return Rc And Output oc apply -f ${DSPA_PATH}/dummy-storage-creds.yaml -n ${local_project_name} IF ${rc}!=0 Fail - # one pod is good when reconciliation finished + + # After reconciliation, the project should have at least one pod running Wait For Pods Number 1 namespace=${local_project_name} timeout=60 - [Teardown] Remove Pipeline Project ${local_project_name} + [Teardown] Projects.Delete Project Via CLI By Display Name ${local_project_name} *** Keywords *** End To End Pipeline Workflow Via Api [Documentation] Create, run and double check the pipeline result using API. ... In the end, clean the pipeline resources. [Arguments] ${username} ${password} ${project} - Remove Pipeline Project ${project} - New Project ${project} - Install DataSciencePipelinesApplication CR ${project} + Projects.Delete Project Via CLI By Display Name ${project} + Projects.Create Data Science Project From CLI name=${project} + Create PipelineServer Using Custom DSPA ${project} ${status} Login And Wait Dsp Route ${username} ${password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long Setup Client ${username} ${password} ${project} @@ -74,7 +97,7 @@ End To End Pipeline Workflow Via Api ${run_status} Check Run Status ${run_id} Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. 
Check the logs DataSciencePipelinesKfp.Delete Run ${run_id} - [Teardown] Remove Pipeline Project ${project} + [Teardown] Projects.Delete Project Via CLI By Display Name ${project} Double Check If DSPA Was Created [Documentation] Double check if DSPA was created @@ -82,7 +105,7 @@ Double Check If DSPA Was Created ${rc} ${out} = Run And Return Rc And Output oc get datasciencepipelinesapplications -n ${local_project_name} IF ${rc}!=0 Fail -DSPA Should Reconcile +Verify DSPO Logs Show Error Encountered When Parsing DSPA [Documentation] DSPA must find an error because not all components were deployed ${stopped} = Set Variable ${False} # limit is 180 because the reconciliation run every 2 minutes @@ -92,9 +115,7 @@ DSPA Should Reconcile TRY WHILE not ${stopped} limit=${timeout} Sleep 1s - ${logs}= Oc Get Pod Logs - ... name=${pod_name} - ... namespace=${APPLICATIONS_NAMESPACE} + ${logs} Run oc logs --tail=1000000 ${pod_name} -n ${APPLICATIONS_NAMESPACE} ${stopped} = Set Variable If "Encountered error when parsing CR" in """${logs}""" True False END EXCEPT WHILE loop was aborted type=start diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-general.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-general.robot index 43b46b24c..dc9647bb8 100644 --- a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-general.robot +++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-general.robot @@ -9,9 +9,10 @@ Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipeline Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource +Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource Library DateTime Library 
../../../libs/DataSciencePipelinesAPI.py -Test Tags DataSciencePipelines +Test Tags DataSciencePipelines-Backend Suite Setup General Suite Setup Suite Teardown General Suite Teardown @@ -48,31 +49,25 @@ Verify Ods User Can Bind The Route Role General Suite Setup [Documentation] Suite setup steps for testing DSG. It creates some test variables ... and runs RHOSi setup - Set Library Search Order SeleniumLibrary RHOSi Setup General Suite Teardown [Documentation] General Suite Teardown - Remove Pipeline Project ${PROJECT_USER3} - Remove Pipeline Project ${PROJECT_USER4} + Projects.Delete Project Via CLI By Display Name ${PROJECT_USER3} + Projects.Delete Project Via CLI By Display Name ${PROJECT_USER4} RHOSi Teardown Create A Pipeline Server And Wait For Dsp Route [Documentation] Create A Pipeline Server And Wait For Dsp Route [Arguments] ${user} ${password} ${auth_type} ${project} - Launch Data Science Project Main Page username=${user} - ... password=${password} - ... ocp_user_auth_type=${auth_type} - ... browser_alias=${user}-session - Remove Pipeline Project ${project} - Create Data Science Project title=${project} description= - Projects.Move To Tab Data connections - Create S3 Data Connection project_title=${project} dc_name=${project}-dc - ... aws_access_key=${S3.AWS_ACCESS_KEY_ID} - ... aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY} - ... aws_s3_endpoint=${S3.AWS_DEFAULT_ENDPOINT} aws_region=${S3.AWS_DEFAULT_REGION} - ... aws_bucket_name=${S3_BUCKET} - Create Pipeline Server dc_name=${project}-dc project_title=${project} - Wait Until Pipeline Server Is Deployed project_title=${project} + + Projects.Create Data Science Project From CLI name=${project} as_user=${user} + + DataSciencePipelinesBackend.Create Pipeline Server namespace=${project} + ... object_storage_access_key=${S3.AWS_ACCESS_KEY_ID} + ... 
object_storage_secret_key=${S3.AWS_SECRET_ACCESS_KEY} + + DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=${project} + ${status} Login And Wait Dsp Route ${user} ${password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # robocop: disable:line-too-long diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-kfp.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-kfp.robot index 434c19ced..9663c05bc 100644 --- a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-kfp.robot +++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-kfp.robot @@ -7,10 +7,12 @@ Resource ../../Resources/Common.robot Resource ../../Resources/Page/ODH/ODHDashboard/ODHDashboard.robot Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Permissions.resource +Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Projects.resource +Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource Library DateTime Library ../../../libs/DataSciencePipelinesAPI.py Library ../../../libs/DataSciencePipelinesKfp.py -Test Tags DataSciencePipelines +Test Tags DataSciencePipelines-Backend Suite Setup Data Science Pipelines Suite Setup Suite Teardown RHOSi Teardown @@ -24,7 +26,7 @@ ${KUEUE_RESOURCES_SETUP_FILEPATH}= tests/Resources/Page/DistributedWorkloads/ Verify Ods Users Can Create And Run A Data Science Pipeline Using The kfp Python Package [Documentation] Creates, runs pipelines with regular user. Double check the pipeline result and clean ... the pipeline resources. - [Tags] Smoke Tier1 ODS-2203 + [Tags] Smoke ODS-2203 ${emtpy_dict}= Create Dictionary End To End Pipeline Workflow Using Kfp ... 
admin_username=${TEST_USER.USERNAME} @@ -34,7 +36,7 @@ Verify Ods Users Can Create And Run A Data Science Pipeline Using The kfp Python ... project=${PROJECT_NAME} ... python_file=flip_coin.py ... method_name=flipcoin_pipeline - ... status_check_timeout=440 + ... status_check_timeout=180 ... pipeline_params=${emtpy_dict} End To End Pipeline Workflow Using Kfp ... admin_username=${TEST_USER.USERNAME} @@ -44,14 +46,14 @@ Verify Ods Users Can Create And Run A Data Science Pipeline Using The kfp Python ... project=${PROJECT_NAME} ... python_file=iris_pipeline.py ... method_name=my_pipeline - ... status_check_timeout=440 + ... status_check_timeout=180 ... pipeline_params=${emtpy_dict} - [Teardown] Remove Pipeline Project ${PROJECT_NAME} + [Teardown] Projects.Delete Project Via CLI By Display Name ${PROJECT_NAME} Verify Upload Download In Data Science Pipelines Using The kfp Python Package [Documentation] Creates, runs pipelines with regular user. Double check the pipeline result and clean ... the pipeline resources. - [Tags] Sanity Tier1 ODS-2683 + [Tags] Sanity ODS-2683 ${upload_download_dict}= Create Dictionary mlpipeline_minio_artifact_secret=value bucket_name=value End To End Pipeline Workflow Using Kfp ... admin_username=${TEST_USER.USERNAME} @@ -61,10 +63,9 @@ Verify Upload Download In Data Science Pipelines Using The kfp Python Package ... project=${PROJECT_NAME} ... python_file=upload_download.py ... method_name=wire_up_pipeline - ... status_check_timeout=440 + ... status_check_timeout=180 ... pipeline_params=${upload_download_dict} - [Teardown] Remove Pipeline Project ${PROJECT_NAME} - + [Teardown] Projects.Delete Project Via CLI By Display Name ${PROJECT_NAME} Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The kfp Python Package @@ -82,10 +83,10 @@ Verify Ods Users Can Create And Run A Data Science Pipeline With Ray Using The k ... project=${PROJECT_NAME} ... python_file=ray_integration.py ... method_name=ray_integration - ... 
status_check_timeout=440 + ... status_check_timeout=600 ... pipeline_params=${ray_dict} ... ray=${TRUE} - [Teardown] Remove Pipeline Project ${PROJECT_NAME} + [Teardown] Projects.Delete Project Via CLI By Display Name ${PROJECT_NAME} *** Keywords *** @@ -95,9 +96,11 @@ End To End Pipeline Workflow Using Kfp ... clean the pipeline resources. [Arguments] ${username} ${password} ${admin_username} ${admin_password} ${project} ${python_file} ... ${method_name} ${pipeline_params} ${status_check_timeout}=160 ${ray}=${FALSE} - Remove Pipeline Project ${project} - New Project ${project} - Install DataSciencePipelinesApplication CR ${project} + + Projects.Delete Project Via CLI By Display Name ${project} + Projects.Create Data Science Project From CLI name=${project} + + DataSciencePipelinesBackend.Create PipelineServer Using Custom DSPA ${project} ${status} Login And Wait Dsp Route ${admin_username} ${admin_password} ${project} Should Be True ${status} == 200 Could not login to the Data Science Pipelines Rest API OR DSP routing is not working # we remove and add a new project for sanity. LocalQueue is per namespace @@ -114,9 +117,9 @@ End To End Pipeline Workflow Using Kfp ${run_id} Create Run From Pipeline Func ${username} ${password} ${project} ... ${python_file} ${method_name} pipeline_params=${pipeline_params} pip_index_url=${pip_index_url} ... pip_trusted_host=${pip_trusted_host} - ${run_status} Check Run Status ${run_id} timeout=500 + ${run_status} Check Run Status ${run_id} timeout=${status_check_timeout} Should Be Equal As Strings ${run_status} SUCCEEDED Pipeline run doesn't have a status that means success. 
Check the logs - Remove Pipeline Project ${project} + Projects.Delete Project Via CLI By Display Name ${project} Data Science Pipelines Suite Setup [Documentation] Data Science Pipelines Suite Setup diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-ui.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-ui.robot index eff02ef33..03f8ef344 100644 --- a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-ui.robot +++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__data-science-pipelines-ui.robot @@ -5,7 +5,8 @@ Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/P Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/DataConnections.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataScienceProject/Pipelines.resource Resource ../../Resources/Page/ODH/ODHDashboard/ODHDataSciencePipelines.resource -Test Tags DataSciencePipelines +Resource ../../Resources/CLI/DataSciencePipelines/DataSciencePipelinesBackend.resource +Test Tags DataSciencePipelines-Dashboard Suite Setup Pipelines Suite Setup Suite Teardown Pipelines Suite Teardown @@ -32,14 +33,13 @@ Verify User Can Create, Run and Delete A DS Pipeline From DS Project Details Pag ... AutomationBug: RHOAIENG-10941 [Tags] Smoke ... ODS-2206 ODS-2226 ODS-2633 - ... AutomationBug Open Data Science Project Details Page ${PRJ_TITLE} - Create Pipeline Server dc_name=${DC_NAME} project_title=${PRJ_TITLE} + Pipelines.Create Pipeline Server dc_name=${DC_NAME} project_title=${PRJ_TITLE} Verify There Is No "Error Displaying Pipelines" After Creating Pipeline Server Verify That There Are No Sample Pipelines After Creating Pipeline Server - Wait Until Pipeline Server Is Deployed project_title=${PRJ_TITLE} + DataSciencePipelinesBackend.Wait Until Pipeline Server Is Deployed namespace=${PRJ_TITLE} Import Pipeline name=${PIPELINE_TEST_NAME} ... 
description=${PIPELINE_TEST_DESC} @@ -51,6 +51,7 @@ Verify User Can Create, Run and Delete A DS Pipeline From DS Project Details Pag # Pipeline Context Menu Should Be Working pipeline_name=${PIPELINE_TEST_NAME} # Pipeline Yaml Should Be Readonly pipeline_name=${PIPELINE_TEST_NAME} + Open Data Science Project Details Page ${PRJ_TITLE} tab_id=pipelines-projects Pipeline Should Be Listed pipeline_name=${PIPELINE_TEST_NAME} ... pipeline_description=${PIPELINE_TEST_DESC} diff --git a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__test-run-data-science-pipelines-operator-e2e-tests.robot b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__test-run-data-science-pipelines-operator-e2e-tests.robot index 9b1ce63ba..73f531233 100644 --- a/ods_ci/tests/Tests/1100__data_science_pipelines/1100__test-run-data-science-pipelines-operator-e2e-tests.robot +++ b/ods_ci/tests/Tests/1100__data_science_pipelines/1100__test-run-data-science-pipelines-operator-e2e-tests.robot @@ -22,9 +22,9 @@ ${KUBECONFIGPATH} %{HOME}/.kube/config Run Data Science Pipelines Operator Integration Tests [Documentation] Run Data Science Pipelines Operator Integration Tests [Tags] - ... DataSciencePipelines + ... DataSciencePipelines-Backend ... Tier1 - ... ODS-2632 + ... 
ODS-2632 AutomationBug ${openshift_api} Get Openshift Server Log ${openshift_api} ${return_code} ${output} Run And Return Rc And Output cd ${DATA-SCIENCE-PIPELINES-OPERATOR-SDK_DIR} && make integrationtest K8SAPISERVERHOST=${openshift_api} DSPANAMESPACE=${DSPANAMESPACE} KUBECONFIGPATH=${KUBECONFIGPATH} diff --git a/ods_ci/utils/scripts/testconfig/generateTestConfigFile.py b/ods_ci/utils/scripts/testconfig/generateTestConfigFile.py index 780a4842a..6a460377b 100755 --- a/ods_ci/utils/scripts/testconfig/generateTestConfigFile.py +++ b/ods_ci/utils/scripts/testconfig/generateTestConfigFile.py @@ -261,6 +261,7 @@ def generate_test_config_file( data["OPERATOR_NAMESPACE"] = config_data["OPERATOR_NAMESPACE"] data["NOTEBOOKS_NAMESPACE"] = config_data["NOTEBOOKS_NAMESPACE"] data["OPENSHIFT_PIPELINES_CHANNEL"] = config_data["OPENSHIFT_PIPELINES_CHANNEL"] + data["RHODS_OSD_INSTALL_REPO"] = config_data["RHODS_OSD_INSTALL_REPO"] if config_data.get("PIP_INDEX_URL"): data["PIP_INDEX_URL"] = config_data["PIP_INDEX_URL"] if config_data.get("PIP_TRUSTED_HOST"): diff --git a/ods_ci/utils/scripts/testconfig/test-variables.yml b/ods_ci/utils/scripts/testconfig/test-variables.yml index 6ba5dcf44..dfb5ea91d 100644 --- a/ods_ci/utils/scripts/testconfig/test-variables.yml +++ b/ods_ci/utils/scripts/testconfig/test-variables.yml @@ -78,3 +78,4 @@ OPERATOR_NAME: OPERATOR_NAME OPERATOR_NAMESPACE: OPERATOR_NAMESPACE NOTEBOOKS_NAMESPACE: NOTEBOOKS_NAMESPACE OPENSHIFT_PIPELINES_CHANNEL: OPENSHIFT_PIPELINES_CHANNEL +RHODS_OSD_INSTALL_REPO: RHODS_OSD_INSTALL_REPO