From a25aeab5dc75b08be9a43bffd75fdeabafd0eb78 Mon Sep 17 00:00:00 2001
From: rnetser
Date: Mon, 16 Dec 2024 18:20:01 +0200
Subject: [PATCH] fix model server upgrade

rh-pre-commit.version: 2.3.2
rh-pre-commit.check-secrets: ENABLED
---
 .../ODH/ODHDashboard/ODHModelServing.resource |  16 ++-
 .../0201__pre_upgrade.robot                   | 101 ++++++++++++------
 .../0203__post_upgrade.robot                  |  42 +++++---
 3 files changed, 103 insertions(+), 56 deletions(-)

diff --git a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource
index e2036e09d..a9bd26484 100644
--- a/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource
+++ b/ods_ci/tests/Resources/Page/ODH/ODHDashboard/ODHModelServing.resource
@@ -264,6 +264,7 @@ Get Model Inference
     ...    set to ${TRUE}.
     [Arguments]    ${model_name}    ${inference_input}    ${token_auth}=${FALSE}    ${project_title}=${NONE}
     ...    ${kserve_mode}=Serverless    ${deployment_mode}=UI    ${service_port}=8888    ${end_point}=${NONE}
+    ...    ${service_account_name}=default-name    ${token}=${NONE}    ${set_json_content_type}=${FALSE}
     ${curl_cmd}=    Set Variable    ${NONE}
     ${self_managed}=    Is RHODS Self-Managed
     IF    $deployment_mode == 'UI'
@@ -275,18 +276,24 @@ Get Model Inference
             IF    "${project_title}" == "${NONE}"
                 ${project_title}=    Get Model Project    ${model_name}
             END
-            ${token}=    Get Access Token via UI    ${project_title}
+            IF    $token == ${NONE}
+                ${token}=    Get Access Token via UI    ${project_title}    service_account_name=${service_account_name}
+                ...    single_model=${kserve}    model_name=${model_name}
+            END
             ${curl_cmd}=    Catenate    ${curl_cmd} -H "Authorization: Bearer ${token}"
         END
+        Log    ${curl_cmd}
         IF    ${kserve}
             Fetch Knative CA Certificate    filename=openshift_ca_istio_knative.crt
             ${curl_cmd}=    Catenate    ${curl_cmd} --cacert openshift_ca_istio_knative.crt
+            Log    ${curl_cmd}
         ELSE IF    ${self_managed}
             Fetch Openshift CA Bundle
             ${curl_cmd}=    Catenate    ${curl_cmd} --cacert openshift_ca.crt
+            Log    ${curl_cmd}
         END
     ELSE IF    $deployment_mode == 'Cli'
-        ${rc}    ${cmd_op}    Run And Return Rc And Output
+        ${rc}    ${cmd_op}=    Run And Return Rc And Output
         ...    oc get isvc ${model_name} -o jsonpath='{.metadata.annotations.serving\.kserve\.io/deploymentMode}' -n ${project_title}    # robocop: disable
         Should Be Equal As Integers    ${rc}    0
         IF    "${cmd_op}" != "ModelMesh"
@@ -323,14 +330,15 @@ Verify Model Inference
     [Documentation]    Verifies that the inference result of a model is equal to an expected output
     [Arguments]    ${model_name}    ${inference_input}    ${expected_inference_output}    ${token_auth}=${FALSE}
     ...    ${project_title}=${NONE}    ${deployment_mode}=UI    ${kserve_mode}=Serverless
-    ...    ${service_port}=${NONE}    ${end_point}=${NONE}
+    ...    ${service_port}=${NONE}    ${end_point}=${NONE}    ${token}=${NONE}
     IF    $deployment_mode == 'UI'
         Open Model Serving Home Page
         Switch Model Serving Project    ${project_title}
     END
     ${inference_output}=    Get Model Inference    model_name=${model_name}    inference_input=${inference_input}
     ...    token_auth=${token_auth}    kserve_mode=${kserve_mode}    project_title=${project_title}
-    ...    deployment_mode=${deployment_mode}    service_port=${service_port}    end_point=${end_point}    # robocop: disable
+    ...    deployment_mode=${deployment_mode}    service_port=${service_port}    end_point=${end_point}
+    ...    token=${token}
     Log    ${inference_output}
     ${result}    ${list}=    Inference Comparison    ${expected_inference_output}    ${inference_output}
     Log    ${result}
diff --git a/ods_ci/tests/Tests/0200__rhoai_upgrade/0201__pre_upgrade.robot b/ods_ci/tests/Tests/0200__rhoai_upgrade/0201__pre_upgrade.robot
index d49e72d67..37c454b72 100644
--- a/ods_ci/tests/Tests/0200__rhoai_upgrade/0201__pre_upgrade.robot
+++ b/ods_ci/tests/Tests/0200__rhoai_upgrade/0201__pre_upgrade.robot
@@ -26,18 +26,9 @@ Test Tags    PreUpgrade
 
 
 *** Variables ***
-${CUSTOM_CULLER_TIMEOUT}    60000
-${S_SIZE}    25
-${INFERENCE_INPUT}=    @tests/Resources/Files/modelmesh-mnist-input.json
-${INFERENCE_INPUT_OPENVINO}=    @tests/Resources/Files/openvino-example-input.json
-${EXPECTED_INFERENCE_OUTPUT}=    {"model_name":"test-model__isvc-83d6fab7bd","model_version":"1","outputs":[{"name":"Plus214_Output_0","datatype":"FP32","shape":[1,10],"data":[-8.233053,-7.7497034,-3.4236815,12.3630295,-12.079103,17.266596,-10.570976,0.7130762,3.321715,1.3621228]}]}
-${EXPECTED_INFERENCE_OUTPUT_OPENVINO}=    {"model_name":"test-model__isvc-8655dc7979","model_version":"1","outputs":[{"name":"Func/StatefulPartitionedCall/output/_13:0","datatype":"FP32","shape":[1,1],"data":[0.99999994]}]}
-${PRJ_TITLE}=    model-serving-upgrade
-${PRJ_DESCRIPTION}=    project used for model serving tests
-${MODEL_NAME}=    test-model
-${MODEL_CREATED}=    ${FALSE}
-${RUNTIME_NAME}=    Model Serving Test
-${DW_PROJECT_CREATED}=    False
+${CUSTOM_CULLER_TIMEOUT}    60000
+${S_SIZE}    25
+${DW_PROJECT_CREATED}    False
 
 
 *** Test Cases ***
@@ -88,32 +79,72 @@ Verify User Can Disable The Runtime
     Disable Model Serving Runtime Using CLI    namespace=redhat-ods-applications
 
 Verify Model Can Be Deployed Via UI For Upgrade
-    [Tags]    Upgrade
-    [Setup]    Begin Web Test
-    ${runtime_pod_name} =    Replace String Using Regexp    string=${RUNTIME_NAME}    pattern=\\s    replace_with=-
-    ${runtime_pod_name} =    Convert To Lower Case    ${runtime_pod_name}
+    # robocop: off=too-long-test-case
+    # robocop: off=too-many-calls-in-test-case
+    [Documentation]    Verify Model Can Be Deployed Via UI For Upgrade
+    [Tags]    Upgrade
+    [Setup]    Begin Web Test
+    ${PRJ_TITLE}=    Set Variable    model-serving-upgrade
+    ${PRJ_DESCRIPTION}=    Set Variable    project used for model serving tests
+    ${MODEL_NAME}=    Set Variable    test-model
+    ${MODEL_CREATED}=    Set Variable    ${FALSE}
+    ${RUNTIME_NAME}=    Set Variable    Model Serving Test
+    ${INFERENCE_INPUT_OPENVINO}=    Set Variable
+    ...    @tests/Resources/Files/openvino-example-input.json
+    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}=    Set Variable
+    ...    {"model_name":"test-model__isvc-8655dc7979","model_version":"1","outputs":[{"name":"Func/StatefulPartitionedCall/output/_13:0","datatype":"FP32","shape":[1,1],"data":[0.99999994]}]}    # robocop: disable:line-too-long
+    ${runtime_pod_name}=    Replace String Using Regexp
+    ...    string=${RUNTIME_NAME}
+    ...    pattern=\\s
+    ...    replace_with=-
+    ${runtime_pod_name}=    Convert To Lower Case    ${runtime_pod_name}
     Fetch CA Certificate If RHODS Is Self-Managed
     Clean All Models Of Current User
     Open Data Science Projects Home Page
-    Wait For RHODS Dashboard To Load    wait_for_cards=${FALSE}    expected_page=Data Science Projects
-    Create Data Science Project    title=${PRJ_TITLE}    description=${PRJ_DESCRIPTION}
-    Create S3 Data Connection    project_title=${PRJ_TITLE}    dc_name=model-serving-connection
-    ...    aws_access_key=${S3.AWS_ACCESS_KEY_ID}    aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY}
-    ...    aws_bucket_name=ods-ci-s3
-    Create Model Server    token=${FALSE}    server_name=${RUNTIME_NAME}
-    Serve Model    project_name=${PRJ_TITLE}    model_name=${MODEL_NAME}    framework=openvino_ir    existing_data_connection=${TRUE}
-    ...    data_connection_name=model-serving-connection    model_path=openvino-example-model
-    Run Keyword And Continue On Failure    Wait Until Keyword Succeeds
-    ...    5 min    10 sec    Verify Openvino Deployment    runtime_name=${runtime_pod_name}
-    Run Keyword And Continue On Failure    Wait Until Keyword Succeeds    5 min    10 sec    Verify Serving Service
-    Verify Model Status    ${MODEL_NAME}    success
-    Set Suite Variable    ${MODEL_CREATED}    ${TRUE}
-    Run Keyword And Continue On Failure    Verify Model Inference    ${MODEL_NAME}    ${INFERENCE_INPUT_OPENVINO}    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}    token_auth=${FALSE}
-    Remove File    openshift_ca.crt
-    [Teardown]    Run Keywords    Dashboard Test Teardown
-    ...    AND
-    ...    Run Keyword If Test Failed    Get Events And Pod Logs    namespace=${PRJ_TITLE}
-    ...    label_selector=name=modelmesh-serving-${runtime_pod_name}
+    Wait For RHODS Dashboard To Load
+    ...    wait_for_cards=${FALSE}
+    ...    expected_page=Data Science Projects
+    Create Data Science Project    title=${PRJ_TITLE}    description=${PRJ_DESCRIPTION}
+    Create S3 Data Connection
+    ...    project_title=${PRJ_TITLE}
+    ...    dc_name=model-serving-connection
+    ...    aws_access_key=${S3.AWS_ACCESS_KEY_ID}
+    ...    aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY}
+    ...    aws_bucket_name=ods-ci-s3
+    Create Model Server    token=${FALSE}    server_name=${RUNTIME_NAME}
+    Serve Model
+    ...    project_name=${PRJ_TITLE}
+    ...    model_name=${MODEL_NAME}
+    ...    framework=openvino_ir
+    ...    existing_data_connection=${TRUE}
+    ...    data_connection_name=model-serving-connection
+    ...    model_path=openvino-example-model
+    Run Keyword And Continue On Failure
+    ...    Wait Until Keyword Succeeds
+    ...    5 min
+    ...    10 sec
+    ...    Verify Openvino Deployment
+    ...    runtime_name=${runtime_pod_name}
+    ...    project_name=${PRJ_TITLE}
+    Run Keyword And Continue On Failure
+    ...    Wait Until Keyword Succeeds
+    ...    5 min
+    ...    10 sec
+    ...    Verify Serving Service
+    ...    project_name=${PRJ_TITLE}
+    Verify Model Status    ${MODEL_NAME}    success
+    Set Suite Variable    ${MODEL_CREATED}    ${TRUE}    # robocop: disable:replace-set-variable-with-var
+    Run Keyword And Continue On Failure
+    ...    Verify Model Inference
+    ...    ${MODEL_NAME}
+    ...    ${INFERENCE_INPUT_OPENVINO}
+    ...    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}
+    ...    token_auth=${FALSE}
+    Remove File    openshift_ca.crt
+    [Teardown]    Run Keywords    Dashboard Test Teardown
+    ...    AND
+    ...    Run Keyword If Test Failed    Get Events And Pod Logs    namespace=${PRJ_TITLE}
+    ...    label_selector=name=modelmesh-serving-${runtime_pod_name}
 
 Verify User Can Deploy Custom Runtime For Upgrade
     [Tags]    Upgrade
diff --git a/ods_ci/tests/Tests/0200__rhoai_upgrade/0203__post_upgrade.robot b/ods_ci/tests/Tests/0200__rhoai_upgrade/0203__post_upgrade.robot
index 1ea95e1af..2f3bdd004 100644
--- a/ods_ci/tests/Tests/0200__rhoai_upgrade/0203__post_upgrade.robot
+++ b/ods_ci/tests/Tests/0200__rhoai_upgrade/0203__post_upgrade.robot
@@ -27,17 +27,8 @@ Test Tags    PostUpgrade
 
 
 *** Variables ***
-${S_SIZE}    25
-${INFERENCE_INPUT}=    @tests/Resources/Files/modelmesh-mnist-input.json
-${INFERENCE_INPUT_OPENVINO}=    @tests/Resources/Files/openvino-example-input.json
-${EXPECTED_INFERENCE_OUTPUT}=    {"model_name":"test-model__isvc-83d6fab7bd","model_version":"1","outputs":[{"name":"Plus214_Output_0","datatype":"FP32","shape":[1,10],"data":[-8.233053,-7.7497034,-3.4236815,12.3630295,-12.079103,17.266596,-10.570976,0.7130762,3.321715,1.3621228]}]}
-${EXPECTED_INFERENCE_OUTPUT_OPENVINO}=    {"model_name":"test-model__isvc-8655dc7979","model_version":"1","outputs":[{"name":"Func/StatefulPartitionedCall/output/_13:0","datatype":"FP32","shape":[1,1],"data":[0.99999994]}]}
-${PRJ_TITLE}=    model-serving-upgrade
-${PRJ_DESCRIPTION}=    project used for model serving tests
-${MODEL_NAME}=    test-model
-${MODEL_CREATED}=    ${FALSE}
-${RUNTIME_NAME}=    Model Serving Test
-${DW_PROJECT_CREATED}=    False
+${S_SIZE}    25
+${DW_PROJECT_CREATED}    False
 
 
 *** Test Cases ***
@@ -133,15 +124,32 @@ Verify POD Status
     Log    "Verified rhods-notebook"
 
 Test Inference Post RHODS Upgrade
+    # robocop: off=too-many-calls-in-test-case
+    # robocop: off=too-long-test-case
     [Documentation]    Test the inference result after having deployed a model that requires Token Authentication
-    [Tags]    Upgrade
-    [Setup]    Begin Web Test
+    [Tags]    Upgrade
+    [Setup]    Begin Web Test
+    ${PRJ_TITLE}    Set Variable    model-serving-upgrade
+    ${PRJ_DESCRIPTION}    Set Variable    project used for model serving tests    # robocop: off=unused-variable    # robocop: disable:line-too-long
+    ${MODEL_NAME}    Set Variable    test-model
+    ${MODEL_CREATED}    Set Variable    ${FALSE}    # robocop: off=unused-variable
+    ${RUNTIME_NAME}    Set Variable    Model Serving Test    # robocop: off=unused-variable
+    ${INFERENCE_INPUT}    Set Variable    @tests/Resources/Files/modelmesh-mnist-input.json    # robocop: off=unused-variable    # robocop: disable:line-too-long
+    ${INFERENCE_INPUT_OPENVINO}    Set Variable
+    ...    @tests/Resources/Files/openvino-example-input.json
+    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}    Set Variable
+    ...    {"model_name":"test-model__isvc-8655dc7979","model_version":"1","outputs":[{"name":"Func/StatefulPartitionedCall/output/_13:0","datatype":"FP32","shape":[1,1],"data":[0.99999994]}]}    # robocop: disable:line-too-long
     Fetch CA Certificate If RHODS Is Self-Managed
     Open Model Serving Home Page
-    Verify Model Status    ${MODEL_NAME}    success
-    Run Keyword And Continue On Failure    Verify Model Inference    ${MODEL_NAME}    ${INFERENCE_INPUT_OPENVINO}    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}    token_auth=${FALSE}
-    Remove File    openshift_ca.crt
-    [Teardown]    Run    oc delete project ${PRJ_TITLE}
+    Verify Model Status    ${MODEL_NAME}    success
+    Run Keyword And Continue On Failure
+    ...    Verify Model Inference
+    ...    ${MODEL_NAME}
+    ...    ${INFERENCE_INPUT_OPENVINO}
+    ...    ${EXPECTED_INFERENCE_OUTPUT_OPENVINO}
+    ...    token_auth=${FALSE}
+    Remove File    openshift_ca.crt
+    [Teardown]    Run    oc delete project ${PRJ_TITLE}
 
 Verify Custom Runtime Exists After Upgrade
     [Documentation]    Test the inference result after having deployed a model that requires Token Authentication