From 051d6859f42994a24849095a85bd562aaa4c7485 Mon Sep 17 00:00:00 2001
From: Raghul-M
Date: Thu, 26 Sep 2024 20:04:37 +0530
Subject: [PATCH] enabled token

---
 .../1009__model_serving_triton_on_kserve.robot | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/ods_ci/tests/Tests/1000__model_serving/1009__model_serving_triton_on_kserve/1009__model_serving_triton_on_kserve.robot b/ods_ci/tests/Tests/1000__model_serving/1009__model_serving_triton_on_kserve/1009__model_serving_triton_on_kserve.robot
index e7f933cc8..89c802ca8 100644
--- a/ods_ci/tests/Tests/1000__model_serving/1009__model_serving_triton_on_kserve/1009__model_serving_triton_on_kserve.robot
+++ b/ods_ci/tests/Tests/1000__model_serving/1009__model_serving_triton_on_kserve/1009__model_serving_triton_on_kserve.robot
@@ -172,7 +172,8 @@ Test Tensorflow Model Rest Inference Via UI (Triton on Kserve)    # robocop: off
     ...    aws_access_key=${S3.AWS_ACCESS_KEY_ID}    aws_secret_access=${S3.AWS_SECRET_ACCESS_KEY}
     ...    aws_bucket_name=ods-ci-s3
     Deploy Kserve Model Via UI    model_name=${TENSORFLOW_MODEL_NAME}    serving_runtime=triton-kserve-rest
-    ...    data_connection=model-serving-connection    path=triton/model_repository/    model_framework=tensorflow - 2
+    ...    data_connection=model-serving-connection    path=triton/model_repository/
+    ...    model_framework=tensorflow - 2    token=${TRUE}
     Wait For Pods To Be Ready    label_selector=serving.kserve.io/inferenceservice=${TENSORFLOW_MODEL_LABEL}
     ...    namespace=${PRJ_TITLE}
     ${EXPECTED_INFERENCE_REST_OUTPUT_TENSORFLOW}=    Load Json File    file_path=${EXPECTED_INFERENCE_REST_OUTPUT_FILE_TENSORFLOW}
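
Editor's note (not part of the patch): passing token=${TRUE} to Deploy Kserve Model Via UI presumably enables the "Require token authentication" option for the inference service, so REST inference calls against the deployed model would then need to present the service-account token as a Bearer header. Below is a minimal Robot Framework sketch of such a call, assuming RequestsLibrary is installed; ${MODEL_TOKEN}, ${INFERENCE_URL}, and ${INFERENCE_INPUT_FILE} are illustrative names, not variables or keywords defined by this patch.

*** Settings ***
Library    OperatingSystem
Library    RequestsLibrary

*** Test Cases ***
Query Token-Protected Inference Endpoint
    [Documentation]    Sketch only: POST an inference request with the Bearer
    ...    token that a token-protected KServe endpoint requires.
    # ${MODEL_TOKEN} would come from the service-account secret created for the
    # inference service; ${INFERENCE_URL} from the deployed model's endpoint.
    ${headers}=    Create Dictionary    Authorization=Bearer ${MODEL_TOKEN}
    ${body}=    Get File    ${INFERENCE_INPUT_FILE}
    ${response}=    POST    ${INFERENCE_URL}    data=${body}    headers=${headers}
    Should Be Equal As Integers    ${response.status_code}    200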