From 37ddc250bb8f61c9236a148616a66f7c19f5866c Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Fri, 17 May 2024 11:31:41 +0530 Subject: [PATCH 01/24] CALM-45018: Filter out profiles other than launch profile (#445) Issue Link: https://jira.nutanix.com/browse/CALM-45018 Description: 1. Make a get call to app and fetch its response. `app_profile_config_reference` in response contains name of profile used for launching application. 2. Use this launch profile name and filter out all other profiles before decompiling. Subsequently filter out dependent packages, substrates, deployments related to removed profiles. 3. Results after decompiling with above approach contains only profile used for launch in decompiled bp. ![image](https://github.com/ideadevice/calm-dsl/assets/123161845/e385ce67-3c95-4f1c-9128-3c18640b894c) Decompiled blueprint: ` """ Generated blueprint DSL (.py) """ import json # no_qa import os # no_qa from calm.dsl.builtins import * # no_qa from calm.dsl.runbooks import CalmEndpoint as Endpoint # Secret Variables BP_CRED_admin_PASSWORD = read_local_file("BP_CRED_admin_PASSWORD") # Credentials BP_CRED_admin = basic_cred( "admin", BP_CRED_admin_PASSWORD, name="admin", type="PASSWORD", default=True, ) class Service1(Service): pass class VM1(Substrate): os_type = "Windows" provider_type = "EXISTING_VM" provider_spec = read_provider_spec(os.path.join("specs", "VM1_provider_spec.yaml")) readiness_probe = readiness_probe( connection_type="POWERSHELL", disabled=True, retries="5", connection_port=5985, address="@@{ip_address}@@", delay_secs="60", ) @action def __pre_create__(): CalmTask.Exec.escript.py2( name="Pre_Create1", filename=os.path.join( "scripts", "Substrate_VM1_Action___pre_create___Task_Pre_Create1.py" ), target=ref(VM1), ) class Default_Package(Package): services = [ref(Service1)] class Default_deployment(Deployment): min_replicas = "1" max_replicas = "3" default_replicas = "1" packages = [ref(Default_Package)] substrate = ref(VM1) class 
Default(Profile): deployments = [Default_deployment] class clonefortest_dsl_bug2(Blueprint): services = [Service1] packages = [Default_Package] substrates = [VM1] profiles = [Default] credentials = [BP_CRED_admin] class BpMetadata(Metadata): project = Ref.Project("Common") ` (cherry picked from commit 04caa35e046e6da1d19bfba83b181d96d0671ce2) --- calm/dsl/cli/apps.py | 65 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) diff --git a/calm/dsl/cli/apps.py b/calm/dsl/cli/apps.py index b673c110..1c1436d5 100644 --- a/calm/dsl/cli/apps.py +++ b/calm/dsl/cli/apps.py @@ -6,6 +6,7 @@ import uuid from json import JSONEncoder from datetime import datetime +from copy import deepcopy import arrow import click @@ -1889,8 +1890,15 @@ def decompile_app_migratable_bp(app_name, bp_dir): sys.exit("[{}] - {}".format(err["code"], err["error"])) res = res.json() + + app_res = _get_app(client, app_name) + launch_profile = app_res["spec"]["resources"]["app_profile_config_reference"].get( + "name", "" + ) + fix_missing_name_in_reference(res["spec"]) remove_non_escript_actions_variables(res["spec"]["resources"]) + filter_launch_profile(launch_profile, res["spec"]["resources"]) _decompile_bp(bp_payload=res, with_secrets=False, bp_dir=bp_dir) @@ -2282,3 +2290,60 @@ def describe_app_actions_to_update(app_name): if not any_action_to_be_modified: print("\t\t No actions found to be modified") + + +def filter_launch_profile(launch_profile, resources): + """ + Filters out other profiles and keeps only profile used for launching an application. + Subsequently also filters out profile dependent packages and substrates. 
+ + Args: + launch_profile (str): name of profile used for launching + resources (dict): blueprint spec resources + + """ + if not launch_profile: + return + + # lists to hold names of entities to be removed + substrates_to_be_removed = [] + packages_to_be_removed = [] + profiles_to_be_removed = [] + + # loop to store entities name to be removed + for profile in resources.get("app_profile_list", []): + if profile["name"] == launch_profile: + continue + + profiles_to_be_removed.append(profile["name"]) + + for deployment in profile.get("deployment_create_list", []): + packages = deployment.get("package_local_reference_list", []) + for pkg in packages: + packages_to_be_removed.append(pkg.get("name", "")) + + substrate = deployment.get("substrate_local_reference", {}) + substrates_to_be_removed.append(substrate.get("name", "")) + + # remove substrates + substrates = deepcopy(resources.get("substrate_definition_list", [])) + for substrate in resources.get("substrate_definition_list", []): + if substrate["name"] in substrates_to_be_removed: + substrates.remove(substrate) + + # remove packages + packages = deepcopy(resources.get("package_definition_list", [])) + for pkg in resources.get("package_definition_list", []): + if pkg["name"] in packages_to_be_removed: + packages.remove(pkg) + + # remove profiles + profiles = deepcopy(resources.get("app_profile_list", [])) + for profile in resources.get("app_profile_list", []): + if profile["name"] in profiles_to_be_removed: + profiles.remove(profile) + + # re-assign substrates, packages for profile used for launching application + resources["substrate_definition_list"] = substrates + resources["package_definition_list"] = packages + resources["app_profile_list"] = profiles From 2d495e79dda237cc54abf9c9ffe2184d52c10ddc Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 22 May 2024 08:17:19 +0530 Subject: [PATCH 02/24] DSL Regression Cleanup (#444) Cleanup activity: Removing stale entries in cache (~/.calm/dsl.db) from 
previous run so that it doesn't impact next regression run. Test: Tested by running another successful round of regression after cleanup. Regression: http://erdinger.eng.nutanix.com/blue/organizations/jenkins/QA%2FDSL_Regression/detail/DSL_Regression/618/pipeline/ (cherry picked from commit 28350835adbb642fb6e7cdf11a2360be4378248d) --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index c4019766..6f44fed5 100644 --- a/Makefile +++ b/Makefile @@ -39,6 +39,7 @@ clean: docker ps -aq --no-trunc --filter "status=exited" | xargs -I {} docker rm {} && \ docker image prune -f rm -rf venv/ && mkdir venv/ && touch venv/.empty + rm -rf ~/.calm/dsl.db test-verbose: dev venv/bin/py.test -s -vv From abdecebf2bb964791605bf885a116cd4c1399b23 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Thu, 23 May 2024 10:10:32 +0530 Subject: [PATCH 03/24] CALM-45116: Fix Azure Test Cases (#456) Azure provider spec test cases were failing due to deprecated image being used. Replaced it with valid image version. 
Impacted test case: `tests/cli/provider_plugins/azure/test_azure_create_spec.py` image (cherry picked from commit 9016c96a4df66e4611dd72c47aec8c67bef74e2a) --- examples/AZURE_Example/azure_spec.yaml | 2 +- tests/cli/provider_plugins/azure/normal_spec.yaml | 6 +++--- .../cli/provider_plugins/azure/spec_having_data_disks.yaml | 6 +++--- tests/cli/provider_plugins/azure/spec_having_nics.yaml | 6 +++--- tests/cli/provider_plugins/azure/spec_having_tags.yaml | 6 +++--- .../azure/spec_having_windows_os_with_gc.yaml | 6 +++--- .../provider_plugins/azure/spec_linux_os_with_secrets.yaml | 6 +++--- .../provider_plugins/azure/spec_with_availability_set.yaml | 6 +++--- tests/cli/provider_plugins/constants.py | 2 +- 9 files changed, 23 insertions(+), 23 deletions(-) diff --git a/examples/AZURE_Example/azure_spec.yaml b/examples/AZURE_Example/azure_spec.yaml index d5adb65d..4b2c9081 100644 --- a/examples/AZURE_Example/azure_spec.yaml +++ b/examples/AZURE_Example/azure_spec.yaml @@ -41,7 +41,7 @@ resources: publisher: Canonical sku: 18.04-LTS use_custom_image: false - version: 18.04.201903121 + version: 18.04.202401161 is_managed: true os_disk_details: caching_type: None diff --git a/tests/cli/provider_plugins/azure/normal_spec.yaml b/tests/cli/provider_plugins/azure/normal_spec.yaml index 3de955e9..febe48fc 100644 --- a/tests/cli/provider_plugins/azure/normal_spec.yaml +++ b/tests/cli/provider_plugins/azure/normal_spec.yaml @@ -14,7 +14,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -33,7 +33,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - 
"UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -47,5 +47,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [11, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [13, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [12, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] \ No newline at end of file diff --git a/tests/cli/provider_plugins/azure/spec_having_data_disks.yaml b/tests/cli/provider_plugins/azure/spec_having_data_disks.yaml index 25551580..afc40c40 100644 --- a/tests/cli/provider_plugins/azure/spec_having_data_disks.yaml +++ b/tests/cli/provider_plugins/azure/spec_having_data_disks.yaml @@ -14,7 +14,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -47,7 +47,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # Os disk name @@ -74,5 +74,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [11, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [13, accounts/azure_primary/locations/East US 
2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [12, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] diff --git a/tests/cli/provider_plugins/azure/spec_having_nics.yaml b/tests/cli/provider_plugins/azure/spec_having_nics.yaml index 9f779710..36d58c80 100644 --- a/tests/cli/provider_plugins/azure/spec_having_nics.yaml +++ b/tests/cli/provider_plugins/azure/spec_having_nics.yaml @@ -14,7 +14,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -52,7 +52,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -77,7 +77,7 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [11, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [13, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [14, accounts/azure_primary/locations/East US 
2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [12, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] - [22, accounts/azure_primary/locations/East US 2/security_groups/DND-CENTOS-IMAGE-nsg/index] - [23, accounts/azure_primary/locations/East US 2/virtual_networks/calm-virtual-network-eastus2/index] diff --git a/tests/cli/provider_plugins/azure/spec_having_tags.yaml b/tests/cli/provider_plugins/azure/spec_having_tags.yaml index 995a9323..1a59e527 100644 --- a/tests/cli/provider_plugins/azure/spec_having_tags.yaml +++ b/tests/cli/provider_plugins/azure/spec_having_tags.yaml @@ -14,7 +14,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -39,7 +39,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -57,5 +57,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [11, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [13, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [12, accounts/azure_primary/locations/East US 
2/publishers/Canonical/image_offers/UbuntuServer/index] diff --git a/tests/cli/provider_plugins/azure/spec_having_windows_os_with_gc.yaml b/tests/cli/provider_plugins/azure/spec_having_windows_os_with_gc.yaml index 032f8a21..2b3faa79 100644 --- a/tests/cli/provider_plugins/azure/spec_having_windows_os_with_gc.yaml +++ b/tests/cli/provider_plugins/azure/spec_having_windows_os_with_gc.yaml @@ -46,7 +46,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -84,7 +84,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -98,5 +98,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [43, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [45, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [46, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [46, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [44, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] diff --git a/tests/cli/provider_plugins/azure/spec_linux_os_with_secrets.yaml b/tests/cli/provider_plugins/azure/spec_linux_os_with_secrets.yaml index 5ce229ec..337f9a77 100644 --- a/tests/cli/provider_plugins/azure/spec_linux_os_with_secrets.yaml +++ 
b/tests/cli/provider_plugins/azure/spec_linux_os_with_secrets.yaml @@ -28,7 +28,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" # OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -53,7 +53,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -67,5 +67,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [25, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [27, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [28, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [28, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [26, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] diff --git a/tests/cli/provider_plugins/azure/spec_with_availability_set.yaml b/tests/cli/provider_plugins/azure/spec_with_availability_set.yaml index 46e86f60..e0838001 100644 --- a/tests/cli/provider_plugins/azure/spec_with_availability_set.yaml +++ b/tests/cli/provider_plugins/azure/spec_with_availability_set.yaml @@ -16,7 +16,7 @@ input: - "137" # Publisher = Canonical - "19" # Image Offer = UbuntuServer - "18" # Image Sku = 18.04-LTS -- "27" # Image Version = 18.04.201903121 +- "27" # Image Version = 18.04.202401161 - "os-@@{calm_unique_hash}@@-@@{calm_array_index}@@-disk" 
# OS Disk Name - "n" # Want to add storage type - "1" # Cache type = None @@ -36,7 +36,7 @@ spec_assertions: - "calmrg" # Resource group - "Canonical" # Publisher - "18.04-LTS" # Image Sku -- "18.04.201903121" # Image Version +- "18.04.202401161" # Image Version - "Attach" # Os disk create option - "UbuntuServer" # Image offer - "data_disk_list: []" # Data disks = No @@ -50,5 +50,5 @@ dependencies: - [4, accounts/azure_primary/resource_groups/calmrg/index] - [13, accounts/azure_primary/locations/East US 2/publishers/Canonical/index] - [15, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/index] -- [16, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.201903121/index] +- [16, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/image_skus/18.04-LTS/image_versions/18.04.202401161/index] - [14, accounts/azure_primary/locations/East US 2/publishers/Canonical/image_offers/UbuntuServer/index] diff --git a/tests/cli/provider_plugins/constants.py b/tests/cli/provider_plugins/constants.py index 4e485a8c..8dff21b0 100644 --- a/tests/cli/provider_plugins/constants.py +++ b/tests/cli/provider_plugins/constants.py @@ -24,7 +24,7 @@ class AZURE: PUBLISHERS = ["Canonical"] IMAGE_OFFERS = ["UbuntuServer"] IMAGE_SKUS = ["18.04-LTS"] - IMAGE_VERSIONS = ["18.04.201903121"] + IMAGE_VERSIONS = ["18.04.202401161"] SECURITY_GROUPS = ["DND-CENTOS-IMAGE-nsg"] VIRTUAL_NETWORKS = ["calm-virtual-network-eastus2"] SUBNETS = ["subnet1", "default"] From fb80825b072f7d5e0fe30c3b07c62ac44b010a49 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 5 Jun 2024 12:04:17 +0530 Subject: [PATCH 04/24] CALM-45373: Fix calm get marketplace bps for CALM VM (#463) Issue Link: https://jira.nutanix.com/browse/CALM-45373 Description: 1. Command was failing when categories was used as field in function `get_group_data_value` 2. 
entity_value[0]["values"] was empty list and function was returning 0th index from empty list. 3. Added extra guard condition to return empty string in case entity_value[0]["values"] is empty list. Tested the fix and calm get marketplace bps executes successfully with valid categories column: ![image](https://github.com/ideadevice/calm-dsl/assets/123161845/9865b1bc-463f-4381-86a9-bcd2aaae0bda) (cherry picked from commit 00ccc7de7caefa95fe3858841c0dd3e974f613cc) --- calm/dsl/cli/marketplace.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/calm/dsl/cli/marketplace.py b/calm/dsl/cli/marketplace.py index 64cb46ac..b85985de 100644 --- a/calm/dsl/cli/marketplace.py +++ b/calm/dsl/cli/marketplace.py @@ -68,11 +68,11 @@ def get_group_data_value(data_list, field, value_list=False): entity_value = entity["values"] if not entity_value: return None - return ( - entity_value[0]["values"] - if value_list - else entity_value[0]["values"][0] - ) + if value_list: + return entity_value[0]["values"] + else: + _value_list = entity_value[0].get("values", []) + return _value_list[0] if _value_list else "" return None From 44ffd8dafd85905a18e12dc7db72e2a577bca5b7 Mon Sep 17 00:00:00 2001 From: kushjajal7 Date: Wed, 5 Jun 2024 14:10:18 +0530 Subject: [PATCH 05/24] Bug/calm 32219 failure during guest customization (#317) Guest customisation was failing with key error if title was not a key in the optionDict during adding guest customisation. Used optionDict.get("title") which solves the issue as it returns None if not found. Also added support for XML file paths. If XML file paths are used then it extracts content and adds it to the final spec, with verification of flow verifying existence of the xml file. As shown below example which uses dummy xml. Screenshot 2023-10-18 at 4 02 51 PM Original flow where content can be directly mentioned and they get used directly is also present. As shown below. 
Screenshot 2023-10-18 at 4 01 57 PM (cherry picked from commit ef857187346f05d9f5186effc62d9f10cab42d7f) --- calm/dsl/providers/plugins/ahv_vm/main.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/calm/dsl/providers/plugins/ahv_vm/main.py b/calm/dsl/providers/plugins/ahv_vm/main.py index db9fb25d..f3b38afd 100644 --- a/calm/dsl/providers/plugins/ahv_vm/main.py +++ b/calm/dsl/providers/plugins/ahv_vm/main.py @@ -3,6 +3,7 @@ import sys import json import copy +import os from ruamel import yaml from distutils.version import LooseVersion as LV @@ -1771,7 +1772,21 @@ def create_spec(client): script["install_type"] = install_type path.append("unattend_xml") - script["unattend_xml"] = get_field(schema, path, option, default="") + + script["unattend_xml"] = "" + + while script["unattend_xml"] == "": + + script["unattend_xml"] = get_field(schema, path, option, default="") + + if script["unattend_xml"][-4:] == ".xml": + click.echo("Loading given XML file data.") + if os.path.exists(script["unattend_xml"]): + with open(script["unattend_xml"], "r") as f: + script["unattend_xml"] = f.read() + else: + click.echo("File not found!!, Enter valid file path.") + script["unattend_xml"] = "" sysprep_dict = { "unattend_xml": script["unattend_xml"], @@ -1825,7 +1840,7 @@ def find_schema(schema, path, option): resDict = None for optionDict in schema["anyOf"]: - if optionDict["title"] == option[indOpt]: + if optionDict.get("title") == option[indOpt]: resDict = optionDict break From 0fd1cb93ede1f1c962889e1de6a59c09662bbba1 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 5 Jun 2024 19:08:40 +0530 Subject: [PATCH 06/24] Update main readme.md (#475) All basic contents of main readme.md is now shifted to documentation on nutanix dev community website (https://www.nutanix.dev/docs/self-service-dsl/). Therefore, cleaning up sections and redirecting users to main documentation for respective section. 
(cherry picked from commit 1229652586ad6907cdeea4f27ca2ebc16df4cbc5) --- README.md | 257 ++++++------------------------------------------------ 1 file changed, 28 insertions(+), 229 deletions(-) diff --git a/README.md b/README.md index f9271810..220bab78 100644 --- a/README.md +++ b/README.md @@ -11,247 +11,34 @@ ## About NCM Self Service DSL -NCM Self Service DSL describes a simpler Python3 based DSL for writing NCM Self Service blueprints. As NCM Self Service uses Services, Packages, Substrates, Deployments and Application Profiles as building blocks for a Blueprint, these entities can be defined as python classes. Their attributes can be specified as class attributes and actions on those entities (procedural runbooks) can be defined neatly as class methods. NCM Self Service blueprint DSL can also accept appropriate native data formats such as YAML and JSON, allowing the reuse and leveraging that work into the larger application lifecycle context of a NCM Self Service blueprint. +NCM Self-Service DSL refers to the Domain-Specific Language (DSL) used in [NCM Self-Service (formerly Calm)](https://www.nutanix.com/products/cloud-manager/self-service), an application management platform. DSL is a specialized Python based programming language that allows users to define and automate tasks and application workflows within their infrastructure as code (IaC). It also has support for executing CLI commands empowering users to interact with and utilize Self-Service features and functionality in a convenient, efficient, and automated manner. ### Why Python3 as NCM Self Service DSL? Language design is black art, and building upon a well-established language is design-wise a big win. The language has also solved many issues like scoping, modules, if-else, inheritance, etc. 
Well established languages have great tooling support: IDEs, syntax checkers, third-party modules, coding practices, better readability, editing, syntax highlighting, code completion, versioning, collaboration, etc. They see much more community improvements as well. Python specifically comes with a very good REPL (read–eval–print-loop). Having an interactive prompt to play around and slowly build objects is an order-of-magnitude improvement in developer productivity. Python is very easy language to learn and use; and most of the ITOps/DevOps community already use Python for scripting. -## Getting Started for Developers +## Getting Started and Documentation -### Initialization - - Setup: `calm init dsl`. Please fill in the right Prism Central (PC) settings. - - Server status: `calm get server status`. Check if NCM Self Service is enabled on PC & it's version is >=2.9.7. - - Config: `calm show config`. Please see `calm set config --help` to update configuration. - -### NCM Self Service DSL Context -Context info includes server, project and log configuration for dsl operations. -- Flow: Context info is taken from config file passed inline with cli command or environment data or default config file stored mentioned in `~/.calm/init.ini`. -- Environment variables for server configuration: `CALM_DSL_PC_IP`, `CALM_DSL_PC_PORT`, `CALM_DSL_PC_USERNAME`, `CALM_DSL_PC_PASSWORD`. -- Environment variable for project configuration: `CALM_DSL_DEFAULT_PROJECT`. -- Environment variable for log configuration: `CALM_DSL_LOG_LEVEL`. -- Environment variables for init configuration: `CALM_DSL_CONFIG_FILE_LOCATION`, `CALM_DSL_LOCAL_DIR_LOCATION`, `CALM_DSL_DB_LOCATION`. -- Config file parameter: `calm --config/-c ...` -- Show config in context: `calm show config`. - -### Blueprint - - First blueprint: `calm init bp`. This will create a folder `HelloBlueprint` with all the necessary files. `HelloBlueprint/blueprint.py` is the main blueprint DSL file. 
Please read the comments in the beginning of the file for more details about the blueprint. - - Compile blueprint: `calm compile bp --file HelloBlueprint/blueprint.py`. This command will print the compiled blueprint JSON. - - Create blueprint on NCM Self Service Server: `calm create bp --file HelloBlueprint/blueprint.py --name `. Please use a unique name for ``. - - List blueprints: `calm get bps`. You can also pass in filters like `calm get bps --name ` and so on. Please look at `calm get bps --help`. - - Describe blueprint: `calm describe bp `. It will print a summary of the blueprint. - - Launch blueprint to create Application: `calm launch bp --app_name -i` - - Launch blueprint using environment configuration: `calm launch bp --app_name --environment ` - - Publish blueprint to marketplace manager: `calm publish bp --version --project `. Please look at `calm publish bp --help`. - -### Application - - List apps: `calm get apps`. Use `calm get apps -q` to show only application names. - - Create app: `calm create app -f `. Command will create blueprint and launch it to get application. Please look at `calm create app -h`. - - Describe app: `calm describe app `. It will print a summary of the application and the current application state. Use `calm describe app 2>/dev/null --out json | jq '.["status"]'` to get fields from the app json. More info on how to use `jq` [here](https://stedolan.github.io/jq/tutorial/). - - Delete app: `calm delete app `. You can delete multiple apps using: `calm get apps -q | xargs -I {} calm delete app {}`. 
- - Run action on application: `calm run action --app ` - - Start an application: `calm start app ` - - Stop an application: `calm stop app ` - - Restart an application: `calm restart app ` - - Display app action runlogs: `calm watch app ` - - Watch app action runlog: `calm watch action_runlog --app ` - - Download app action runlogs: `calm download action_runlog --app --file ` - -### Brownfield Application -- Two ways to declare brownfield deployments in dsl: User can define brownfield deployments in blueprint [file](examples/Brownfield/inline_example/blueprint.py) OR he can declare brownfield deployments in separate [file](examples/Brownfield/separate_file_example/brownfield_deployments.py) and pass it as cli parameter while creating brownfield application. -- List Brownfield vms: `calm get brownfield vms --project --type [AHV_VM|AWS_VM|AZURE_VM|GCP_VM|VMWARE_VM]`. Please use `--account` cli option, if project has multiple accounts for a provider type. -- Compile Blueprint: `calm compile bp -f -b `. -- Create Brownfield Application: `calm create app -f -b -n -i`. -- Create Brownfield Application using existing blueprint: `calm launch bp -b -n `. Command will launch existing blueprint to create brownfield application (3.3.0 onwards). Look at sample file [here](examples/Brownfield/separate_file_example/brownfield_deployments.py). - -### Decompiling Blueprints (`.json`->`.py`) -Decompilation is process to consume json data for any entity and convert it back to dsl python helpers/classes. Currently, decompile is supported for converting blueprint json to python files. Summary of support for blueprint decompilation(Experimental feature): -- Python helpers/classes are automatically generated with the use of jinja templates. -- Generated python file is formatted using [black](https://github.com/psf/black) -- Default values for most of the entities will be shown in decompiled file. 
-- Separate files are created under `.local` directory in decompiled blueprint directory for handling secrets used inside blueprints i.e. passwords etc. -- Separate files are created under `scripts` directory in decompiled blueprint directory for storing scripts used in variable, tasks, guest customization etc. -- Provider specs (Other than AHV) / Runtime editables for substrates are stored in `specs` directory in blueprint directory. -- Name of created files are taken from the context of variable/task. For ex: Filename for service action task script: Service_MySQLService_Action___create___Task_Task1 -- Decompile blueprint with secrets: Decompile the blueprint using the `--passphrase` or`-ps` flag. This will create files for all secret files and have encrypted secrets in them. This also creates a `decompiled_secrets.bin` file (Not to be changed) which is used during blueprint create for validating if the encrypted secret value is not modified while launching the blueprint again. -- [Deprecated] Decompile blueprint : `calm decompile bp `. Use `calm decompile bp --with_secrets` to fill the value for secrets at runtime in the terminal which are used inside blueprint. -- Decompile bp from existing json file: `calm decompile bp --file `. -- Decompile marketplace blueprint: `calm decompile marketplace bp --version `. -- Decompile bp to a location: `calm decompile bp --dir `. It will decompile blueprint entities to `bp_dir` location. -- Note: Decompliation support for providers other than AHV is experimental. - -### Runbooks - - First runbook: `calm init runbook`. This will create a folder `HelloRunbook` with all the necessary files. `HelloRunbook/runbook.py` is the main runbook DSL file. Please read the comments in the beginning of the file for more details about the runbook. - - Compile runbook: `calm compile runbook --file HelloRunbook/runbook.py`. This command will print the compiled runbook JSON. 
- - Create runbook on Calm Server: `calm create runbook --file HelloRunbook/runbook.py --name `. Please use a unique name for ``. - - List runbooks: `calm get runbooks`. You can also pass in filters like `calm get runbooks --name ` and so on. Please look at `calm get runbooks --help`. - - Describe runbook: `calm describe runbook `. It will print a summary of the runbook. - - Execute runbook: `calm run runbook `. Please look at `calm run runbook -h` for more info. - - List runbook executions: `calm get runbook_executions`. - - Watch runbook execution: `calm watch runbook_execution `. It will display the runbook execution. - - Pause runbook execution: `calm pause runbook_execution `. It will pause the running runbook execution. - - Resume runbook execution: `calm resume runbook_execution `. It will play/resume the paused runbook execution. - - Abort runbook execution: `calm abort runbook_execution `. It will abort the runbook execution. - - Please look [here](docs/01-Calm-Terminology#runbooks) for more details. - -### Decompiling Runbooks (`.json`->`.py`) -Decompilation is process to consume json data for any entity and convert it back to dsl python helpers/classes. Currently, decompile is supported for converting blueprint and runbook jsons to python files. Summary of support for runbook decompilation(Experimental feature): -- Python helpers/classes are automatically generated with the use of jinja templates. -- Generated python file is formatted using [black](https://github.com/psf/black) -- Default values for most of the entities will be shown in decompiled file. -- Separate files are created under `.local` directory in decompiled runbook directory for handling secrets used inside runbooks i.e. passwords etc. -- Separate files are created under `scripts` directory in decompiled runbook directory for storing scripts used in variable, tasks etc. -- Name of created files are taken from the context of variable/task. 
For ex: Filename for task script: _Runbook_test_rb_1_Task_Task2.py -- Decompile existing server runbook: `calm decompile runbook `. -- Decompile runbook from existing json file: `calm decompile runbook --file `. -- Decompile runbook to a location: `calm decompile runbook --dir `. It will decompile runbook entities to `runbook_dir` location. - -### Task Library - - List task library items: `calm get library tasks`. Use `calm get library tasks -q` to show only task library names. - - Create task library item: `calm create library task -f `. Command will create task under library. Please look at `calm create library task -h`. - - Describe task library item: `calm describe library task `. It will print a summary of the task and the current state. Use `calm describe library task 2>/dev/null --out json | jq '.["status"]'` to get fields from the task json. More info on how to use `jq` [here](https://stedolan.github.io/jq/tutorial/). - - Delete task library item: `calm delete library task `. You can delete multiple task library items using: `calm get library tasks -q | xargs -I {} calm delete library task {}`. - - Import script files as task library item: `calm import library task -f (.json, .sh, .escript, .ps1)`. Create task under library by passing scripts shell, powershell etc. - - -## Getting started for Admins - -### Initialization - - Setup: `calm init dsl`. Please fill in the right Prism Central (PC) settings. - - Server status: `calm get server status`. Check if NCM Self Service is enabled on PC & its version is >=2.9.7. - - Config: `calm show config`. Please see `calm set config --help` to update configuration. - -### NCM Self Service DSL Context -Context information includes server, project and log configuration for dsl operations. -- Flow: Context info is taken from config file passed inline with cli command or environment data or default config file mentioned in `~/.calm/init.ini`. 
-- Environment variables for server configuration: `CALM_DSL_PC_IP`, `CALM_DSL_PC_PORT`, `CALM_DSL_PC_USERNAME`, `CALM_DSL_PC_PASSWORD`. -- Environment variable for project configuration: `CALM_DSL_DEFAULT_PROJECT`. -- Environment variable for log configuration: `CALM_DSL_LOG_LEVEL`. -- Environment variables for init configuration: `CALM_DSL_CONFIG_FILE_LOCATION`, `CALM_DSL_LOCAL_DIR_LOCATION`, `CALM_DSL_DB_LOCATION`. -- Config file parameter: `calm --config/-c ...` -- Show config in context: `calm show config`. - -### Roles -Use `calm get roles` to list all roles in PC. The below roles are relevant for NCM Self Service: - - `Prism Admin`: Day-to-day admin of a Nutanix deployment. Manages the infrastructure and platform, but cannot entitle other users to be admins. - - `Project Admin`: Team lead to whom cloud administration gets delegated in the context of a project. Manages end users within the project and has full access to their entities. - - `Developer`: Application developer within a team. Authors blueprints, tests deployments, and publishes applications for other project members. - - `Operator`: Owner of team applications at runtime. Works on existing application deployments, exercises blueprint actions. - - `Consumer`: Lifecycle manager for team applications. Launches blueprints and controls their lifecycle and actions. - -### Directory Services -- Current directory services are listed under `calm get directory_services`. - -### Users -- Create user: `calm create user --name --directory `. -- List users: `calm get users`. Get users, optionally filtered by a string -- Delete user: `calm delete user ` - -### User-Groups -- Create group: `calm create group `. -- List groups: `calm get groups`. Get user groups, optionally filtered by a string -- Delete group: `calm delete group ` - -### Projects -- Compile project: `calm compile project --file `. This command will print the compiled project JSON. 
Look at sample file [here](examples/Project/demo_project.py) and [here](examples/Project/project_with_env.py). -- Create project on NCM Self Service Server: `calm create project --file --name --description `.\ -**Options:**\ -           `--no-cache-update`: flag to skip cache updations post operation.\ -           `--force`: flag to delete existing project with the same name before create, if entities are not associated with it. - -- List projects: `calm get projects`. Get projects, optionally filtered by a string -- Describe project: `calm describe project `. It will print summary of project. -- Update project using dsl file: `calm update project --file `. Environments will not be updated as part of this operation. Use `no-cache-update` flag to skip cache updations post operation. -- Update project using cli switches: `calm update project --add_user/--remove_user --add_group/--remove_group --add_account/--remove_account `. -- Delete project: `calm delete project `. Use `no-cache-update` flag to skip cache updations post operation. -- Enable/Disable Quotas: - - During project creation, it checks if quotas are available in project payload (json or python file). If it is there, then quotas are enabled in the project. - - During project updation, use the following flags `--enable-quotas/-eq` and `--disable-quotas/-dq` - - If the project already has quotas set and enabled and quotas are present in {project_file} then the quotas would be updated - - If the project already has quotas set and enabled and there are no quotas in {project_file} then the original quotas in the projects would be persisted. - - If the project doesn't have quotas enabled/set and the {project_file} has quotas then the quotas would be enabled and set in the project. -- Note: While using `no-cache-update` flag in project create and update commands, user should not pass environment object in the project model. User should update the cache separately after creation/updation of projects. 
Feature is experimental and will be discontinued after [#184](https://github.com/nutanix/calm-dsl/issues/184) is fixed. -- Decompile existing server project: `calm decompile project `. -- Decompile project from existing json file: `calm decompile project --file `. -- Decompile project to a location: `calm decompile project --dir `. It will decompile project entities to `project_dir` location. - -### Environments -- Compile environment: `calm compile environment --file --project `. Command will print the compiled environment JSON. Look at sample file [here](examples/Environment/sample_environment.py) -- Create environment to existing project: `calm create environment --file --project --name `.\ -**Options:**\ -           `--no-cache-update`: flag to skip cache updations post operation.\ -           `--force`: flag to delete existing environment in a project with the same name before create, if entities are not associated with it. -- Update environment: `calm update environment --file --project `. Use `no-cache-update` flag to skip cache updations post operation. -- List environments: `calm get environments --project `. Get environments of project. -- Delete environment: `calm delete environment --project `. Use `no-cache-update` flag to skip cache updations post operation. -- Decompile existing server environment: `calm decompile environment --project `. -- Decompile environment from existing json file: `calm decompile environment --file --project `. -- Decompile environment to a location: `calm decompile environment --project --dir `. It will decompile environment entities to `environment_dir` location. - -### Access Control Policies -Access control policies ensures that a project member can access only the entities or perform only the actions defined in the role assigned to that project member. -- Create ACP: `calm create acp --role --project --user --group --name `. It is used to assign given role to users/groups. 
Parameters `user` and `group` can be provided multiple times. -- List ACPs: `calm get acps --project `. Get acps, optionally filtered by a string -- Describe ACP: `calm describe acp --project `. -- Update ACP: `calm update acp --project --add_user/--remove_user --add_group/--remove_group `. Parameters `add_user`, `remove_user`, `add_group` and `remove_group` can be provided multiple times. -- Delete ACP: `calm delete acp --project `. - -### Examples -Sample Project flow for `Admin` users: -- Project Creation: `calm create project --file "project_file_location" --name "project_name"` -- Create users: `calm create user --name "user_1" --directory "user_1_directory_service"` -- Create User-Group: `calm create group "group_1"` -- Update Project for adding created users/groups to project: `calm update project "project_name" --add_user "user_1" --add_user "user_2" --add_group "group_1" --add_group "group_2"`. -- Create ACP for `Project Admin` role assignment to project users/groups: `calm create acp --role "Project Admin" --project "project_name" --user "user_1" --user "user_2" --group "group_1" --group "group_2" --name "acp_name"` - -Sample Project Flow for `Project Admin` users: -- Update project for adding/removing users or groups in project: `calm update project "project_name" --add_user "user_3" --remove_user "user_2" --add_group "group_3" --remove_group "group_2"`. -- Create ACPs for other roles in project i.e. Consumer, Developer, Operator. Ex: `calm create acp --role "Developer" --project "project_name" --user "user_3" --group "group_3" --name "acp_developer_name"` -- Update ACPs: `calm update acp "acp_developer_name" --project "project_name" --add_user "user_1" --remove_user "user_3" --add_group "group_1" --remove_group "group_3"`. 
- -## Docker - - For Self-Service(formerly Calm) version less than 3.7.2.1: - - Latest image: `docker pull ntnx/calm-dsl:{tag}` - - Run: `docker run -it ntnx/calm-dsl:{tag}` - - From Self-Service(formerly Calm) version 3.7.2.1 onwards: - - Latest image: `docker pull nutanix/calm-dsl:{tag}` - - Run: `docker run -it nutanix/calm-dsl:{tag}` - - Note: Tag starts with `v`. Ex: `v3.7.2.1`, `v3.7.0`. - -## Dev Setup -MacOS: - - Install [Xcode](https://apps.apple.com/us/app/xcode/id497799835) - - Install homebrew: `/usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"`. - - Install python3, git and openssl: `brew install git python3 openssl`. - - Install virtualenv: `pip install virtualenv` - - Add path to flags: `export LDFLAGS="-L$(brew --prefix openssl)/lib"` & `export CFLAGS="-I$(brew --prefix openssl)/include"`. - - Clone this repo and run: `make dev` from top directory. - - Getting into virtualenv: `source venv/bin/activate`. - - Getting out of virtualenv: `deactivate`. - -Centos: - - `make _init_centos` to setup your CentOS 7 VM for development. This will install python3 and docker. - -Use: - - `make dev` to create/use python3 virtualenv in `$TOPDIR/venv` and setup dev environment. Activate it by calling `source venv/bin/activate`. Use `deactivate` to deactivate virtualenv. - - `make test` to run quick tests. `make test-all` to run all tests. - - `make dist` to generate a `calm.dsl` python distribution. - - `make docker` to build docker container. (Assumes docker client is setup on your machine) - - `make run` to run container. - - `make clean` to reset. 
- -## Documentation +**Complete documentation is available on Nutanix Dev Community [Website](https://www.nutanix.dev/self-service-dsl/)** + - [DSL Technical Documentation](https://www.nutanix.dev/docs/self-service-dsl/) + - [DSL Setup](https://www.nutanix.dev/docs/self-service-dsl/setup/) + - [DSL Initialization](https://www.nutanix.dev/docs/self-service-dsl/initialization/) + - [DSL CLI commands](https://www.nutanix.dev/docs/self-service-dsl/getting-started/commands/) + - [DSL Release Notes](https://github.com/nutanix/calm-dsl/tree/master/release-notes) - [NCM Self Service Terminology](docs/01-Calm-Terminology/) - - [DSL Blueprint Architecture](docs/02-DSL-Blueprint-Architecture/) - - [DSL Lab](docs/03-Quickstart/) + - [DSL Blueprint Architecture](https://www.nutanix.dev/docs/self-service-dsl/models/Blueprint/) + + ### Tutorials + - [Create your First Blueprint](https://www.nutanix.dev/docs/self-service-dsl/tutorial/first_blueprint/) + - [Create your First Runbook](https://www.nutanix.dev/docs/self-service-dsl/tutorial/first_runbook/) -## Video Links +### Video Links - [Workstation Setup](https://youtu.be/uIZmHQhioZg) - [Blueprint & App management](https://youtu.be/jb-ZllhaROs) - [NCM Self Service DSL Blueprint Architecture](https://youtu.be/Y-6eq91rtSw) -## [Blogs](https://www.nutanix.dev/calm-dsl/) +### [Blogs](https://www.nutanix.dev/calm-dsl/) - [Introducing the NCM Self Service DSL](https://www.nutanix.dev/2020/03/17/introducing-the-nutanix-calm-dsl/) - [Creating Custom Blueprint](https://www.nutanix.dev/2020/03/30/nutanix-calm-dsl-creating-custom-blueprint/) - [Generating VM Specs](https://www.nutanix.dev/2020/04/06/nutanix-calm-dsl-generating-vm-specs/) @@ -259,6 +46,18 @@ Use: - [Remote Container Development (Part 1)](https://www.nutanix.dev/2020/04/24/nutanix-calm-dsl-remote-container-development-part-1/) - [From UI to Code – NCM Self Service DSL and Blueprint 
Decompile](https://www.nutanix.dev/2020/07/20/from-ui-to-code-calm-dsl-and-blueprint-decompile/) -## Demos +### Demos - [Zero-touch CI/CD - VDI Template Creation with NCM Self Service DSL](https://youtu.be/5k_K7idGxsI) - [Integrating with Azure DevOps CI/CD pipeline](https://youtu.be/496bvlIi4pk) + +## Contributing to Self-Service DSL + +This repository only contains Self-Service DSL command line interface and the python model for different Self-Service entities. To know more about compiling DSL and contributing suggested changes, refer to the [contribution guide](https://www.nutanix.dev/docs/self-service-dsl/contributions/). + +## Reporting Issues + +To raise and track any improvement or a bug, create an open issue in DSL github repository, [issue section.](https://github.com/nutanix/calm-dsl/issues) + +## License + +**[Apache-2.0 license](https://github.com/nutanix/calm-dsl/blob/master/LICENSE)** \ No newline at end of file From 357baada724622df1a0d0066154cb204324cd54e Mon Sep 17 00:00:00 2001 From: sankarspk Date: Tue, 18 Jun 2024 11:19:16 +0530 Subject: [PATCH 07/24] task/Automate_CALM_44434 (#481) **PR Description** Summary: Automate_CALM_44434_Policy_ID_updated_in_approval_name_list Test Plan: Approval policy name updated with policy ID so updated the code Bugs: #CALM-44434 (cherry picked from commit c5e00319ef6ec49c841f378ade105f7e3e393106) --- tests/cli/test_policy_commands.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/cli/test_policy_commands.py b/tests/cli/test_policy_commands.py index b083d8c5..d54bdb16 100644 --- a/tests/cli/test_policy_commands.py +++ b/tests/cli/test_policy_commands.py @@ -383,7 +383,8 @@ def _test_dsl_policy_delete(self): def _test_get_approval_requests(self): time.sleep(10) runner = CliRunner() - result = runner.invoke(cli, ["get", "approval-requests"]) + + result = runner.invoke(cli, ["get", "approval-requests", "--out=json"]) if result.exit_code: cli_res_dict = {"Output": result.output, 
"Exception": str(result.exception)} LOG.info(result.output) @@ -393,9 +394,17 @@ def _test_get_approval_requests(self): ) ) pytest.fail("Policy get approval request call from python file failed") + assert ( self.approval_request_name in result.output ), "Approval Request {} not found".format(self.approval_request_name) + res = json.loads(result.output) + for entity in res["entities"]: + LOG.info(entity["spec"]["name"]) + if self.approval_request_name in entity["spec"]["name"]: + self.approval_request_name = entity["spec"]["name"] + self.approval_request_uuid = entity["metadata"]["uuid"] + break LOG.info("Success") def _test_execution_check_in_get_policy_execution(self): @@ -500,6 +509,7 @@ def _test_approve_policy(self, uuid=None): "approve", "approval-request", self.approval_request_name, + "--uuid={}".format(self.approval_request_uuid), ], ) From 687b73d6aca55059d5fef1fb1d993a69c240de1e Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Tue, 2 Jul 2024 10:20:44 +0530 Subject: [PATCH 08/24] CALM-17380: Support for Playground functionality in CALM-DSL (#477) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Issue Link: https://jira.nutanix.com/browse/CALM-17380 Description: 1. Added support for testing escripts, shell scripts, powershell scripts, python remote tasks using run-script cli command. 2. `calm run-script` command has following flags: -> `(-t/--type)` - type of script to be tested, supported values [escript, shell, powershell, python] -> `(-f/--file)` - script file path to test -> `(-e/--endpoint)` - endpoint file path (required for shell, powershell, python scripts) to define target machine -> `-p/--project` - reference project to be used while running script. Defaults to current context project in DSL. 4. 
Following support is being added through improvement: -> Test run of escript file -> Test run of shell script file with basic auth linux endpoint -> Test run of shell script file with ssh key based linux endpoint -> Test run of powershell script file with basic auth windows endpoint -> Test run of python remote script file with basic auth linux endpoint -> Test run of python remote script file with ssh key based linux endpoint 5. Following support is not present through UI. Hence, not implementing in DSL. -> Python remote tasks don't run on windows endpoint. -> Windows endpoint only have basic auth and no ssh based authentication. 6. Added unit test to test `calm run-script` command. image Example description of Run Script CLI commands: 1. Command to test escript file (doesn't require endpoint) Screenshot 2024-06-19 at 5 34 08 PM 2. Command to test shell script with linux endpoint having basic auth Screenshot 2024-06-19 at 5 34 57 PM 3. Command to test shell script with linux endpoint having ssh auth Screenshot 2024-06-19 at 5 35 25 PM 4. Command to test powershell script with windows endpoint Screenshot 2024-06-19 at 5 36 07 PM 5. 
Command to test python script with linux endpoint Screenshot 2024-06-19 at 5 36 49 PM (cherry picked from commit 79e5140b04f01e59b97b030de8073f496203306b) --- calm/dsl/api/blueprint.py | 18 ++ calm/dsl/cli/constants.py | 11 + calm/dsl/cli/main.py | 50 +++ calm/dsl/cli/run_script.py | 297 ++++++++++++++++++ .../init/blueprint/ahv_blueprint.py.jinja2 | 4 +- .../ahv_single_vm_blueprint.py.jinja2 | 2 +- tests/unit/run_script/blueprint.py | 104 ++++++ .../linux_endpoint_with_basic_cred.py | 15 + .../endpoints/linux_endpoint_with_ssh_cred.py | 21 ++ .../windows_endpoint_with_basic_cred.py | 18 ++ tests/unit/run_script/scripts/escript.py | 1 + .../run_script/scripts/powershell_script.ps1 | 1 + tests/unit/run_script/scripts/shell_script.sh | 1 + .../run_script/test_run_script_command.py | 200 ++++++++++++ 14 files changed, 740 insertions(+), 3 deletions(-) create mode 100644 calm/dsl/cli/run_script.py create mode 100644 tests/unit/run_script/blueprint.py create mode 100644 tests/unit/run_script/endpoints/linux_endpoint_with_basic_cred.py create mode 100644 tests/unit/run_script/endpoints/linux_endpoint_with_ssh_cred.py create mode 100644 tests/unit/run_script/endpoints/windows_endpoint_with_basic_cred.py create mode 100644 tests/unit/run_script/scripts/escript.py create mode 100644 tests/unit/run_script/scripts/powershell_script.ps1 create mode 100644 tests/unit/run_script/scripts/shell_script.sh create mode 100644 tests/unit/run_script/test_run_script_command.py diff --git a/calm/dsl/api/blueprint.py b/calm/dsl/api/blueprint.py index 6f36071f..dbc209f1 100644 --- a/calm/dsl/api/blueprint.py +++ b/calm/dsl/api/blueprint.py @@ -37,6 +37,8 @@ def __init__(self, connection): self.PROTECTION_POLICY_LIST = ( self.ITEM + "/app_profile/{}/config_spec/{}/app_protection_policies/list" ) + self.RUN_SCRIPT = self.PREFIX + "/{}/run_script" + self.GET_SCRIPT = self.RUN_SCRIPT + "/output/{}/{}" # TODO https://jira.nutanix.com/browse/CALM-17178 # Blueprint creation timeout is dependent 
on payload. @@ -384,3 +386,19 @@ def variable_values_from_trlid(self, uuid, var_uuid, req_id, trl_id): return self.connection._call( url, verify=False, method=REQUEST.METHOD.GET, ignore_error=True ) + + def run_scripts(self, bp_uuid, payload): + return self.connection._call( + self.RUN_SCRIPT.format(bp_uuid), + verify=False, + request_json=payload, + method=REQUEST.METHOD.POST, + ) + + def test_scripts(self, bp_uuid, trl_id, request_id): + return self.connection._call( + self.GET_SCRIPT.format(bp_uuid, trl_id, request_id), + verify=False, + method=REQUEST.METHOD.GET, + ignore_error=True, + ) diff --git a/calm/dsl/cli/constants.py b/calm/dsl/cli/constants.py index 635ec2d8..0bc11fcf 100644 --- a/calm/dsl/cli/constants.py +++ b/calm/dsl/cli/constants.py @@ -761,3 +761,14 @@ class ENTITY_FILTER_EXPRESSION_LIST: }, }, ] + + +class TEST_SCRIPTS: + class STATUS: + SUCCESS = "SUCCESS" + PENDING = "PENDING" + RUNNING = "RUNNING" + ERROR = "ERROR" + + TERMINAL_STATES = [STATUS.SUCCESS, STATUS.ERROR] + TYPE = ["escript", "shell", "powershell", "python"] diff --git a/calm/dsl/cli/main.py b/calm/dsl/cli/main.py index e3315573..26ff9871 100644 --- a/calm/dsl/cli/main.py +++ b/calm/dsl/cli/main.py @@ -24,6 +24,7 @@ from .utils import FeatureFlagGroup, highlight_text from calm.dsl.store import Version from calm.dsl.config.init_config import get_init_config_handle +from calm.dsl.cli.run_script import * CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) @@ -587,3 +588,52 @@ def sync(): def verify(): """Verify an account""" pass + + +@main.command("run-script") +@click.option( + "--type", + "-t", + "script_type", + type=click.Choice(test_scripts_type()), + default="escript", + help="Type of script that need to be tested.", +) +@click.option( + "--file", + "-f", + "script_file", + type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), + required=True, + help="File path of script that need to be tested", +) +@click.option( + "--project", + "-p", + 
"project_name", + help="Project used by test scripts", +) +@click.option( + "--endpoint", + "-e", + "endpoint_file", + type=click.Path(exists=True, file_okay=True, dir_okay=False, readable=True), + help="Endpoint to be used while testing shell scripts, powershell scripts and python remote tasks", +) +def run_script(script_type, script_file, project_name, endpoint_file): + """Tests escripts/shell_scripts/powershell/python scripts for syntactical errors""" + if script_type == "escript": + test_escript(script_file, project_name) + + elif script_type == "shell": + test_shell_script(script_file, endpoint_file, project_name) + + elif script_type == "powershell": + test_powershell_script(script_file, endpoint_file, project_name) + + elif script_type == "python": + test_python_script(script_file, endpoint_file, project_name) + + else: + LOG.error("Invalid script type {}. Use one of {}".format(test_scripts_type())) + sys.exit(-1) diff --git a/calm/dsl/cli/run_script.py b/calm/dsl/cli/run_script.py new file mode 100644 index 00000000..470c0270 --- /dev/null +++ b/calm/dsl/cli/run_script.py @@ -0,0 +1,297 @@ +import uuid +import sys +import time +import click +import os + +from .utils import highlight_text +from calm.dsl.api import get_api_client +from calm.dsl.builtins import Ref +from calm.dsl.builtins.models.helper import common as common_helper +from calm.dsl.cli.constants import TEST_SCRIPTS +from calm.dsl.log import get_logging_handle +from calm.dsl.cli.endpoints import compile_endpoint + +LOG = get_logging_handle(__name__) + + +def test_scripts_type(): + """Provides the list of scripts that can be run in playground""" + return TEST_SCRIPTS.TYPE + + +def _display_script_run_status(state, response): + """Displays the script run status on the console. 
+ + Args: + -> state (str): state of the script run + -> response (dict): response of the script run + + """ + if not (state or response): + LOG.error("Unable to fetch proper api response") + sys.exit(-1) + + LOG.info("Script execution reached state: {}".format(state)) + + status_code = str(response.get("code", "")) + + if state == TEST_SCRIPTS.STATUS.ERROR: + response = response.get("error", response) + + message_list = response.get("message_list", []) + message = response.get("message", "") + reason = "" + final_output = "" + + if message_list: + if message_list[0].get("details", {}): + final_output = message_list[0]["details"].get("final_output", "") + message = message_list[0].get("message", "") + reason = message_list[0].get("reason", "") + LOG.debug( + "Script execution failed. Status code: {} reason: {}".format( + status_code, message_list + ) + ) + else: + final_output = response.get("output", "") + + click.echo("\n----Script Run Summary----\n") + click.echo("Status: " + highlight_text(state)) + if final_output: + click.echo("Final Output: " + final_output) + if status_code: + click.echo("Status Code: " + status_code) + if message: + click.echo("Message: " + message) + if reason: + click.echo("Reason: " + reason) + + +def _poll_test_scripts(bp_uuid, trl_id, request_id, poll_interval=5, maxWait=500): + """Polls the test script status for every poll_interval seconds until maxWait seconds. 
+ + Args: + -> bp_uuid (str): blueprint uuid + -> trl_id (str): trl id + -> request_id (str): request id + -> poll_interval (int): interval in seconds to poll the status + -> maxWait (int): maximum time to wait for the script to complete + """ + client = get_api_client() + count = 0 + status = None + response = {} + + while count < maxWait: + LOG.info("Polling to get script run status") + res, err = client.blueprint.test_scripts(bp_uuid, trl_id, request_id) + if err: + return err["error"].get("state", TEST_SCRIPTS.STATUS.ERROR), err + + response = res.json() + status = response.get("state", None) + + if status and status in TEST_SCRIPTS.TERMINAL_STATES: + return status, response + + count += poll_interval + time.sleep(poll_interval) + + return status, response + + +def _run_script(bp_uuid, payload): + """Fetches trl_id and request_id required to run script on target machine and then runs it on target machine. + Args: + -> bp_uuid (str): blueprint uuid + -> payload (dict): payload to run the script + + Returns: + -> status and response of script run + """ + client = get_api_client() + res, err = client.blueprint.run_scripts(bp_uuid, payload) + + if err: + LOG.error( + "Script run failed due to: [{}] - {}".format(err["code"], err["error"]) + ) + sys.exit("Script run failed") + + response = res.json() + trl_id = None + request_id = None + + if response.get("status", {}): + trl_id = response["status"].get("trl_id", None) + request_id = response["status"].get("request_id", None) + + return _poll_test_scripts(bp_uuid, trl_id, request_id) + + +def _test_script(script_type, script_file, endpoint_file, project_name): + """Constructs payload required to test script run and displays it on console + Args: + -> script_type (str): type of script to run + -> script_file (str): path to script file + -> endpoint_file (str): path to endpoint file + -> project_name (optional) (str): name of the reference project + """ + EndpointPayload = compile_endpoint(endpoint_file) + + 
endpoint_attrs = EndpointPayload["spec"]["resources"].get("attrs", {}) + + if not endpoint_attrs: + LOG.error("Endpoint attributes not found in the endpoint file") + sys.exit(-1) + + machine = None + cred_name = None + username = None + password = None + secret = {} + protocol = endpoint_attrs.get("connection_protocol", None) + port = endpoint_attrs.get("port", None) + + if endpoint_attrs.get("values", []): + machine = endpoint_attrs["values"][0] + else: + LOG.error("Target VM IP not found in the endpoint file") + sys.exit(-1) + + if endpoint_attrs.get("credential_definition_list", []): + username = endpoint_attrs["credential_definition_list"][0].get("username", None) + cred_type = endpoint_attrs["credential_definition_list"][0].get("type", "") + if cred_type == "KEY": + secret = endpoint_attrs["credential_definition_list"][0].get("secret", {}) + cred_name = endpoint_attrs["credential_definition_list"][0].get("name", "") + else: + password = ( + endpoint_attrs["credential_definition_list"][0] + .get("secret", {}) + .get("value", None) + ) + + with open(os.path.abspath(script_file), "r") as scriptf: + script_data = scriptf.read().strip() + if script_data.startswith('"') and script_data.endswith('"'): + script_data = script_data[1:-1] + + # attach project from current context if no project is supplied + if not project_name: + project_cache_data = common_helper.get_cur_context_project() + project_name = project_cache_data.get("name") + + project_ref = Ref.Project(project_name) + bp_uuid = str(uuid.uuid4()) + + payload = { + "metadata": { + "kind": "blueprint", + "project_reference": project_ref, + "uuid": bp_uuid, + }, + "spec": { + "targetDetails": { + "from_blueprint": False, + "port": port, + "machine": machine, + "loginDetails": {}, + }, + "attrs": { + "script_type": script_type, + "script": script_data, + }, + }, + } + + # Connection Protocol are only present in windows endpoints. Two valid protocols are HTTP and HTTPS. 
+ if protocol: + payload["spec"]["targetDetails"]["protocol"] = protocol + + if secret: + payload["spec"]["targetDetails"]["from_blueprint"] = True + payload["spec"]["targetDetails"]["creds"] = { + "username": username, + "secret": secret, + "cred_class": "static", + "type": cred_type, + "name": cred_name, + "uuid": str(uuid.uuid4()), + } + else: + payload["spec"]["targetDetails"]["loginDetails"] = { + "username": username, + "password": password, + } + + state, response = _run_script(bp_uuid, payload) + _display_script_run_status(state, response) + + +def test_escript(script_file, project_name): + """Tests the execution of escript file + Args: + -> script_file (str): path to escript file + -> project_name (optional) (str): name of the reference project + """ + with open(os.path.abspath(script_file), "r") as scriptf: + script_data = scriptf.read() + + # attach project from current context if no project is supplied + if not project_name: + project_cache_data = common_helper.get_cur_context_project() + project_name = project_cache_data.get("name") + + project_ref = Ref.Project(project_name) + bp_uuid = str(uuid.uuid4()) + + payload = { + "metadata": { + "kind": "blueprint", + "project_reference": project_ref, + "uuid": bp_uuid, + }, + "spec": { + "targetDetails": {"from_blueprint": False}, + "attrs": { + "script_type": "static_py3", + "script": script_data, + }, + }, + } + + state, response = _run_script(bp_uuid, payload) + _display_script_run_status(state, response) + + +def test_python_script(script_file, endpoint_file, project_name): + """Tests the execution of python remote scripts on linux machine + Args: + -> script_file (str): path to script file + _> endpoint_file (str): path to endpoint file + -> project_name (optional) (str): name of the reference project + """ + _test_script("python_remote", script_file, endpoint_file, project_name) + + +def test_shell_script(script_file, endpoint_file, project_name): + """Tests the execution of shell scripts on linux 
machine + Args: + -> script_file (str): path to script file + _> endpoint_file (str): path to endpoint file + -> project_name (optional) (str): name of the reference project + """ + _test_script("sh", script_file, endpoint_file, project_name) + + +def test_powershell_script(script_file, endpoint_file, project_name): + """Tests the execution of powershell scripts on windows machine + Args: + -> script_file (str): path to script file + _> endpoint_file (str): path to endpoint file + -> project_name (optional) (str): name of the reference project + """ + _test_script("npsscript", script_file, endpoint_file, project_name) diff --git a/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 b/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 index 8cd85b17..ad86ef3d 100644 --- a/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 +++ b/calm/dsl/init/blueprint/ahv_blueprint.py.jinja2 @@ -192,7 +192,7 @@ class {{bp_name}}Substrate(Substrate): # Step 1 Task.Exec.escript( - name="Task1", script="print 'Pre Create task runs before VM is created'" + name="Task1", script="print('Pre Create task runs before VM is created')" ) @action @@ -200,7 +200,7 @@ class {{bp_name}}Substrate(Substrate): # Step 1 Task.Exec.escript( - name="Task1", script="print 'Post delete task runs after VM is deleted'" + name="Task1", script="print('Post delete task runs after VM is deleted')" ) diff --git a/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 b/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 index a6f4cf8b..f6c6490f 100644 --- a/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 +++ b/calm/dsl/init/blueprint/ahv_single_vm_blueprint.py.jinja2 @@ -122,7 +122,7 @@ class {{bp_name}}Profile(VmProfile): # Step 1 Task.Exec.escript( - name="Task1", script="print 'Pre Create task runs before VM is created'" + name="Task1", script="print('Pre Create task runs before VM is created')" ) # Profile Actions diff --git a/tests/unit/run_script/blueprint.py b/tests/unit/run_script/blueprint.py 
new file mode 100644 index 00000000..3350f251 --- /dev/null +++ b/tests/unit/run_script/blueprint.py @@ -0,0 +1,104 @@ +import json + +from calm.dsl.builtins import Service, Package, Substrate +from calm.dsl.builtins import Deployment, Profile, Blueprint, Metadata +from calm.dsl.builtins import ref, basic_cred +from calm.dsl.builtins import read_local_file +from calm.dsl.builtins import vm_disk_package, AhvVmDisk, AhvVmNic +from calm.dsl.builtins import AhvVmGC, AhvVmResources, AhvVm +from calm.dsl.builtins import AhvVmGC, AhvVmResources, AhvVm, Ref + + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + +PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] +PROJECT_NAME = PROJECT["NAME"] +NTNX_LOCAL_ACCOUNT = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] +SUBNET_NAME = NTNX_LOCAL_ACCOUNT["SUBNETS"][1]["NAME"] +CLUSTER_NAME = NTNX_LOCAL_ACCOUNT["SUBNETS"][1]["CLUSTER"] + +# SSH Credentials +CENTOS_USER = "centos" +CENTOS_KEY = read_local_file(".tests/keys/centos") +CENTOS_PUBLIC_KEY = read_local_file(".tests/keys/centos_pub") +CentosCred = basic_cred( + CENTOS_USER, + CENTOS_KEY, + name="Centos", + type="KEY", + default=True, +) + +# OS Image details for VM +CENTOS_IMAGE_SOURCE = "http://download.nutanix.com/calm/CentOS-7-x86_64-1810.qcow2" +CentosPackage = vm_disk_package( + name="centos_disk", + config={"image": {"source": CENTOS_IMAGE_SOURCE}}, +) + + +class HelloService(Service): + pass + + +class HelloPackage(Package): + services = [ref(HelloService)] + + +class HelloVmResources(AhvVmResources): + + memory = 4 + vCPUs = 2 + cores_per_vCPU = 1 + disks = [ + AhvVmDisk.Disk.Scsi.cloneFromVMDiskPackage(CentosPackage, bootable=True), + ] + nics = [AhvVmNic.DirectNic.ingress(subnet=SUBNET_NAME, cluster=CLUSTER_NAME)] + + guest_customization = AhvVmGC.CloudInit( + config={ + "users": [ + { + "name": CENTOS_USER, + "ssh-authorized-keys": [CENTOS_PUBLIC_KEY], + "sudo": ["ALL=(ALL) NOPASSWD:ALL"], + } + ] + } + ) + + +class HelloVm(AhvVm): + resources = HelloVmResources 
+ + +class HelloSubstrate(Substrate): + """AHV VM Substrate""" + + provider_type = "AHV_VM" + provider_spec = HelloVm + + +class HelloDeployment(Deployment): + """Sample Deployment""" + + packages = [ref(HelloPackage)] + substrate = ref(HelloSubstrate) + + +class HelloProfile(Profile): + deployments = [HelloDeployment] + + +class Hello(Blueprint): + """Sample blueprint for Hello app using AHV VM""" + + credentials = [CentosCred] + services = [HelloService] + packages = [HelloPackage, CentosPackage] + substrates = [HelloSubstrate] + profiles = [HelloProfile] + + +class BpMetadata(Metadata): + project = Ref.Project(PROJECT_NAME) diff --git a/tests/unit/run_script/endpoints/linux_endpoint_with_basic_cred.py b/tests/unit/run_script/endpoints/linux_endpoint_with_basic_cred.py new file mode 100644 index 00000000..a0ce5a27 --- /dev/null +++ b/tests/unit/run_script/endpoints/linux_endpoint_with_basic_cred.py @@ -0,0 +1,15 @@ +import json + +from calm.dsl.runbooks import read_local_file, basic_cred +from calm.dsl.runbooks import CalmEndpoint as Endpoint + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + + +linux_ip = DSL_CONFIG["EXISTING_MACHINE"]["IP_2"] +CRED_USERNAME = DSL_CONFIG["EXISTING_MACHINE"]["CREDS"]["LINUX"]["USERNAME"] +CRED_PASSWORD = DSL_CONFIG["EXISTING_MACHINE"]["CREDS"]["LINUX"]["PASSWORD"] + +LinuxCred = basic_cred(CRED_USERNAME, CRED_PASSWORD, name="linux_cred") + +LinuxEndpoint = Endpoint.Linux.ip([linux_ip], cred=LinuxCred) diff --git a/tests/unit/run_script/endpoints/linux_endpoint_with_ssh_cred.py b/tests/unit/run_script/endpoints/linux_endpoint_with_ssh_cred.py new file mode 100644 index 00000000..0c1be83c --- /dev/null +++ b/tests/unit/run_script/endpoints/linux_endpoint_with_ssh_cred.py @@ -0,0 +1,21 @@ +import json + +from calm.dsl.runbooks import read_local_file, basic_cred +from calm.dsl.runbooks import CalmEndpoint as Endpoint + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + + +linux_ip = "" # ip address 
will be filled while executing unit test: tests/unit/run_script/test_run_script_command.py +CRED_USERNAME = "centos" +SSH_KEY = read_local_file(".tests/keys/centos") + +LinuxCred = basic_cred( + CRED_USERNAME, + SSH_KEY, + name="linux_cred", + type="KEY", + default=True, +) + +LinuxEndpoint = Endpoint.Linux.ip([linux_ip], cred=LinuxCred) diff --git a/tests/unit/run_script/endpoints/windows_endpoint_with_basic_cred.py b/tests/unit/run_script/endpoints/windows_endpoint_with_basic_cred.py new file mode 100644 index 00000000..cd31170e --- /dev/null +++ b/tests/unit/run_script/endpoints/windows_endpoint_with_basic_cred.py @@ -0,0 +1,18 @@ +import json + +from calm.dsl.runbooks import read_local_file, basic_cred +from calm.dsl.runbooks import CalmEndpoint as Endpoint + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) + + +windows_ip = DSL_CONFIG["EXISTING_MACHINE"]["WIN_IP_ADDR"] +CRED_USERNAME = DSL_CONFIG["EXISTING_MACHINE"]["CREDS"]["WINDOWS"]["USERNAME"] +CRED_PASSWORD = DSL_CONFIG["EXISTING_MACHINE"]["CREDS"]["WINDOWS"]["PASSWORD"] + + +WindowsCred = basic_cred(CRED_USERNAME, CRED_PASSWORD, name="windows_cred") + +WindowsEndpoint = Endpoint.Windows.ip( + [windows_ip], connection_protocol="HTTPS", cred=WindowsCred +) diff --git a/tests/unit/run_script/scripts/escript.py b/tests/unit/run_script/scripts/escript.py new file mode 100644 index 00000000..a52d036a --- /dev/null +++ b/tests/unit/run_script/scripts/escript.py @@ -0,0 +1 @@ +print("Test") diff --git a/tests/unit/run_script/scripts/powershell_script.ps1 b/tests/unit/run_script/scripts/powershell_script.ps1 new file mode 100644 index 00000000..87245193 --- /dev/null +++ b/tests/unit/run_script/scripts/powershell_script.ps1 @@ -0,0 +1 @@ +dir \ No newline at end of file diff --git a/tests/unit/run_script/scripts/shell_script.sh b/tests/unit/run_script/scripts/shell_script.sh new file mode 100644 index 00000000..013c184c --- /dev/null +++ b/tests/unit/run_script/scripts/shell_script.sh @@ -0,0 +1 @@ 
+pwd \ No newline at end of file diff --git a/tests/unit/run_script/test_run_script_command.py b/tests/unit/run_script/test_run_script_command.py new file mode 100644 index 00000000..893fce96 --- /dev/null +++ b/tests/unit/run_script/test_run_script_command.py @@ -0,0 +1,200 @@ +import json +import os +import traceback +import pytest +import uuid +from click.testing import CliRunner + +from calm.dsl.cli import main as cli +from calm.dsl.builtins import read_local_file +from tests.utils import Application as ApplicationHelper +from calm.dsl.cli.main import get_api_client +from calm.dsl.log import get_logging_handle + +LOG = get_logging_handle(__name__) + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) +NTNX_LOCAL_AZ = DSL_CONFIG["ACCOUNTS"]["NTNX_LOCAL_AZ"] + +LINUX_ENDPOINT_BASIC_CRED_FILE = os.path.abspath( + "tests/unit/run_script/endpoints/linux_endpoint_with_basic_cred.py" +) +LINUX_ENDPOINT_SSH_CRED_FILE = os.path.abspath( + "tests/unit/run_script/endpoints/linux_endpoint_with_ssh_cred.py" +) +WINDOWS_ENDPOINT_BASIC_CRED_FILE = os.path.abspath( + "tests/unit/run_script/endpoints/windows_endpoint_with_basic_cred.py" +) + +ESCRIPT_FILE = os.path.abspath("tests/unit/run_script/scripts/escript.py") +SHELL_SCRIPT_FILE = os.path.abspath("tests/unit/run_script/scripts/shell_script.sh") +POWERSHELL_SCRIPT_FILE = os.path.abspath( + "tests/unit/run_script/scripts/powershell_script.ps1" +) + +BP_FILE_PATH = os.path.abspath("tests/unit/run_script/blueprint.py") + + +class TestRunScriptCommands: + app_helper = ApplicationHelper() + + @classmethod + def setup_class(cls): + runner = CliRunner() + cls.app_name = "Test_DSL_App_Run_Script_" + str(uuid.uuid4())[-10:] + LOG.info("Creating app {} for ssh target machine".format(cls.app_name)) + result = runner.invoke( + cli, + [ + "create", + "app", + "--file={}".format(BP_FILE_PATH), + "--name={}".format(cls.app_name), + ], + ) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": 
str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + + cls.app_helper._wait_for_non_busy_state(cls.app_name) + params = {"filter": "name=={}".format(cls.app_name)} + client = get_api_client() + res, err = client.application.list(params=params) + if err: + pytest.fail("[{}] - {}".format(err["code"], err["error"])) + + response = res.json() + entities = response.get("entities", None) + app = None + if entities: + app = entities[0] + else: + pytest.fail("Application {} not found".format(cls.app_name)) + + app_uuid = app["metadata"]["uuid"] + res, err = client.application.read(app_uuid) + if err: + pytest.fail(err) + response = res.json() + # Reading ip address of app created + cls.ip_address = response["status"]["resources"]["deployment_list"][0][ + "substrate_configuration" + ]["element_list"][0].get("address", "") + if not cls.ip_address: + pytest.fail( + "Unable to find ip address of application {}".format(cls.app_name) + ) + + LOG.info( + "Setting linux machine ip address {} in endpoint file: {}".format( + cls.ip_address, LINUX_ENDPOINT_SSH_CRED_FILE + ) + ) + with open(LINUX_ENDPOINT_SSH_CRED_FILE, "r") as fd: + data = fd.read() + data = data.replace( + 'linux_ip = ""', "linux_ip = '{}'".format(cls.ip_address) + ) + + with open(LINUX_ENDPOINT_SSH_CRED_FILE, "w+") as fd: + fd.write(data) + + @classmethod + def teardown_class(cls): + runner = CliRunner() + + LOG.info("Restoring endpoint file: {}".format(LINUX_ENDPOINT_SSH_CRED_FILE)) + with open(LINUX_ENDPOINT_SSH_CRED_FILE, "r") as fd: + data = fd.read() + data = data.replace( + "linux_ip = '{}'".format(cls.ip_address), 'linux_ip = ""' + ) + + with open(LINUX_ENDPOINT_SSH_CRED_FILE, "w+") as fd: + fd.write(data) + + cls.app_helper._wait_for_non_busy_state(cls.app_name) + LOG.info("Deleting App {} ".format(cls.app_name)) + result = 
runner.invoke(cli, ["delete", "app", cls.app_name]) + assert result.exit_code == 0 + LOG.info("App {} deleted successfully".format(cls.app_name)) + + @pytest.mark.parametrize( + "SCRIPT_TYPE, " "SCRIPT_FILE, ENDPOINT_FILE", + [ + pytest.param("escript", ESCRIPT_FILE, None), + pytest.param("shell", SHELL_SCRIPT_FILE, LINUX_ENDPOINT_BASIC_CRED_FILE), + pytest.param("shell", SHELL_SCRIPT_FILE, LINUX_ENDPOINT_SSH_CRED_FILE), + pytest.param( + "powershell", POWERSHELL_SCRIPT_FILE, WINDOWS_ENDPOINT_BASIC_CRED_FILE + ), + pytest.param("python", ESCRIPT_FILE, LINUX_ENDPOINT_BASIC_CRED_FILE), + ], + ) + def test_scripts(self, SCRIPT_TYPE, SCRIPT_FILE, ENDPOINT_FILE): + """ + Tests `calm run-script` cli command for following cases: + -> Successful run of escript file + -> Successful run of shell script file with basic auth linux endpoint + -> Successful run of shell script file with ssh key based linux endpoint + -> Successful run of powershell script file with basic auth windows endpoint + -> Successful run of python remote script file with basic auth linux endpoint + + Note: + -> Python remote tasks don't run on windows endpoint. + -> Windows endpoint only have basic auth and no ssh based authentication. 
+ """ + runner = CliRunner() + LOG.info("Running 'calm run-script' command for {} script".format(SCRIPT_TYPE)) + if SCRIPT_TYPE == "escript": + LOG.info("Running 'calm run-script' command for {}".format(SCRIPT_TYPE)) + LOG.info("Script file used: {}".format(SCRIPT_FILE)) + result = runner.invoke( + cli, + [ + "run-script", + "--type={}".format(SCRIPT_TYPE), + "--file={}".format(SCRIPT_FILE), + ], + ) + else: + LOG.info( + "Running 'calm run-script' command for {} script".format(SCRIPT_TYPE) + ) + LOG.info( + "Script file used: {}, Endpoint file used: {}".format( + SCRIPT_FILE, ENDPOINT_FILE + ) + ) + result = runner.invoke( + cli, + [ + "run-script", + "--type={}".format(SCRIPT_TYPE), + "--file={}".format(SCRIPT_FILE), + "--endpoint={}".format(ENDPOINT_FILE), + ], + ) + if result.exit_code: + cli_res_dict = {"Output": result.output, "Exception": str(result.exception)} + LOG.debug( + "Cli Response: {}".format( + json.dumps(cli_res_dict, indent=4, separators=(",", ": ")) + ) + ) + LOG.debug( + "Traceback: \n{}".format( + "".join(traceback.format_tb(result.exc_info[2])) + ) + ) + + assert "Status: SUCCESS" in result.output, "failed to test shell script" From cffd01e21210ec0bde4c370edd2faa21c2f30d13 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 3 Jul 2024 18:49:14 +0530 Subject: [PATCH 09/24] added api key flag to store location of api key (#338) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit CALM-39463 : https://jira.nutanix.com/browse/CALM-39463 Summary: 1. Added a new flag(api key) to capture the api key file location containing (name, token) to authenticate saas instance via dsl 2. Successfull authentication via saas api token showing username used to login (taking port 443 by default when api key location is provided) ![Screenshot 2023-11-29 at 2 57 50 PM](https://github.com/ideadevice/calm-dsl/assets/123161845/df8c732c-46c4-45a4-b94c-7bd5ec8532e2) 3. 
Successfull authentication of pc (backward compatibility check) Screenshot 2023-11-09 at 10 13 42 AM 4. Check set config command for saas api token Screenshot 2023-11-09 at 10 17 11 AM 5. Error handling check for incorrect or badly formatted keyfile ![Screenshot 2023-11-29 at 3 16 23 PM](https://github.com/ideadevice/calm-dsl/assets/123161845/f7c5e526-2a8e-48a1-8704-614e2bcf4606) ![Screenshot 2023-11-29 at 3 18 57 PM](https://github.com/ideadevice/calm-dsl/assets/123161845/5263e670-8556-458f-bb4c-06b13ea23533) 6. If saas port is provided without api key location. Throw a warning to supply api key location alongwith authentication error. ![Screenshot 2023-11-29 at 2 54 37 PM](https://github.com/ideadevice/calm-dsl/assets/123161845/018f8bb5-4a36-4327-9712-92ab9d47c024) (cherry picked from commit 542d1714e1d34e72c6fbde2435d12740b68806b8) --- calm/dsl/api/handle.py | 7 +- calm/dsl/api/util.py | 52 +++++++++++ calm/dsl/builtins/models/blueprint_payload.py | 2 +- calm/dsl/cli/bps.py | 2 +- calm/dsl/cli/init_command.py | 87 +++++++++++++++++-- calm/dsl/cli/main.py | 2 +- calm/dsl/cli/runbooks.py | 2 +- calm/dsl/config/config.ini.jinja2 | 5 +- calm/dsl/config/config.py | 6 ++ calm/dsl/config/constants.py | 1 + calm/dsl/config/context.py | 5 +- calm/dsl/constants.py | 4 +- calm/dsl/store/cache.py | 7 +- 13 files changed, 162 insertions(+), 20 deletions(-) diff --git a/calm/dsl/api/handle.py b/calm/dsl/api/handle.py index 5f46e61a..067a4cfc 100644 --- a/calm/dsl/api/handle.py +++ b/calm/dsl/api/handle.py @@ -38,6 +38,7 @@ from .approval_request import ApprovalRequestAPI from .provider import ProviderAPI from .quotas import QuotasAPI +from .util import get_auth_info class ClientHandle: @@ -132,8 +133,10 @@ def get_api_client(): pc_ip = server_config.get("pc_ip") pc_port = server_config.get("pc_port") - username = server_config.get("pc_username") - password = server_config.get("pc_password") + api_key_location = server_config.get("api_key_location", None) + cred = 
get_auth_info(api_key_location) + username = cred.get("username") + password = cred.get("password") update_api_client(host=pc_ip, port=pc_port, auth=(username, password)) diff --git a/calm/dsl/api/util.py b/calm/dsl/api/util.py index b0fe152c..0ef20d71 100644 --- a/calm/dsl/api/util.py +++ b/calm/dsl/api/util.py @@ -1,7 +1,11 @@ import copy +import json import sys +import os from calm.dsl.log import get_logging_handle +from calm.dsl.config import get_context +from calm.dsl.constants import DSL_CONFIG LOG = get_logging_handle(__name__) @@ -504,6 +508,54 @@ def patch_secrets(resources, secret_map, secret_variables, existing_secrets=[]): return resources +def get_auth_info(api_key_location): + """ + Reads name+token from specified api-key file (if it exists) as username+password + Else it reads username+password from config. + Args: + api_key_location (str): location of api-key + Returns: + dict: containing username and password for authentication. + """ + + if api_key_location not in [None, DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME]: + if not os.path.exists(api_key_location): + LOG.error("{} not found".format(api_key_location)) + sys.exit(-1) + + with open(api_key_location, "r") as f: + auth_creds = f.read() + auth_creds = json.loads(auth_creds) + + if not auth_creds.get("name"): + LOG.error( + "Badly formatted key file. Key name not present in {}".format( + api_key_location + ) + ) + sys.exit(-1) + + if not auth_creds.get("token"): + LOG.error( + "Badly formatted key file. 
Token not present in {}".format( + api_key_location + ) + ) + sys.exit(-1) + + cred = {"username": auth_creds.get("name"), "password": auth_creds.get("token")} + # Read username/password from config when api-key is not supplied + else: + context = get_context() + server_config = context.get_server_config() + cred = { + "username": server_config.get("pc_username"), + "password": server_config.get("pc_password"), + } + + return cred + + def _create_task_name_substrate_map(bp_payload, entity_type, **kwargs): vm_power_action_uuid_substrate_map = kwargs.get( "vm_power_action_uuid_substrate_map", {} diff --git a/calm/dsl/builtins/models/blueprint_payload.py b/calm/dsl/builtins/models/blueprint_payload.py index 089a518a..ca35d334 100644 --- a/calm/dsl/builtins/models/blueprint_payload.py +++ b/calm/dsl/builtins/models/blueprint_payload.py @@ -67,7 +67,7 @@ def create_blueprint_payload(UserBlueprint, metadata={}): # Project will be taken from config if not provided if not metadata.get("project_reference", {}): project_name = project_config["name"] - if project_name == DSL_CONFIG.EMPTY_PROJECT_NAME: + if project_name == DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.error(DSL_CONFIG.EMPTY_PROJECT_MESSAGE) sys.exit("Invalid project configuration") diff --git a/calm/dsl/cli/bps.py b/calm/dsl/cli/bps.py index 718b9a08..c1b7087a 100644 --- a/calm/dsl/cli/bps.py +++ b/calm/dsl/cli/bps.py @@ -329,7 +329,7 @@ def compile_blueprint(bp_file, brownfield_deployment_file=None): ] else: project_name = project_config["name"] - if project_name == DSL_CONFIG.EMPTY_PROJECT_NAME: + if project_name == DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.error(DSL_CONFIG.EMPTY_PROJECT_MESSAGE) sys.exit("Invalid project configuration") diff --git a/calm/dsl/cli/init_command.py b/calm/dsl/cli/init_command.py index eba1e623..417539ba 100644 --- a/calm/dsl/cli/init_command.py +++ b/calm/dsl/cli/init_command.py @@ -21,7 +21,8 @@ from calm.dsl.providers import get_provider_types from calm.dsl.store import Version 
from calm.dsl.constants import POLICY, STRATOS, DSL_CONFIG - +from calm.dsl.builtins import file_exists +from calm.dsl.api.util import get_auth_info from .main import init, set from calm.dsl.log import get_logging_handle, CustomLogging @@ -119,6 +120,13 @@ default=DEFAULT_CONNECTION_CONFIG["read_timeout"], help="Read timeout for api connections", ) +@click.option( + "--api-key", + "-ak", + "api_key_location", + default=None, + help="Path to api key file for authentication", +) def initialize_engine( ip, port, @@ -132,6 +140,7 @@ def initialize_engine( retries_enabled, connection_timeout, read_timeout, + api_key_location, ): """ \b @@ -148,6 +157,11 @@ def initialize_engine( viii.) CALM_DSL_DB_LOCATION: Default internal dsl db location """ + if api_key_location: + api_key_location = os.path.expanduser(api_key_location) + if not file_exists(api_key_location): + LOG.error("{} not found".format(api_key_location)) + sys.exit(-1) set_server_details( ip=ip, @@ -162,6 +176,7 @@ def initialize_engine( retries_enabled=retries_enabled, connection_timeout=connection_timeout, read_timeout=read_timeout, + api_key_location=api_key_location, ) init_db() sync_cache() @@ -191,17 +206,26 @@ def set_server_details( retries_enabled, connection_timeout, read_timeout, + api_key_location, ): if not (ip and port and username and password and project_name): click.echo("Please provide Calm DSL settings:\n") host = ip or click.prompt("Prism Central IP", default="") - port = port or click.prompt("Port", default="9440") - username = username or click.prompt("Username", default="admin") - password = password or click.prompt("Password", default="", hide_input=True) + + if api_key_location: + cred = get_auth_info(api_key_location) + username = cred.get("username") + password = cred.get("password") + port = DSL_CONFIG.SAAS_PORT + else: + port = port or click.prompt("Port", default="9440") + username = username or click.prompt("Username", default="admin") + password = password or 
click.prompt("Password", default="", hide_input=True) + project_name = project_name or click.prompt( - "Project", default=DSL_CONFIG.EMPTY_PROJECT_NAME + "Project", default=DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME ) # Do not prompt for init config variables, Take default values for init.ini file @@ -209,6 +233,12 @@ def set_server_details( local_dir = local_dir or get_default_local_dir() db_file = db_file or get_default_db_file() + if port == DSL_CONFIG.SAAS_PORT: + if api_key_location: + LOG.info("Authenticating with username: {}".format(username)) + else: + LOG.warning(DSL_CONFIG.SAAS_LOGIN_WARN) + LOG.info("Checking if Calm is enabled on Server") # Get temporary client handle @@ -285,7 +315,8 @@ def set_server_details( else: LOG.debug("Stratos is not supported") stratos_status = False - if project_name != DSL_CONFIG.EMPTY_PROJECT_NAME: + + if project_name != DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.info("Verifying the project details") project_name_uuid_map = client.project.get_name_uuid_map( params={"filter": "name=={}".format(project_name)} @@ -295,12 +326,17 @@ def set_server_details( sys.exit(-1) LOG.info("Project '{}' verified successfully".format(project_name)) + if api_key_location: + username = DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + password = DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + # Writing configuration to file set_dsl_config( host=host, port=port, username=username, password=password, + api_key_location=api_key_location or DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME, project_name=project_name, log_level=log_level, config_file=config_file, @@ -459,6 +495,13 @@ def init_dsl_runbook(runbook_name, dir_name): type=int, help="read timeout", ) +@click.option( + "--api-key", + "-ak", + "api_key_location", + default=None, + help="Path to api key file for authentication", +) @click.argument("config_file", required=False) def _set_config( host, @@ -473,6 +516,7 @@ def _set_config( retries_enabled, connection_timeout, read_timeout, + api_key_location, ): """writes the 
configuration to config files i.e. config.ini and init.ini @@ -488,13 +532,33 @@ def _set_config( # Update cache if there is change in host ip update_cache = host != server_config["pc_ip"] if host else False host = host or server_config["pc_ip"] - username = username or server_config["pc_username"] + + # Reading api key location and port from config if not provided + api_key_location = api_key_location or server_config.get( + "api_key_location", DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + ) + api_key_location = os.path.expanduser(api_key_location) port = port or server_config["pc_port"] - password = password or server_config["pc_password"] + cred = get_auth_info(api_key_location) + stored_username = cred.get("username") + stored_password = cred.get("password") + + # Resetting stored location of api key for (PC login case) + if username or password: + api_key_location = None + + username = username or stored_username + password = password or stored_password project_config = ContextObj.get_project_config() project_name = project_name or project_config.get("name") + if port == DSL_CONFIG.SAAS_PORT: + if api_key_location: + LOG.info("Authenticating with username: {}".format(username)) + else: + LOG.warning(DSL_CONFIG.SAAS_LOGIN_WARN) + LOG.info("Checking if Calm is enabled on Server") # Get temporary client handle @@ -571,7 +635,7 @@ def _set_config( LOG.debug("Stratos is not supported") stratos_status = False - if project_name != DSL_CONFIG.EMPTY_PROJECT_NAME: + if project_name != DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.info("Verifying the project details") project_name_uuid_map = client.project.get_name_uuid_map( params={"filter": "name=={}".format(project_name)} @@ -599,12 +663,17 @@ def _set_config( connection_timeout = connection_timeout or connection_config["connection_timeout"] read_timeout = read_timeout or connection_config["read_timeout"] + if api_key_location: + username = DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + password = DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + # 
Set the dsl configuration set_dsl_config( host=host, port=port, username=username, password=password, + api_key_location=api_key_location or DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME, project_name=project_name, db_location=db_location, log_level=log_level, diff --git a/calm/dsl/cli/main.py b/calm/dsl/cli/main.py index 26ff9871..69572751 100644 --- a/calm/dsl/cli/main.py +++ b/calm/dsl/cli/main.py @@ -116,7 +116,7 @@ def main(ctx, config_file, sync): project_config = ContextObj.get_project_config() project_name = project_config.get("name") - if project_name == DSL_CONFIG.EMPTY_PROJECT_NAME: + if project_name == DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.warning(DSL_CONFIG.EMPTY_PROJECT_MESSAGE) if sync: diff --git a/calm/dsl/cli/runbooks.py b/calm/dsl/cli/runbooks.py index 3823af91..8204e7ff 100644 --- a/calm/dsl/cli/runbooks.py +++ b/calm/dsl/cli/runbooks.py @@ -161,7 +161,7 @@ def compile_runbook(runbook_file): ] else: project_name = project_config["name"] - if project_name == DSL_CONFIG.EMPTY_PROJECT_NAME: + if project_name == DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: LOG.error(DSL_CONFIG.EMPTY_PROJECT_MESSAGE) sys.exit("Invalid project configuration") diff --git a/calm/dsl/config/config.ini.jinja2 b/calm/dsl/config/config.ini.jinja2 index c5c50eeb..252ee4f7 100644 --- a/calm/dsl/config/config.ini.jinja2 +++ b/calm/dsl/config/config.ini.jinja2 @@ -1,10 +1,11 @@ -{% macro ConfigTemplate(ip, port, username, password, project_name, db_location, log_level, policy_status, approval_policy_status, stratos_status, retries_enabled, connection_timeout, read_timeout) -%} +{% macro ConfigTemplate(ip, port, username, password, api_key_location, project_name, db_location, log_level, policy_status, approval_policy_status, stratos_status, retries_enabled, connection_timeout, read_timeout) -%} [SERVER] pc_ip = {{ip}} pc_port = {{port}} pc_username = {{username}} pc_password = {{password}} +api_key_location = {{api_key_location}} [PROJECT] name = {{project_name}} @@ -30,4 +31,4 @@ read_timeout 
= {{read_timeout}} {%- endmacro %} -{{ConfigTemplate(ip, port, username, password, project_name, db_location, log_level, policy_status, approval_policy_status, stratos_status, retries_enabled, connection_timeout, read_timeout)}} +{{ConfigTemplate(ip, port, username, password, api_key_location, project_name, db_location, log_level, policy_status, approval_policy_status, stratos_status, retries_enabled, connection_timeout, read_timeout)}} diff --git a/calm/dsl/config/config.py b/calm/dsl/config/config.py index cb13096e..ad80999a 100644 --- a/calm/dsl/config/config.py +++ b/calm/dsl/config/config.py @@ -190,6 +190,7 @@ def _render_config_template( port, username, password, + api_key_location, project_name, log_level, retries_enabled, @@ -210,6 +211,7 @@ def _render_config_template( port=port, username=username, password=password, + api_key_location=api_key_location, project_name=project_name, log_level=log_level, retries_enabled=retries_enabled, @@ -229,6 +231,7 @@ def update_config_file( port, username, password, + api_key_location, project_name, log_level, retries_enabled, @@ -247,6 +250,7 @@ def update_config_file( port, username, password, + api_key_location, project_name, log_level, retries_enabled, @@ -273,6 +277,7 @@ def set_dsl_config( port, username, password, + api_key_location, project_name, log_level, db_location, @@ -306,6 +311,7 @@ def set_dsl_config( port=port, username=username, password=password, + api_key_location=api_key_location, project_name=project_name, log_level=log_level, retries_enabled=retries_enabled, diff --git a/calm/dsl/config/constants.py b/calm/dsl/config/constants.py index cecc9328..950119bb 100644 --- a/calm/dsl/config/constants.py +++ b/calm/dsl/config/constants.py @@ -35,6 +35,7 @@ class SERVER(IterableConstants): PORT = "pc_port" USERNAME = "pc_username" PASSWORD = "pc_password" + API_KEY_LOCATION = "api_key_location" class PROJECT(IterableConstants): NAME = "name" diff --git a/calm/dsl/config/context.py 
b/calm/dsl/config/context.py index bd235458..c34ac6f3 100644 --- a/calm/dsl/config/context.py +++ b/calm/dsl/config/context.py @@ -104,7 +104,7 @@ def get_project_config(self): config = self.project_config if not config.get(CONFIG.PROJECT.NAME): - config["name"] = DSL_CONFIG.EMPTY_PROJECT_NAME + config["name"] = DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME return config @@ -205,6 +205,9 @@ def print_config(self): port=server_config[CONFIG.SERVER.PORT], username=server_config[CONFIG.SERVER.USERNAME], password="xxxxxxxx", # Do not render password + api_key_location=server_config.get( + CONFIG.SERVER.API_KEY_LOCATION, DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME + ), project_name=project_config[CONFIG.PROJECT.NAME], log_level=log_config[CONFIG.LOG.LEVEL], policy_status=policy_config[CONFIG.POLICY.STATUS], diff --git a/calm/dsl/constants.py b/calm/dsl/constants.py index 2969e61e..f855bf49 100644 --- a/calm/dsl/constants.py +++ b/calm/dsl/constants.py @@ -214,8 +214,10 @@ class OPENAPI_TYPE: class DSL_CONFIG: - EMPTY_PROJECT_NAME = "-" + EMPTY_CONFIG_ENTITY_NAME = "-" EMPTY_PROJECT_MESSAGE = "Project configuration not available. Use command `calm set config -pj ` to set it." + SAAS_PORT = "443" + SAAS_LOGIN_WARN = "Seems like you are trying to authenticate saas instance. Please provide API key location." 
class SUBSTRATE: diff --git a/calm/dsl/store/cache.py b/calm/dsl/store/cache.py index 8b58ff74..042a1ae5 100644 --- a/calm/dsl/store/cache.py +++ b/calm/dsl/store/cache.py @@ -9,6 +9,7 @@ from calm.dsl.db import get_db_handle, init_db_handle from calm.dsl.log import get_logging_handle from calm.dsl.api import get_client_handle_obj +from calm.dsl.api.util import get_auth_info LOG = get_logging_handle(__name__) @@ -30,10 +31,14 @@ def get_cache_tables(cls, sync_version=False): calm_version = Version.get_version("Calm") if sync_version or (not calm_version): server_config = context.get_server_config() + api_key_location = server_config.get("api_key_location", None) + cred = get_auth_info(api_key_location) + username = cred.get("username") + password = cred.get("password") client = get_client_handle_obj( server_config["pc_ip"], server_config["pc_port"], - auth=(server_config["pc_username"], server_config["pc_password"]), + auth=(username, password), ) res, err = client.version.get_calm_version() if err: From 7948f9bc3e8c536aacba388c12d6aaafb8223595 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Thu, 4 Jul 2024 16:11:51 +0530 Subject: [PATCH 10/24] Reset api_key_location to None for PC login (#496) Resetting api_key_location to None if it's the case of PC login. 
(cherry picked from commit febe0951534a365da6a5e1b99dbbcd15cdcdbbb8) --- calm/dsl/cli/init_command.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/calm/dsl/cli/init_command.py b/calm/dsl/cli/init_command.py index 417539ba..c241987b 100644 --- a/calm/dsl/cli/init_command.py +++ b/calm/dsl/cli/init_command.py @@ -537,17 +537,19 @@ def _set_config( api_key_location = api_key_location or server_config.get( "api_key_location", DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME ) - api_key_location = os.path.expanduser(api_key_location) + + # Resetting stored location of api key (for PC login case) + if api_key_location != DSL_CONFIG.EMPTY_CONFIG_ENTITY_NAME: + api_key_location = os.path.expanduser(api_key_location) + else: + api_key_location = None + port = port or server_config["pc_port"] cred = get_auth_info(api_key_location) stored_username = cred.get("username") stored_password = cred.get("password") - # Resetting stored location of api key for (PC login case) - if username or password: - api_key_location = None - username = username or stored_username password = password or stored_password project_config = ContextObj.get_project_config() From bf7ab7c8b4b50ad46b57a53581ab89c4b2f154ae Mon Sep 17 00:00:00 2001 From: Yedhu Tilak P Date: Sat, 13 Apr 2024 11:03:17 +0530 Subject: [PATCH 11/24] (#CALM-44134, #CALM-43590) - Fix VM Endpoint tests to pass account uuid and upload endpoint to default project (#415) As part of the changes brought on by: https://github.com/ideadevice/calm/pull/3269, a valid account reference needs to be sent while creating VM endpoints. The test was sending just a name and kind as part of the account_reference in the VM EP create payload. Modified the test to send a valid account_reference and uuid in the payload. The tests also downloads and uploads the endpoint, but does it using the _internal project. But since _internal project has no accounts in it. 
the /endpoints/import_file was not seeding the account_uuid in the uploaded endpoint. Hence modified the test to upload the endpoint to default project instead of internal project. --------- Co-authored-by: Prabhat Dwivedi (cherry picked from commit 18f153cbe0614b9a5fef68bb05c45bd23e24be4b) --- .../windows_vm_dynamic_ahv_ep_payload.json | 6 ++-- .../windows_vm_static_ahv_ep_payload.json | 6 ++-- .../test_runbooks/test_vm_endpoints.py | 28 +++++++++++++++++-- tests/api_interface/test_runbooks/utils.py | 20 +++++++++++++ 4 files changed, 51 insertions(+), 9 deletions(-) diff --git a/tests/api_interface/test_runbooks/test_files/windows_vm_dynamic_ahv_ep_payload.json b/tests/api_interface/test_runbooks/test_files/windows_vm_dynamic_ahv_ep_payload.json index 6e3133b2..8741f6df 100644 --- a/tests/api_interface/test_runbooks/test_files/windows_vm_dynamic_ahv_ep_payload.json +++ b/tests/api_interface/test_runbooks/test_files/windows_vm_dynamic_ahv_ep_payload.json @@ -29,9 +29,9 @@ "uuid": "63d922fd-f9e1-4c68-b355-dc3570ae17c1" }, "account_reference": { - "kind": "account", - "name": "Vmware 1" - }, + "kind": "account", + "name": "NTNX_LOCAL_AZ" + }, "filter_type": "dynamic", "filter": "name==vm*", "subnet": "10.0.0.0/8" diff --git a/tests/api_interface/test_runbooks/test_files/windows_vm_static_ahv_ep_payload.json b/tests/api_interface/test_runbooks/test_files/windows_vm_static_ahv_ep_payload.json index 1b76e37c..84f7c69e 100644 --- a/tests/api_interface/test_runbooks/test_files/windows_vm_static_ahv_ep_payload.json +++ b/tests/api_interface/test_runbooks/test_files/windows_vm_static_ahv_ep_payload.json @@ -34,9 +34,9 @@ "uuid": "b248f749-aef0-47ee-ae04-515993464d0b" }, "account_reference": { - "kind": "account", - "name": "Vmware 1" - }, + "kind": "account", + "name": "NTNX_LOCAL_AZ" + }, "filter_type": "static", "subnet": "10.0.0.0/8" } diff --git a/tests/api_interface/test_runbooks/test_vm_endpoints.py b/tests/api_interface/test_runbooks/test_vm_endpoints.py index 
8b44768f..a597642c 100644 --- a/tests/api_interface/test_runbooks/test_vm_endpoints.py +++ b/tests/api_interface/test_runbooks/test_vm_endpoints.py @@ -5,7 +5,7 @@ from calm.dsl.store import Version from calm.dsl.cli.main import get_api_client from calm.dsl.cli.constants import ENDPOINT -from utils import read_test_config, change_uuids +from utils import read_test_config, change_uuids, add_account_uuid LinuxVMStaticAHVEpPayload = read_test_config( file_name="linux_vm_static_ahv_ep_payload.json" @@ -44,6 +44,9 @@ def test_vm_endpoint_static_crud(self, EndpointPayload): """Endpoint for VM crud""" client = get_api_client() endpoint = change_uuids(EndpointPayload, {}) + res, err = add_account_uuid(EndpointPayload) + if not res: + pytest.fail(err) # Endpoint Create print(">> Creating endpoint") @@ -107,12 +110,20 @@ def test_vm_endpoint_static_crud(self, EndpointPayload): print(">> Downloading endpoint (uuid={})".format(ep_uuid)) file_path = client.endpoint.export_file(ep_uuid, passphrase="test_passphrase") + project_list_params = {"filter": "name=={}".format("default")} + res, err = client.project.list(params=project_list_params) + if err: + raise Exception("[{}] - {}".format(err["code"], err["error"])) + response = res.json() + default_project_uuid = response["entities"][0]["metadata"]["uuid"] + print(">> Default project uuid: {}".format(default_project_uuid)) + # upload the endpoint print(">> Uploading endpoint (uuid={})".format(ep_uuid)) res, err = client.endpoint.import_file( file_path, ep_name + "-uploaded", - ep["metadata"].get("project_reference", {}).get("uuid", ""), + default_project_uuid, passphrase="test_passphrase", ) if err: @@ -155,6 +166,9 @@ def test_vm_endpoint_dynamic_crud(self, EndpointPayload): """Endpoint for VM crud""" client = get_api_client() endpoint = change_uuids(EndpointPayload, {}) + res, err = add_account_uuid(EndpointPayload) + if not res: + pytest.fail(err) # Endpoint Create print(">> Creating endpoint") @@ -218,12 +232,20 @@ def 
test_vm_endpoint_dynamic_crud(self, EndpointPayload): print(">> Downloading endpoint (uuid={})".format(ep_uuid)) file_path = client.endpoint.export_file(ep_uuid, passphrase="test_passphrase") + project_list_params = {"filter": "name=={}".format("default")} + res, err = client.project.list(params=project_list_params) + if err: + raise Exception("[{}] - {}".format(err["code"], err["error"])) + response = res.json() + default_project_uuid = response["entities"][0]["metadata"]["uuid"] + print(">> Default project uuid: {}".format(default_project_uuid)) + # upload the endpoint print(">> Uploading endpoint (uuid={})".format(ep_uuid)) res, err = client.endpoint.import_file( file_path, ep_name + "-uploaded", - ep["metadata"].get("project_reference", {}).get("uuid", ""), + default_project_uuid, passphrase="test_passphrase", ) if err: diff --git a/tests/api_interface/test_runbooks/utils.py b/tests/api_interface/test_runbooks/utils.py index efc62b31..b643b776 100644 --- a/tests/api_interface/test_runbooks/utils.py +++ b/tests/api_interface/test_runbooks/utils.py @@ -562,3 +562,23 @@ def update_tunnel_and_project(tunnel_reference, project, endpoint_payload): if resources.get("tunnel_reference", {}): resources["tunnel_reference"]["uuid"] = tunnel_reference.get("uuid") resources["tunnel_reference"]["name"] = tunnel_reference.get("name") + + +def add_account_uuid(endpoint_payload): + payload = {"length": 250, "offset": 0} + client = get_api_client() + + account_name_uuid_map = client.account.get_name_uuid_map(payload) + account_ref = endpoint_payload["spec"]["resources"]["attrs"]["account_reference"] + + if account_ref.get("name", None): + account_uuid = account_name_uuid_map.get(account_ref["name"], None) + if not account_uuid: + err_msg = "Unable to fetch account uuid for {}".format(account_ref["name"]) + return False, err_msg + + account_ref["uuid"] = account_uuid + return True, None + else: + err_msg = "Unable to fetch account name from given account reference" + return False, 
err_msg From e522319e304da1c114fdedcacd3b0ac0bd155bd1 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Thu, 20 Jun 2024 15:27:44 +0530 Subject: [PATCH 12/24] CALM-45918: Fix VM Endpoint DSL Tests (#484) Description: -> Create endpoint payload should have valid vm reference due to fix in https://jira.nutanix.com/browse/CALM-45554 -> Earlier we were using dummy vm references. From now onwards vm references will be checked for authenticity, therefore, passing valid vm reference before executing vm endpoint tests. (cherry picked from commit 92602e1d2743301c74bbe37a59d34ef80d84233b) --- .../test_runbooks/test_vm_endpoints.py | 24 ++++++++- tests/api_interface/test_runbooks/utils.py | 53 +++++++++++++++++++ 2 files changed, 76 insertions(+), 1 deletion(-) diff --git a/tests/api_interface/test_runbooks/test_vm_endpoints.py b/tests/api_interface/test_runbooks/test_vm_endpoints.py index a597642c..aed725bf 100644 --- a/tests/api_interface/test_runbooks/test_vm_endpoints.py +++ b/tests/api_interface/test_runbooks/test_vm_endpoints.py @@ -1,11 +1,27 @@ import pytest import os +import json from distutils.version import LooseVersion as LV from calm.dsl.store import Version from calm.dsl.cli.main import get_api_client from calm.dsl.cli.constants import ENDPOINT -from utils import read_test_config, change_uuids, add_account_uuid +from tests.api_interface.test_runbooks.utils import ( + read_test_config, + change_uuids, + add_account_uuid, + add_vm_reference, +) +from calm.dsl.builtins import read_local_file +from calm.dsl.log import get_logging_handle + + +LOG = get_logging_handle(__name__) + +DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) +PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] +PROJECT_NAME = PROJECT["NAME"] + LinuxVMStaticAHVEpPayload = read_test_config( file_name="linux_vm_static_ahv_ep_payload.json" @@ -43,8 +59,14 @@ class TestVMEndpoints: def test_vm_endpoint_static_crud(self, EndpointPayload): """Endpoint for VM crud""" client = get_api_client() + 
vm_references = EndpointPayload["spec"]["resources"]["attrs"].get( + "vm_references", [] + ) + + add_vm_reference(vm_references, PROJECT_NAME) endpoint = change_uuids(EndpointPayload, {}) res, err = add_account_uuid(EndpointPayload) + if not res: pytest.fail(err) diff --git a/tests/api_interface/test_runbooks/utils.py b/tests/api_interface/test_runbooks/utils.py index b643b776..c0d93771 100644 --- a/tests/api_interface/test_runbooks/utils.py +++ b/tests/api_interface/test_runbooks/utils.py @@ -8,6 +8,11 @@ from calm.dsl.cli.constants import MARKETPLACE_ITEM from calm.dsl.config import get_context from calm.dsl.api import get_api_client +from calm.dsl.api import get_resource_api +from calm.dsl.log import get_logging_handle + + +LOG = get_logging_handle(__name__) def change_uuids(bp, context): @@ -582,3 +587,51 @@ def add_account_uuid(endpoint_payload): else: err_msg = "Unable to fetch account name from given account reference" return False, err_msg + + +def add_vm_reference(vm_references, project_name): + client = get_api_client() + + if vm_references: + payload = { + "entity_type": "mh_vm", + "query_name": "", + "grouping_attribute": " ", + "group_count": 20, + "group_offset": 0, + "group_attributes": [], + "group_member_count": 20, + "group_member_offset": 0, + "group_member_sort_attribute": "vm_name", + "group_member_sort_order": "ASCENDING", + "group_member_attributes": [{"attribute": "vm_name"}], + "filter_criteria": "is_cvm==0;power_state==on;project_name=={}".format( + project_name + ), + } + Obj = get_resource_api("groups", client.connection) + res, err = Obj.create(payload) + if err: + LOG.error("[{}] - {}".format(err["code"], err["error"])) + + response = res.json() + group_results = response.get("group_results", []) + if group_results: + entity_results = group_results[0].get("entity_results", []) + if not entity_results: + LOG.debug("vm groups call response: {}".format(response)) + pytest.fail( + "No target vm reference found for project 
{}".format(project_name) + ) + + vm_name = entity_results[0]["data"][0]["values"][0]["values"][0] + vm_uuid = entity_results[0]["entity_id"] + + vm_references[0]["name"] = vm_name + vm_references[0]["uuid"] = vm_uuid + + else: + LOG.debug("vm groups call response: {}".format(response)) + pytest.fail( + "No target vm reference found for project {}".format(project_name) + ) From c657c7b18e6fa727a80860ba47b3b95acecfd9f7 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Fri, 21 Jun 2024 11:29:29 +0530 Subject: [PATCH 13/24] CALM-45977: DSL build failure fix (#485) (cherry picked from commit 5f56704431c642b88f1913b5df9c9530e76e87d4) --- tests/api_interface/test_runbooks/test_vm_endpoints.py | 7 +------ tests/api_interface/test_runbooks/utils.py | 3 ++- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/api_interface/test_runbooks/test_vm_endpoints.py b/tests/api_interface/test_runbooks/test_vm_endpoints.py index aed725bf..573f4491 100644 --- a/tests/api_interface/test_runbooks/test_vm_endpoints.py +++ b/tests/api_interface/test_runbooks/test_vm_endpoints.py @@ -18,11 +18,6 @@ LOG = get_logging_handle(__name__) -DSL_CONFIG = json.loads(read_local_file(".tests/config.json")) -PROJECT = DSL_CONFIG["PROJECTS"]["PROJECT1"] -PROJECT_NAME = PROJECT["NAME"] - - LinuxVMStaticAHVEpPayload = read_test_config( file_name="linux_vm_static_ahv_ep_payload.json" ) @@ -63,7 +58,7 @@ def test_vm_endpoint_static_crud(self, EndpointPayload): "vm_references", [] ) - add_vm_reference(vm_references, PROJECT_NAME) + add_vm_reference(vm_references) endpoint = change_uuids(EndpointPayload, {}) res, err = add_account_uuid(EndpointPayload) diff --git a/tests/api_interface/test_runbooks/utils.py b/tests/api_interface/test_runbooks/utils.py index c0d93771..804a0919 100644 --- a/tests/api_interface/test_runbooks/utils.py +++ b/tests/api_interface/test_runbooks/utils.py @@ -589,7 +589,8 @@ def add_account_uuid(endpoint_payload): return False, err_msg -def 
add_vm_reference(vm_references, project_name): +def add_vm_reference(vm_references): + project_name = "_internal" client = get_api_client() if vm_references: From f7482783bfb4bd93fd428ea52809a953d5149e28 Mon Sep 17 00:00:00 2001 From: Yedhu Tilak P Date: Tue, 2 Jul 2024 14:47:06 +0530 Subject: [PATCH 14/24] (#CALM-45918) - Fix Endpoint VM DSL Tests (#491) Fixed Endpoint VM Tests to use the VMs created via test_dsl setup and to create the endpoints inside the default project instead of _internal project. Fixed vm_actions tests to handle scenarios of endpoints with incorrect UUID (VM doesn't exists) (cherry picked from commit 2fe9d3b9b13f4cbf2718d0c738ea0dc8086208bf) --- .../test_runbooks/test_vm_endpoints.py | 35 +++++++++++++------ .../test_vm_endpoints_failures.py | 22 ++++++++++++ tests/api_interface/test_runbooks/utils.py | 5 +-- 3 files changed, 49 insertions(+), 13 deletions(-) diff --git a/tests/api_interface/test_runbooks/test_vm_endpoints.py b/tests/api_interface/test_runbooks/test_vm_endpoints.py index 573f4491..6fc24a10 100644 --- a/tests/api_interface/test_runbooks/test_vm_endpoints.py +++ b/tests/api_interface/test_runbooks/test_vm_endpoints.py @@ -31,6 +31,8 @@ file_name="windows_vm_dynamic_ahv_ep_payload.json" ) +AHV_LINUX_ID = read_local_file(".tests/runbook_tests/ahv_linux_id") + # calm_version CALM_VERSION = Version.get_version("Calm") @@ -57,14 +59,33 @@ def test_vm_endpoint_static_crud(self, EndpointPayload): vm_references = EndpointPayload["spec"]["resources"]["attrs"].get( "vm_references", [] ) - - add_vm_reference(vm_references) - endpoint = change_uuids(EndpointPayload, {}) + context = {} + + if len(vm_references) > 0: + vm_references[0] = { + "uuid": AHV_LINUX_ID, + } + context[AHV_LINUX_ID] = AHV_LINUX_ID + endpoint = change_uuids(EndpointPayload, context) res, err = add_account_uuid(EndpointPayload) if not res: pytest.fail(err) + project_list_params = {"filter": "name=={}".format("default")} + res, err = 
client.project.list(params=project_list_params) + if err: + raise Exception("[{}] - {}".format(err["code"], err["error"])) + response = res.json() + default_project_uuid = response["entities"][0]["metadata"]["uuid"] + print(">> Default project uuid: {}".format(default_project_uuid)) + + endpoint["metadata"]["project_reference"] = { + "uuid": default_project_uuid, + "name": "default", + "kind": "project", + } + # Endpoint Create print(">> Creating endpoint") res, err = client.endpoint.create(endpoint) @@ -127,14 +148,6 @@ def test_vm_endpoint_static_crud(self, EndpointPayload): print(">> Downloading endpoint (uuid={})".format(ep_uuid)) file_path = client.endpoint.export_file(ep_uuid, passphrase="test_passphrase") - project_list_params = {"filter": "name=={}".format("default")} - res, err = client.project.list(params=project_list_params) - if err: - raise Exception("[{}] - {}".format(err["code"], err["error"])) - response = res.json() - default_project_uuid = response["entities"][0]["metadata"]["uuid"] - print(">> Default project uuid: {}".format(default_project_uuid)) - # upload the endpoint print(">> Uploading endpoint (uuid={})".format(ep_uuid)) res, err = client.endpoint.import_file( diff --git a/tests/api_interface/test_runbooks/test_vm_endpoints_failures.py b/tests/api_interface/test_runbooks/test_vm_endpoints_failures.py index ee9de342..f5b85dd4 100644 --- a/tests/api_interface/test_runbooks/test_vm_endpoints_failures.py +++ b/tests/api_interface/test_runbooks/test_vm_endpoints_failures.py @@ -46,6 +46,14 @@ def test_warnings_on_vm_endpoint(self, Runbook, warning_msg): client = get_api_client() rb_name = "test_warning_vm_endpoint_" + str(uuid.uuid4())[-10:] + if Runbook == VMEndpointWithIncorrectID: + # For VMEndpointWithIncorrectID, since the VM ID is incorrect, the runbook upload should fail as endpoint can't be created + res_json = upload_runbook( + client, rb_name, Runbook, return_error_response=True + ) + assert res_json["code"] == 403 + assert 
res_json["message_list"][0]["reason"] == "ACCESS_DENIED" + return rb = upload_runbook(client, rb_name, Runbook) rb_state = rb["status"]["state"] rb_uuid = rb["metadata"]["uuid"] @@ -140,6 +148,20 @@ def test_failures_on_vm_endpoint(self, Runbook, warning_msg): client = get_api_client() rb_name = "test_warning_vm_endpoint_" + str(uuid.uuid4())[-10:] + if Runbook == VMEndpointWithIncorrectID: + # For VMEndpointWithIncorrectID, since the VM ID is incorrect, the runbook upload should fail as endpoint can't be created + res_json = upload_runbook( + client, rb_name, Runbook, return_error_response=True + ) + assert res_json["code"] == 403 + assert res_json["message_list"][0]["reason"] == "ACCESS_DENIED" + return + + # TODO: Cleanup the below code + # As part of CALM-45554, Endpoint Creation will be blocked if incorrect unauthorized VM ID is provided + # Because of this Runbook Upload will fail for VMEndpointWithIncorrectID. + # Since the only param for this test is VMEndpointWithIncorrectID, the below code won't be executed + rb = upload_runbook(client, rb_name, Runbook) rb_state = rb["status"]["state"] rb_uuid = rb["metadata"]["uuid"] diff --git a/tests/api_interface/test_runbooks/utils.py b/tests/api_interface/test_runbooks/utils.py index 804a0919..fabd5799 100644 --- a/tests/api_interface/test_runbooks/utils.py +++ b/tests/api_interface/test_runbooks/utils.py @@ -88,7 +88,7 @@ def update_endpoints_name(rb, context): return rb -def upload_runbook(client, rb_name, Runbook): +def upload_runbook(client, rb_name, Runbook, return_error_response=False): """ This routine uploads the given runbook Args: @@ -135,7 +135,8 @@ def upload_runbook(client, rb_name, Runbook): print(">> {} uploaded with creds >>".format(Runbook)) assert res.ok is True else: - pytest.fail("[{}] - {}".format(err["code"], err["error"])) + if not return_error_response: + pytest.fail("[{}] - {}".format(err["code"], err["error"])) return res.json() From 62de11cc1bebedceabc3950692862811eaa08c76 Mon Sep 17 
00:00:00 2001 From: Yedhu Tilak P Date: Wed, 10 Jul 2024 15:44:38 +0530 Subject: [PATCH 15/24] (#CALM-46402) - Pass project_uuid while making accounts//vms/list API call (#497) Currently, DSL calls /accounts/`uuid`/vms/list API to resolve VM references during Vm Endpoints Compilation. Since by default this API returns all VMs in the account. If the user supplies a VM that he isn't authorized to access, Vm Endpoint compilation goes through. Made a change in VM Ref compile code to always pass project_uuid in accounts/`uuid`/vms/list payload so that compilation passes only when authorized VMs are present in the reference. --------- Co-authored-by: Prabhat Dwivedi (cherry picked from commit 23af8454d3dc2e94551dfc5d81659c79ade1371b) --- calm/dsl/builtins/models/calm_ref.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/calm/dsl/builtins/models/calm_ref.py b/calm/dsl/builtins/models/calm_ref.py index c22ce1a4..d78b16a2 100644 --- a/calm/dsl/builtins/models/calm_ref.py +++ b/calm/dsl/builtins/models/calm_ref.py @@ -2,6 +2,8 @@ import uuid import json +from distutils.version import LooseVersion as LV + from calm.dsl.db.table_config import AhvSubnetsCache from calm.dsl.builtins.models.constants import NutanixDB as NutanixDBConst @@ -12,7 +14,7 @@ from .ahv_vm_cluster import AhvCluster from .ahv_vm_vpc import AhvVpc -from calm.dsl.store import Cache +from calm.dsl.store import Cache, Version from calm.dsl.constants import CACHE from calm.dsl.api.handle import get_api_client from calm.dsl.log import get_logging_handle @@ -20,6 +22,8 @@ LOG = get_logging_handle(__name__) +CALM_VERSION = Version.get_version("Calm") + # CalmRef class CalmRefDict(EntityDict): @@ -464,6 +468,8 @@ def compile(cls, name="", **kwargs): account_uuid = "" try: account_ref = cls.__parent__.attrs.get("account_reference", {}) + if isinstance(account_ref, CalmRefType): + account_ref = account_ref.get_dict() account_uuid = account_ref.get("uuid", "") except Exception as exp: 
pass @@ -472,6 +478,9 @@ def compile(cls, name="", **kwargs): if name: params = {"filter": "name=={}".format(name), "length": 250} + if LV(CALM_VERSION) >= LV("3.8.1"): + project_cache_data = common_helper.get_cur_context_project() + params["project_uuid"] = project_cache_data.get("uuid") res, err = client.account.vms_list(account_uuid, params) if err: LOG.error(err) From 39914da5c117fbabd6b02d2623166d1e4a816f38 Mon Sep 17 00:00:00 2001 From: glenadsoza Date: Thu, 18 Jul 2024 17:53:36 +0530 Subject: [PATCH 16/24] Fix for pre check build failure in DSL (#513) (cherry picked from commit 176d3903b932ed98f2ef4b8e117802621e9249c6) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 6f44fed5..9db8863a 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ dev: # Setup our python3 based virtualenv # This step assumes python3 is installed on your dev machine [ -f venv/bin/python3 ] || (python3 -m venv venv && \ - venv/bin/pip3 install --upgrade pip setuptools) + venv/bin/pip3 install --upgrade pip "setuptools<=70.3.0") venv/bin/pip3 install --no-cache -r requirements.txt -r dev-requirements.txt venv/bin/python3 setup.py develop From 05c04e2b806c41984bbd6f16d9ef57181d302365 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 24 Jul 2024 16:50:43 +0530 Subject: [PATCH 17/24] CALM-46837: Compile endpoint metadata first to use project in metadata (#518) Issue Link: https://jira.nutanix.com/browse/CALM-46837 RCA: Vm reference verification check in #497 uses project in current context to fetch valid vm attached to a project but due to metadata payload being constructed after endpoint payload compilation, function `get_cur_context_project()` returned project from config.ini. Fix: Construct metadata payload before compiling endpoint and runbook. Note: While creating endpoint we are constructing metadata payload first therefore we didn't hit this issue. Adding fixing in compile endpoint logic. 
(cherry picked from commit 5225c62b802dfca4564bd9c0a813c4bc457f33c1) --- calm/dsl/cli/endpoints.py | 5 ++++- calm/dsl/cli/runbooks.py | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/calm/dsl/cli/endpoints.py b/calm/dsl/cli/endpoints.py index 8cb2bf4e..a0848c3e 100644 --- a/calm/dsl/cli/endpoints.py +++ b/calm/dsl/cli/endpoints.py @@ -137,12 +137,15 @@ def compile_endpoint(endpoint_file): def compile_endpoint_command(endpoint_file, out): + # Note: Metadata should be constructed before loading endpoint module. + # As metadata will be used while verifying vm reference in endpoint. + metadata_payload = get_metadata_payload(endpoint_file) + endpoint_payload = compile_endpoint(endpoint_file) if endpoint_payload is None: LOG.error("User endpoint not found in {}".format(endpoint_file)) return - metadata_payload = get_metadata_payload(endpoint_file) project_cache_data = {} project_name = "" if "project_reference" in metadata_payload: diff --git a/calm/dsl/cli/runbooks.py b/calm/dsl/cli/runbooks.py index 8204e7ff..b32775ad 100644 --- a/calm/dsl/cli/runbooks.py +++ b/calm/dsl/cli/runbooks.py @@ -142,6 +142,10 @@ def get_runbook_class_from_module(user_runbook_module): def compile_runbook(runbook_file): + # Note: Metadata should be constructed before loading runbook module. As metadata + # will be used while verifying vm reference in endpoint used withing runbook. 
+ metadata_payload = get_metadata_payload(runbook_file) + user_runbook_module = get_runbook_module_from_file(runbook_file) UserRunbook = get_runbook_class_from_module(user_runbook_module) if UserRunbook is None: @@ -154,7 +158,6 @@ def compile_runbook(runbook_file): ContextObj = get_context() project_config = ContextObj.get_project_config() - metadata_payload = get_metadata_payload(runbook_file) if "project_reference" in metadata_payload: runbook_payload["metadata"]["project_reference"] = metadata_payload[ "project_reference" From f21354624ff451afc2e3eda17ccb393b6648ea83 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Wed, 31 Jul 2024 10:12:33 +0530 Subject: [PATCH 18/24] Bump version to 381 (#493) (cherry picked from commit 2390d3b12475099d95877f2f036249e95705ac06) --- CalmVersion | 2 +- calm/dsl/cli/main.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CalmVersion b/CalmVersion index 19811903..f2807196 100644 --- a/CalmVersion +++ b/CalmVersion @@ -1 +1 @@ -3.8.0 +3.8.1 diff --git a/calm/dsl/cli/main.py b/calm/dsl/cli/main.py index 69572751..bd37dcad 100644 --- a/calm/dsl/cli/main.py +++ b/calm/dsl/cli/main.py @@ -51,7 +51,7 @@ default=False, help="Update cache before running command", ) -@click.version_option("3.8.0") +@click.version_option("3.8.1") @click.pass_context def main(ctx, config_file, sync): """Calm CLI From e5969dd7506cd1ae2022c262fb509e1460e12b40 Mon Sep 17 00:00:00 2001 From: Pradeepsingh Bhati Date: Fri, 2 Aug 2024 12:17:13 +0530 Subject: [PATCH 19/24] Add doc for tunnel VM reset in read me (#532) Co-authored-by: Prabhat Dwivedi (cherry picked from commit ed6ff9c45bfa97ede90445dc348a3f5fe4eea366) --- docs/05-VPC/README.md | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/docs/05-VPC/README.md b/docs/05-VPC/README.md index e353e4e6..8fdacb4a 100644 --- a/docs/05-VPC/README.md +++ b/docs/05-VPC/README.md @@ -1,6 +1,6 @@ # Calm-DSL supports VPC & Overlay Subnets across entities: -1. 
Ability to Create, Delete, List VPC Tunnels through network-group-tunnels commands +1. Ability to Create, Delete, List and Reset VPC Tunnels through network-group-tunnels commands 2. Ability to Whitelist Cluster, VPC and Overlay Subnets in Projects & Environments 3. Ability specify Cluster in VM Spec and Overlay subnets in NICs in Environment 4. Ability to specify Cluster in VM Spec and Overlay subnets in NICs in Blueprints @@ -14,12 +14,12 @@ ## VPC Tunnels/Network Group Tunnels -Network Group Tunnels (network-group-tunnels) commands are available to perform Create, Delete and List of VPC Tunnels. VPC Tunnels are refrenced using Tunnel Name. +Network Group Tunnels (network-group-tunnels) commands are available to perform Create, Delete, List and Reset of VPC Tunnels. VPC Tunnels are refrenced using Tunnel Name. ### Sample Commands - calm create network_group_tunnel -f tunnel_file.py; Sample below of tunnel_file - + ``` from calm.dsl.builtins import NetworkGroupTunnel from calm.dsl.builtins import Provider, Ref @@ -36,7 +36,17 @@ Network Group Tunnels (network-group-tunnels) commands are available to perform - calm get network-group-tunnels - List of all VPC/Network Group Tunnels - calm describe network-group-tunnel - Describes a VPC Tunnel - calm delete network-group-tunnel - Deletes a VPC Tunnel +- calm reset network-group-tunnel-vm -f examples/NetworkGroupTunnel/network_group_tunnel.py -n - Reset VPC Tunnel. + Resetting tunnel will spin up new VPC tunnel VM and delete older tunnel VM. Tunnel reference is not changed. 
Sample file below: + ``` + from calm.dsl.builtins import NetworkGroupTunnelVMSpec + class NewNetworkGroupTunnel1(NetworkGroupTunnelVMSpec): + """Network group tunnel spec for reset""" + cluster = "" + subnet = "" + type = "AHV" + ``` ## Project - Calm-DSL supports whitelisting Cluster, VPC and Overlay Subnets @@ -77,7 +87,7 @@ Network Group Tunnels (network-group-tunnels) commands are available to perform class MyVM(AhvVM): cluster = Ref.Cluster(name="cluster-name") ``` - + ## Overlay Subnets - Can be used in Projects as described above - Can be used within AhvVmResources @@ -94,7 +104,7 @@ Network Group Tunnels (network-group-tunnels) commands are available to perform ), ] ``` - + ## VPCs - Can be used in Projects & VPC Tunnels/Network Group Tunnels as described above From ddf9fb016dcc020133cd4332245ae13e510bbc56 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Fri, 2 Aug 2024 20:29:50 +0530 Subject: [PATCH 20/24] Added 381 release notes (#510) (cherry picked from commit 8c179825c96c4e5c3d8814ceb82d9515b49ea547) --- release-notes/3.8.1/README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 release-notes/3.8.1/README.md diff --git a/release-notes/3.8.1/README.md b/release-notes/3.8.1/README.md new file mode 100644 index 00000000..97957eca --- /dev/null +++ b/release-notes/3.8.1/README.md @@ -0,0 +1,14 @@ + +# Major Feats + +1. Added `calm run-script` command which will test scripts for any syntactical errors. Supported scripts are: escripts, shell scripts, powershell, python. For more details refer Playground section of Getting-Started in [DSL-Documentation](https://www.nutanix.dev/docs/self-service-dsl/) + +2. Added `-ak/--api-key` flag in `calm init dsl`, `calm set config` command to support api key authentication for SAAS Instance. 
For more details refer Saas Instance Login section of DSL-Configuration -> Initialization in [DSL-Documentation](https://www.nutanix.dev/docs/self-service-dsl/) + +# Bug Fixes/Improvements + +- Fixes `calm get marketplace bps` command failure in Calm-VM 3.8.0 for objects MPI. +- Fixes `calm update app-migratable-bp` command failure for multi-profile multi-VM blueprint. +- Fixes failure while providing guest customization for AHV using command `calm create provider spec --type AHV_VM` +- Adds support to provide xml file path for guest customization while creating provider spec for AHV using `calm create provider spec --type AHV_VM` +- Endpoints of VM type will only be created if referenced VM is in current context project i.e VM is authorized for a given project. Similarly, only authorized VM can be used as target endpoint in runbook/blueprint. \ No newline at end of file From 3f6b244b7f2987deae0e41d7f56f3e01e88ef446 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Mon, 5 Aug 2024 10:30:18 +0530 Subject: [PATCH 21/24] Main Readme Updated (#530) Updated latest version info (cherry picked from commit a0774e41549c9982e9bae56325f98a83fb1b29d6) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 220bab78..51c1fc9d 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,9 @@ ![Build](https://github.com/nutanix/calm-dsl/workflows/Setup%20&%20build%20calm-dsl/badge.svg) -`Latest release version: 3.8.0, Latest-release-tag: v3.8.0` +`Latest release version: 3.8.1, Latest-release-tag: v3.8.1` -`Latest Release Notes:` [read here](release-notes/3.8.0) +`Latest Release Notes:` [read here](release-notes/3.8.1) # Nutanix Cloud Manager (NCM) Self Service (formerly Calm) DSL From 6b198e5c622c57159b8fae885670d41d60ae6455 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Fri, 9 Aug 2024 12:52:58 +0530 Subject: [PATCH 22/24] CALM-47277: Added all state filter when `all` paramter is passed to get an entity. 
(#534) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Issue Link: https://jira.nutanix.com/browse/CALM-47277 Description: 1. `/list` api for a runbook returns only non-deleted runbooks when `all` parameter is set `True` in `get_runbook` 2. Ideally, deleted entities should also be returned with `all` parameter 3. Modified filter parameter using `get_states_filter` method which now returns all types of runbook with `all` parameter 4. Similar changes done to endpoint, blueprints, library tasks, scheduler job 5. Use case: in the describe command we pass the `all` parameter as True, which means we can describe deleted entities as well. With these changes it functions properly now. Earlier, describing deleted entities failed with a "no entity found" error. ![Screenshot 2024-08-09 at 12 21 47 PM](https://github.com/user-attachments/assets/5b2ade90-5af7-471d-81d1-61009e8c415f) ![Screenshot 2024-08-09 at 12 06 09 PM](https://github.com/user-attachments/assets/3a8e0b87-52c5-4a8e-ab09-7a8ab364a87e) ![Screenshot 2024-08-09 at 12 00 02 PM](https://github.com/user-attachments/assets/110106a8-ad30-4104-b3ba-079106688b9a) ![Screenshot 2024-08-09 at 11 58 19 AM](https://github.com/user-attachments/assets/6858dd94-f911-4aeb-a4ba-45657b6b0866) (cherry picked from commit a2c16e952272a2c3440f82d40d1646cdafc59deb) --- calm/dsl/cli/bps.py | 2 ++ calm/dsl/cli/endpoints.py | 2 ++ calm/dsl/cli/library_tasks.py | 2 ++ calm/dsl/cli/runbooks.py | 2 ++ calm/dsl/cli/scheduler.py | 4 +++- 5 files changed, 11 insertions(+), 1 deletion(-) diff --git a/calm/dsl/cli/bps.py b/calm/dsl/cli/bps.py index c1b7087a..d6a56123 100644 --- a/calm/dsl/cli/bps.py +++ b/calm/dsl/cli/bps.py @@ -778,6 +778,8 @@ def get_blueprint_uuid(name, all=False, is_brownfield=False): params = {"filter": "name=={}".format(name)} if not all: params["filter"] += ";state!=DELETED" + else: + params["filter"] += get_states_filter(BLUEPRINT.STATES) if is_brownfield: params["filter"] += ";type==BROWNFIELD" diff --git 
a/calm/dsl/cli/endpoints.py b/calm/dsl/cli/endpoints.py index a0848c3e..2777592d 100644 --- a/calm/dsl/cli/endpoints.py +++ b/calm/dsl/cli/endpoints.py @@ -188,6 +188,8 @@ def get_endpoint(client, name, all=False): params = {"filter": "name=={}".format(name)} if not all: params["filter"] += ";deleted==FALSE" + else: + params["filter"] += get_states_filter(ENDPOINT.STATES, state_key="_state") res, err = client.endpoint.list(params=params) if err: diff --git a/calm/dsl/cli/library_tasks.py b/calm/dsl/cli/library_tasks.py index ece97d27..d65021be 100644 --- a/calm/dsl/cli/library_tasks.py +++ b/calm/dsl/cli/library_tasks.py @@ -209,6 +209,8 @@ def get_task(client, name, all=False): params = {"filter": "name=={}".format(name)} if not all: params["filter"] += ";state!=DELETED" + else: + params["filter"] += get_states_filter(TASKS.STATES) res, err = client.task.list(params=params) if err: diff --git a/calm/dsl/cli/runbooks.py b/calm/dsl/cli/runbooks.py index b32775ad..9595244d 100644 --- a/calm/dsl/cli/runbooks.py +++ b/calm/dsl/cli/runbooks.py @@ -584,6 +584,8 @@ def get_runbook(client, name, all=False): params = {"filter": "name=={}".format(name)} if not all: params["filter"] += ";deleted==FALSE" + else: + params["filter"] += get_states_filter(RUNBOOK.STATES) res, err = client.runbook.list(params=params) if err: diff --git a/calm/dsl/cli/scheduler.py b/calm/dsl/cli/scheduler.py index d32780ff..af292a81 100644 --- a/calm/dsl/cli/scheduler.py +++ b/calm/dsl/cli/scheduler.py @@ -254,6 +254,8 @@ def get_job(client, name, all=False): params = {"filter": "name=={}".format(name)} if not all: params["filter"] += ";deleted==FALSE" + else: + params["filter"] += get_states_filter(JOBS.STATES) res, err = client.job.list(params=params) if err: @@ -593,7 +595,7 @@ def get_job_list_command(name, filter_by, limit, offset, quiet, all_items): def get_job_instances_command(job_name, out, filter_by, limit, offset, all_items): """Displays job instance data""" client = get_api_client() - 
job_get_res = get_job(client, job_name, all=True) + job_get_res = get_job(client, job_name, all=all_items) params = {"length": limit, "offset": offset} filter_query = "" From 9efcf19d63c2a666c147cb732418eb94d6224f8d Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Mon, 12 Aug 2024 13:34:52 +0530 Subject: [PATCH 23/24] 381 release notes updated (#538) (cherry picked from commit 46ce17c500fb1b40b1e5fa1da5201632e8eba022) --- release-notes/3.8.1/README.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/release-notes/3.8.1/README.md b/release-notes/3.8.1/README.md index 97957eca..94f8afe3 100644 --- a/release-notes/3.8.1/README.md +++ b/release-notes/3.8.1/README.md @@ -3,12 +3,15 @@ 1. Added `calm run-script` command which will test scripts for any syntactical errors. Supported scripts are: escripts, shell scripts, powershell, python. For more details refer Playground section of Getting-Started in [DSL-Documentation](https://www.nutanix.dev/docs/self-service-dsl/) -2. Added `-ak/--api-key` flag in `calm init dsl`, `calm set config` command to support api key authentication for SAAS Instance. For more details refer Saas Instance Login section of DSL-Configuration -> Initialization in [DSL-Documentation](https://www.nutanix.dev/docs/self-service-dsl/) +2. [#170](https://github.com/nutanix/calm-dsl/issues/170) Added `-ak/--api-key` flag in `calm init dsl`, `calm set config` command to support api key authentication for SAAS Instance. For more details refer Saas Instance Login section of DSL-Configuration -> Initialization in [DSL-Documentation](https://www.nutanix.dev/docs/self-service-dsl/) -# Bug Fixes/Improvements +# Improvements + +- Adds support to provide xml file path for guest customization while creating provider spec for AHV using `calm create provider spec --type AHV_VM` +- Endpoints of VM type will only be created if referenced VM is in current context project i.e VM is authorized for a given project. 
Similarly, only authorized VM can be used as target endpoint in runbook/blueprint. + +# Bug Fixes - Fixes `calm get marketplace bps` command failure in Calm-VM 3.8.0 for objects MPI. - Fixes `calm update app-migratable-bp` command failure for multi-profile multi-VM blueprint. -- Fixes failure while providing guest customization for AHV using command `calm create provider spec --type AHV_VM` -- Adds support to provide xml file path for guest customization while creating provider spec for AHV using `calm create provider spec --type AHV_VM` -- Endpoints of VM type will only be created if referenced VM is in current context project i.e VM is authorized for a given project. Similarly, only authorized VM can be used as target endpoint in runbook/blueprint. \ No newline at end of file +- Fixes failure while providing guest customization for AHV using command `calm create provider spec --type AHV_VM` \ No newline at end of file From 53d74fc108e6de1096017bc37d13f3d4837a5fe4 Mon Sep 17 00:00:00 2001 From: Prabhat Dwivedi Date: Tue, 13 Aug 2024 15:24:17 +0000 Subject: [PATCH 24/24] updated release metadata to 381 --- metadata.json | 2 +- release_config.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/metadata.json b/metadata.json index 222461b9..e33f01c6 100644 --- a/metadata.json +++ b/metadata.json @@ -1,3 +1,3 @@ { - "version": "3.8.0" + "version": "3.8.1" } \ No newline at end of file diff --git a/release_config.json b/release_config.json index 719fc514..f47086dc 100644 --- a/release_config.json +++ b/release_config.json @@ -2,7 +2,7 @@ "releases": [ { "product": "calm-dsl", - "version": "3.8.0", + "version": "3.8.1", "exclude": [ "" ],