Merge branch 'main' into IOPID-1969-ioweb-kv-monorepo-ap
BurnedMarshal authored Nov 20, 2024
2 parents e60ef22 + 24a04f6 commit 1969c6d
Showing 18 changed files with 273 additions and 13 deletions.
25 changes: 25 additions & 0 deletions src/_modules/data_factory_cosmos/data.tf
@@ -0,0 +1,25 @@
data "azurerm_subscription" "current" {
}

data "azurerm_cosmosdb_account" "source" {
  name                = var.cosmos_accounts.source.name
  resource_group_name = var.cosmos_accounts.source.resource_group_name
}

data "azurerm_cosmosdb_account" "target" {
  name                = var.cosmos_accounts.target.name
  resource_group_name = var.cosmos_accounts.target.resource_group_name
}

data "azapi_resource_list" "databases" {
  type                   = "Microsoft.DocumentDB/databaseAccounts/sqlDatabases@2024-05-15"
  parent_id              = data.azurerm_cosmosdb_account.source.id
  response_export_values = ["*"]
}

data "azapi_resource_list" "containers" {
  for_each               = local.databases
  type                   = "Microsoft.DocumentDB/databaseAccounts/sqlDatabases/containers@2024-05-15"
  parent_id              = each.key
  response_export_values = ["*"]
}
17 changes: 17 additions & 0 deletions src/_modules/data_factory_cosmos/datasets.tf
@@ -0,0 +1,17 @@
resource "azurerm_data_factory_dataset_cosmosdb_sqlapi" "source_dataset" {
  for_each            = local.containers_per_database
  name                = replace(each.value.container.name, "/[$-]/", "_")
  data_factory_id     = var.data_factory_id
  folder              = "cosmos/account=${var.cosmos_accounts.source.name}/db=${each.value.container.database_name}/source"
  linked_service_name = azurerm_data_factory_linked_service_cosmosdb.source_linked_service_cosmos[each.value.container.database_id].name
  collection_name     = each.value.container.name
}

resource "azurerm_data_factory_dataset_cosmosdb_sqlapi" "target_dataset" {
  for_each            = local.containers_per_database
  name                = replace(each.value.container.name, "/[$-]/", "_")
  data_factory_id     = var.data_factory_id
  folder              = "cosmos/account=${var.cosmos_accounts.target.name}/db=${each.value.container.database_name}/target"
  linked_service_name = azurerm_data_factory_linked_service_cosmosdb.target_linked_service_cosmos[each.value.container.database_id].name
  collection_name     = each.value.container.name
}
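
The "/[$-]/" pattern is treated by replace() as a regular expression, so every "$" and "-" in the container name is rewritten to "_" for the dataset name, while collection_name keeps the raw container name. A quick sketch in terraform console, using a hypothetical container name:

> replace("profile-emails$leases", "/[$-]/", "_")
"profile_emails_leases"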
17 changes: 17 additions & 0 deletions src/_modules/data_factory_cosmos/iam.tf
@@ -0,0 +1,17 @@
module "roles" {
  source       = "github.com/pagopa/dx//infra/modules/azure_role_assignments?ref=main"
  principal_id = var.data_factory_principal_id

  cosmos = [
    {
      account_name        = var.cosmos_accounts.source.name
      resource_group_name = var.cosmos_accounts.source.resource_group_name
      role                = "reader"
    },
    {
      account_name        = var.cosmos_accounts.target.name
      resource_group_name = var.cosmos_accounts.target.resource_group_name
      role                = "writer"
    }
  ]
}
17 changes: 17 additions & 0 deletions src/_modules/data_factory_cosmos/linked_services.tf
@@ -0,0 +1,17 @@
resource "azurerm_data_factory_linked_service_cosmosdb" "source_linked_service_cosmos" {
  for_each         = local.databases
  name             = "${var.cosmos_accounts.source.name}-${each.value}-cosmos"
  data_factory_id  = var.data_factory_id
  account_endpoint = data.azurerm_cosmosdb_account.source.endpoint
  account_key      = data.azurerm_cosmosdb_account.source.primary_key
  database         = each.value
}

resource "azurerm_data_factory_linked_service_cosmosdb" "target_linked_service_cosmos" {
  for_each         = local.databases
  name             = "${var.cosmos_accounts.target.name}-${each.value}-cosmos"
  data_factory_id  = var.data_factory_id
  account_endpoint = data.azurerm_cosmosdb_account.target.endpoint
  account_key      = data.azurerm_cosmosdb_account.target.primary_key
  database         = each.value
}
14 changes: 14 additions & 0 deletions src/_modules/data_factory_cosmos/locals.tf
@@ -0,0 +1,14 @@
locals {
  # Databases of the source account, as returned by the azapi listing
  azapi_databases = jsondecode(data.azapi_resource_list.databases.output)

  # SQL database resource id => database name; restricted to var.what_to_migrate.databases
  # when a list is provided, otherwise every database found in the source account.
  databases = length(var.what_to_migrate.databases) > 0 ? {
    for database in var.what_to_migrate.databases : "${data.azurerm_cosmosdb_account.source.id}/sqlDatabases/${database}" => database
    } : {
    for database in local.azapi_databases.value : database.id => reverse(split("/", database.id))[0]
  }

  # Per-database list of containers, each tagged with its parent database id and name
  containers = {
    for id, name in local.databases :
    name => [for container in jsondecode(data.azapi_resource_list.containers[id].output).value : { name = container.name, database_id = id, database_name = name }]
  }

  # One entry per (database, container) pair, keyed "<database name>|<container name>"
  containers_per_database = {
    for pair in flatten([
      for database, containers in local.containers :
      [for container in containers : { database = database, container = container }]
    ]) :
    "${pair.database}|${pair.container.name}" => pair
  }
}
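
For orientation, a rough sketch of the shapes these locals take, with hypothetical database and container names (the real values come from the azapi listings in data.tf):

# local.databases: SQL database resource id => database name
# {
#   ".../databaseAccounts/<SOURCE_COSMOS_ACCOUNT_NAME>/sqlDatabases/db" = "db"
# }
#
# local.containers_per_database: "<database name>|<container name>" => pair
# {
#   "db|profiles" = {
#     database  = "db"
#     container = { name = "profiles", database_id = ".../sqlDatabases/db", database_name = "db" }
#   }
# }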
21 changes: 21 additions & 0 deletions src/_modules/data_factory_cosmos/main.tf
@@ -0,0 +1,21 @@
terraform {
  required_providers {
    azapi = {
      source  = "Azure/azapi"
      version = "<= 1.15.0"
    }
  }
}

module "naming_convention" {
  source = "github.com/pagopa/dx//infra/modules/azure_naming_convention/?ref=main"

  environment = {
    prefix          = var.environment.prefix
    env_short       = var.environment.env_short
    location        = var.environment.location
    domain          = var.environment.domain
    app_name        = var.environment.app_name
    instance_number = var.environment.instance_number
  }
}
13 changes: 13 additions & 0 deletions src/_modules/data_factory_cosmos/network.tf
@@ -0,0 +1,13 @@
resource "azurerm_data_factory_managed_private_endpoint" "cosmos_source" {
  name               = "${module.naming_convention.prefix}-adf-${var.cosmos_accounts.source.name}-cosmos-${module.naming_convention.suffix}"
  data_factory_id    = var.data_factory_id
  target_resource_id = data.azurerm_cosmosdb_account.source.id
  subresource_name   = "Sql"
}

resource "azurerm_data_factory_managed_private_endpoint" "cosmos_target" {
  name               = "${module.naming_convention.prefix}-adf-${var.cosmos_accounts.target.name}-cosmos-${module.naming_convention.suffix}"
  data_factory_id    = var.data_factory_id
  target_resource_id = data.azurerm_cosmosdb_account.target.id
  subresource_name   = "Sql"
}
10 changes: 10 additions & 0 deletions src/_modules/data_factory_cosmos/outputs.tf
@@ -0,0 +1,10 @@
output "pipelines" {
  value = {
    for pipeline in azurerm_data_factory_pipeline.pipeline :
    pipeline.name => {
      id   = pipeline.id
      name = pipeline.name
      url  = "https://adf.azure.com/en/authoring/pipeline/${pipeline.name}?factory=${pipeline.data_factory_id}"
    }
  }
}
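
Each copy pipeline is exposed together with a direct link to its authoring view in the Data Factory portal. A sketch of one output entry, with a hypothetical container name and a truncated factory id:

# pipelines = {
#   "profiles" = {
#     id   = ".../factories/<DATA_FACTORY_NAME>/pipelines/profiles"
#     name = "profiles"
#     url  = "https://adf.azure.com/en/authoring/pipeline/profiles?factory=.../factories/<DATA_FACTORY_NAME>"
#   }
# }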
35 changes: 35 additions & 0 deletions src/_modules/data_factory_cosmos/pipelines.tf
@@ -0,0 +1,35 @@
resource "azurerm_data_factory_pipeline" "pipeline" {
  for_each        = local.containers_per_database
  name            = replace(each.value.container.name, "/[$-]/", "_")
  data_factory_id = var.data_factory_id
  description     = "Copy data from Cosmos (${var.cosmos_accounts.source.name}) to (${var.cosmos_accounts.target.name})"
  folder          = "cosmos/account=${var.cosmos_accounts.source.name}/db=${each.value.container.database_name}"

  activities_json = jsonencode([
    {
      name = "CopyFromCosmosToCosmos"
      type = "Copy"
      inputs = [
        {
          referenceName = azurerm_data_factory_dataset_cosmosdb_sqlapi.source_dataset[each.key].name
          type          = "DatasetReference"
        }
      ]
      outputs = [
        {
          referenceName = azurerm_data_factory_dataset_cosmosdb_sqlapi.target_dataset[each.key].name
          type          = "DatasetReference"
        }
      ]
      typeProperties = {
        source = {
          type = "CosmosDbSqlApiSource"
        }
        sink = {
          type          = "CosmosDbSqlApiSink"
          writeBehavior = var.cosmos_accounts.target.write_behavior
        }
      }
    }
  ])
}
52 changes: 52 additions & 0 deletions src/_modules/data_factory_cosmos/variables.tf
@@ -0,0 +1,52 @@
variable "environment" {
  type = object({
    prefix          = string
    env_short       = string
    location        = string
    domain          = optional(string)
    app_name        = string
    instance_number = string
  })

  description = "Values used to generate resource names and location short names. All values are mandatory except for domain, which should be omitted only for resources shared across multiple domains."
}

variable "data_factory_id" {
  description = "ID of the Data Factory in which resources are created."
  type        = string
}

variable "data_factory_principal_id" {
  description = "Principal ID of the Data Factory managed identity to grant access to."
  type        = string
}

variable "cosmos_accounts" {
  type = object({
    source = object({
      name                = string
      resource_group_name = string
    })

    target = object({
      name                = string
      resource_group_name = string
      write_behavior      = optional(string, "upsert")
    })
  })

  description = "Source and target Cosmos DB accounts for the migration. The target write_behavior defaults to upsert and must be either insert or upsert."

  validation {
    condition     = contains(["insert", "upsert"], var.cosmos_accounts.target.write_behavior)
    error_message = "The write_behavior must be one of the following values: insert or upsert."
  }
}

variable "what_to_migrate" {
  type = object({
    databases = optional(list(string), [])
  })

  description = "Database names of the source Cosmos DB account to migrate. If none are provided, all databases are migrated."
}
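
For reference, a minimal sketch of valid values for the two object variables (placeholder names in the style of the examples in src/migration/prod/locals.tf; write_behavior can be omitted to fall back to the upsert default):

cosmos_accounts = {
  source = { name = "<SOURCE_COSMOS_ACCOUNT_NAME>", resource_group_name = "<SOURCE_COSMOS_ACCOUNT_RG_NAME>" }
  target = { name = "<TARGET_COSMOS_ACCOUNT_NAME>", resource_group_name = "<TARGET_COSMOS_ACCOUNT_RG_NAME>", write_behavior = "insert" }
}

what_to_migrate = {
  databases = ["db1", "db2"] # an empty list (the default) migrates every database in the source account
}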
@@ -1,9 +1,9 @@
resource "azurerm_data_factory_custom_dataset" "source_dataset_container" {
  for_each        = toset(local.containers)
- name            = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
+ name            = replace(each.value, "/[$-]/", "_")
  data_factory_id = var.data_factory_id
  type            = "AzureBlob"
- folder          = "${var.storage_accounts.source.name}/source/blob"
+ folder          = "storage/account=${var.storage_accounts.source.name}/source/blob"

  linked_service {
    name = azurerm_data_factory_linked_service_azure_blob_storage.source_linked_service_blob[0].name
4 changes: 2 additions & 2 deletions src/_modules/data_factory_storage_account/datasets_tables.tf
@@ -1,9 +1,9 @@
resource "azurerm_data_factory_custom_dataset" "source_dataset_table" {
  for_each        = toset(local.tables)
- name            = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
+ name            = replace(each.value, "/[$-]/", "_")
  data_factory_id = var.data_factory_id
  type            = "AzureTable"
- folder          = "${var.storage_accounts.source.name}/source/table"
+ folder          = "storage/account=${var.storage_accounts.source.name}/source/table"

  linked_service {
    name = azurerm_data_factory_linked_service_azure_table_storage.source_linked_service_table[0].name
@@ -1,8 +1,8 @@
resource "azurerm_data_factory_pipeline" "pipeline_container" {
  for_each        = toset(local.containers)
- name            = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-blob-${module.naming_convention.suffix}", "/[$-]/", "_")
+ name            = replace(each.value, "/[$-]/", "_")
  data_factory_id = var.data_factory_id
- folder          = "${var.storage_accounts.source.name}/blob"
+ folder          = "storage/account=${var.storage_accounts.source.name}/blob"

  activities_json = jsonencode(
    [
4 changes: 2 additions & 2 deletions src/_modules/data_factory_storage_account/pipeline_tables.tf
@@ -1,8 +1,8 @@
resource "azurerm_data_factory_pipeline" "pipeline_table" {
  for_each        = toset(local.tables)
- name            = replace("${module.naming_convention.prefix}-adf-${var.storage_accounts.source.name}-${each.value}-table-${module.naming_convention.suffix}", "/[$-]/", "_")
+ name            = replace(each.value, "/[$-]/", "_")
  data_factory_id = var.data_factory_id
- folder          = "${var.storage_accounts.source.name}/table"
+ folder          = "storage/account=${var.storage_accounts.source.name}/table"

  activities_json = jsonencode(
    [
2 changes: 2 additions & 0 deletions src/migration/prod/README.md
@@ -18,6 +18,7 @@

| Name | Source | Version |
|------|--------|---------|
| <a name="module_migrate_cosmos_accounts"></a> [migrate\_cosmos\_accounts](#module\_migrate\_cosmos\_accounts) | ../../_modules/data_factory_cosmos | n/a |
| <a name="module_migrate_storage_accounts"></a> [migrate\_storage\_accounts](#module\_migrate\_storage\_accounts) | ../../_modules/data_factory_storage_account | n/a |

## Resources
@@ -37,5 +38,6 @@ No inputs.
| Name | Description |
|------|-------------|
| <a name="output_data_factory"></a> [data\_factory](#output\_data\_factory) | n/a |
| <a name="output_data_factory_cosmos_pipelines"></a> [data\_factory\_cosmos\_pipelines](#output\_data\_factory\_cosmos\_pipelines) | n/a |
| <a name="output_data_factory_st_pipelines"></a> [data\_factory\_st\_pipelines](#output\_data\_factory\_st\_pipelines) | n/a |
<!-- END_TF_DOCS -->
19 changes: 19 additions & 0 deletions src/migration/prod/italynorth.tf
@@ -53,4 +53,23 @@ module "migrate_storage_accounts" {
      tables = try(each.value.table.tables, [])
    }
  }
}

module "migrate_cosmos_accounts" {
  for_each = { for migration in local.cosmos_accounts : "${migration.source.name}|${migration.target.name}" => migration }
  source   = "../../_modules/data_factory_cosmos"

  environment = local.environment

  data_factory_id           = azurerm_data_factory.this.id
  data_factory_principal_id = azurerm_data_factory.this.identity[0].principal_id

  cosmos_accounts = {
    source = each.value.source
    target = each.value.target
  }

  what_to_migrate = {
    databases = try(each.value.databases, [])
  }
}
22 changes: 18 additions & 4 deletions src/migration/prod/locals.tf
@@ -26,16 +26,30 @@ locals {
  storage_accounts = [
    # Copy both containers and tables
    # {
-   #   source = { name = "stdevbiptest1", resource_group_name = "RG-BIP-DEV-TEST" }
-   #   target = { name = "stbipdevtest1", resource_group_name = "dev-fasanorg" }
+   #   source = { name = "<SOURCE_STORAGE_ACCOUNT_NAME>", resource_group_name = "<SOURCE_STORAGE_ACCOUNT_RG_NAME>" }
+   #   target = { name = "<TARGET_STORAGE_ACCOUNT_NAME>", resource_group_name = "<TARGET_STORAGE_ACCOUNT_RG_NAME>" }
    # },
    #
    # Copy only selected containers and tables
    # {
-   #   source = { name = "stdevbiptest1", resource_group_name = "RG-BIP-DEV-TEST" }
-   #   target = { name = "stbipdevtest1", resource_group_name = "dev-fasanorg" }
+   #   source = { name = "<SOURCE_STORAGE_ACCOUNT_NAME>", resource_group_name = "<SOURCE_STORAGE_ACCOUNT_RG_NAME>" }
+   #   target = { name = "<TARGET_STORAGE_ACCOUNT_NAME>", resource_group_name = "<TARGET_STORAGE_ACCOUNT_RG_NAME>" }
    #   blob  = { enabled = true, containers = ["c1", "c2", "c3"] }
    #   table = { enabled = true, tables = ["t1", "t2", "t3"] }
    # }
  ]

  cosmos_accounts = [
    # Copy all databases (with write_behavior set to insert)
    # {
    #   source = { name = "<SOURCE_COSMOS_ACCOUNT_NAME>", resource_group_name = "<SOURCE_COSMOS_ACCOUNT_RG_NAME>" }
    #   target = { name = "<TARGET_COSMOS_ACCOUNT_NAME>", resource_group_name = "<TARGET_COSMOS_ACCOUNT_RG_NAME>", write_behavior = "insert" }
    # },
    # Copy only selected databases (with write_behavior defaulting to upsert)
    # {
    #   source = { name = "<SOURCE_COSMOS_ACCOUNT_NAME>", resource_group_name = "<SOURCE_COSMOS_ACCOUNT_RG_NAME>" }
    #   target = { name = "<TARGET_COSMOS_ACCOUNT_NAME>", resource_group_name = "<TARGET_COSMOS_ACCOUNT_RG_NAME>" }
    #   databases = ["db1", "db2", "db3"]
    # }
  ]
}
6 changes: 5 additions & 1 deletion src/migration/prod/outputs.tf
@@ -8,4 +8,8 @@ output "data_factory" {

output "data_factory_st_pipelines" {
  value = { for migration in local.storage_accounts : "${migration.source.name}|${migration.target.name}" => module.migrate_storage_accounts["${migration.source.name}|${migration.target.name}"].pipelines }
- }
+ }

output "data_factory_cosmos_pipelines" {
  value = { for migration in local.cosmos_accounts : "${migration.source.name}|${migration.target.name}" => module.migrate_cosmos_accounts["${migration.source.name}|${migration.target.name}"].pipelines }
}
