Merge remote-tracking branch 'origin/main' into fix.bootstrap_order
Commit ce69d60d2b
@@ -6,7 +6,7 @@
 version: '3.7'
 services:
   rover:
-    image: aztfmod/rover-preview:1.3.9-2303.090804
+    image: aztfmod/rover:1.4.6-2305.1807
     user: vscode

     labels:
@@ -39,7 +39,7 @@ jobs:
        random_length: ['5']

    container:
-     image: aztfmod/rover-preview:1.3.9-2303.090804
+     image: aztfmod/rover:1.4.6-2305.1807
      options: --user 0

    steps:
@@ -96,7 +96,7 @@ jobs:
        ]

    container:
-     image: aztfmod/rover-preview:1.3.9-2303.090804
+     image: aztfmod/rover:1.4.6-2305.1807
      options: --user 0

    steps:
@@ -143,7 +143,7 @@ jobs:
        random_length: ['5']

    container:
-     image: aztfmod/rover-preview:1.3.9-2303.090804
+     image: aztfmod/rover:1.4.6-2305.1807
      options: --user 0

    steps:
@@ -198,7 +198,7 @@ jobs:
        ]

    container:
-     image: aztfmod/rover-preview:1.3.9-2303.090804
+     image: aztfmod/rover:1.4.6-2305.1807
      options: --user 0

    steps:
@@ -244,7 +244,7 @@ jobs:
        random_length: ['5']

    container:
-     image: aztfmod/rover-preview:1.3.9-2303.090804
+     image: aztfmod/rover:1.4.6-2305.1807
      options: --user 0

    steps:
@@ -1,7 +1,7 @@

 module "dynamic_keyvault_secrets" {
   source  = "aztfmod/caf/azurerm//modules/security/dynamic_keyvault_secrets"
-  version = "5.6.8"
+  version = "5.6.9"

   for_each = try(var.dynamic_keyvault_secrets, {})

@@ -1,6 +1,6 @@
 module "launchpad" {
   source  = "aztfmod/caf/azurerm"
-  version = "5.6.8"
+  version = "5.6.9"

   providers = {
     azurerm.vhub = azurerm.vhub
@@ -99,7 +99,7 @@ locals {
     "landingzone" = var.landingzone.key
   }

-  tags = merge(local.global_settings.tags, local.landingzone_tag, { "level" = var.landingzone.level }, { "environment" = local.global_settings.environment }, { "rover_version" = var.rover_version }, var.tags)
+  tags = merge(local.global_settings.tags, local.landingzone_tag, { "environment" = local.global_settings.environment }, { "rover_version" = var.rover_version }, var.tags)

   global_settings = {
     default_region = var.default_region
@@ -1,6 +1,6 @@
 variable "provider_azurerm_features_api_management" {
   default = {
-    purge_soft_delete_on_destroy         = true
+    purge_soft_delete_on_destroy         = false
     recover_soft_deleted_api_managements = true
   }
 }
@@ -13,7 +13,7 @@ variable "provider_azurerm_features_application_insights" {

 variable "provider_azurerm_features_cognitive_account" {
   default = {
-    purge_soft_delete_on_destroy = true
+    purge_soft_delete_on_destroy = false
   }
 }

@@ -32,19 +32,19 @@ variable "provider_azurerm_features_keyvault" {

 variable "provider_azurerm_features_log_analytics_workspace" {
   default = {
-    permanently_delete_on_destroy = true
+    permanently_delete_on_destroy = false
   }
 }

 variable "provider_azurerm_features_resource_group" {
   default = {
-    prevent_deletion_if_contains_resources = false
+    prevent_deletion_if_contains_resources = true
   }
 }

 variable "provider_azurerm_features_template_deployment" {
   default = {
-    delete_nested_items_during_deletion = false
+    delete_nested_items_during_deletion = true
   }
 }

@@ -0,0 +1,10 @@
+terraform {
+  required_providers {
+    kubernetes = {
+      source = "hashicorp/kubernetes"
+    }
+    helm = {
+      source = "hashicorp/helm"
+    }
+  }
+}
@@ -0,0 +1,55 @@
+resource "kubernetes_namespace" "namespaces" {
+  for_each = var.namespaces
+  metadata {
+    annotations = try(each.value.annotations, null)
+    labels      = try(each.value.labels, null)
+    name        = each.value.name
+  }
+
+}
+
+resource "kubernetes_manifest" "cluster_manifest" {
+  for_each = var.manifests
+  manifest = try(yamldecode(each.value.contents), yamldecode(file("${path.cwd}/${each.value.file}")), yamldecode(file("${path.module}/${each.value.file}")), yamldecode(file(each.value.file)))
+}
+
+# https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release
+resource "helm_release" "charts" {
+  for_each = var.helm_charts
+
+  name       = each.value.name
+  repository = each.value.repository
+  chart      = each.value.chart
+
+  namespace        = try(each.value.namespace, var.namespaces[each.value.namespace_key].name)
+  wait             = try(each.value.wait, true)
+  timeout          = try(each.value.timeout, 900)
+  skip_crds        = try(each.value.skip_crds, false)
+  create_namespace = try(each.value.create_namespace, false)
+  values           = try([yamlencode(each.value.contents)], [file("${path.cwd}/${each.value.file}")], [file("${path.module}/${each.value.file}")], [file(each.value.file)], [])
+  version          = try(each.value.version, null)
+  atomic           = try(each.value.atomic, false)
+  lint             = try(each.value.lint, false)
+
+  dynamic "set" {
+    for_each = try(each.value.sets, {})
+    content {
+      name  = set.key
+      value = set.value
+    }
+  }
+
+  dynamic "set_sensitive" {
+    for_each = try(each.value.sets_sensitive, {})
+    content {
+      name  = set_sensitive.key
+      value = set_sensitive.value
+    }
+  }
+
+
+  # depends_on = [kubernetes_namespace.namespaces]
+  # values = [
+  #   "${file("values.yaml")}"
+  # ]
+}
@@ -0,0 +1,11 @@
+variable "namespaces" {
+  default = {}
+}
+
+variable "helm_charts" {
+  default = {}
+}
+
+variable "manifests" {
+  default = {}
+}
@@ -0,0 +1,6 @@
+module "app" {
+  source      = "./app"
+  namespaces  = var.namespaces
+  helm_charts = var.helm_charts
+  manifests   = var.manifests
+}
@@ -0,0 +1,4 @@
+terraform {
+  backend "azurerm" {
+  }
+}
@@ -0,0 +1,45 @@
+# naming convention
+resource "azurecaf_name" "cluster_role" {
+  name          = var.settings.name
+  resource_type = "azurerm_role_definition"
+  prefixes      = var.global_settings.prefixes
+  random_length = var.global_settings.random_length
+  clean_input   = true
+  passthrough   = var.global_settings.passthrough
+  use_slug      = var.global_settings.use_slug
+}
+
+resource "kubernetes_cluster_role_v1" "cluster_role" {
+  metadata {
+    annotations = try(var.settings.annotations, null)
+    labels      = try(var.settings.labels, null)
+    name        = azurecaf_name.cluster_role.result
+  }
+  dynamic "rule" {
+    for_each = try(var.settings.rule, {})
+    content {
+      api_groups        = try(rule.value.api_groups, null)
+      non_resource_urls = try(rule.value.non_resource_urls, null)
+      resource_names    = try(rule.value.resource_names, null)
+      resources         = try(rule.value.resources, null)
+      verbs             = try(rule.value.verbs, null)
+    }
+  }
+
+  dynamic "aggregation_rule" {
+    for_each = try(var.settings.aggregation_rule, {})
+    content {
+      cluster_role_selectors {
+        dynamic "match_expressions" {
+          for_each = try(aggregation_rule.value.match_expressions, {})
+          content {
+            key      = try(match_expressions.value.key, null)
+            operator = try(match_expressions.value.operator, null)
+            values   = try(match_expressions.value.values, [])
+          }
+        }
+        match_labels = try(aggregation_rule.value.match_labels, {})
+      }
+    }
+  }
+}
@@ -0,0 +1,10 @@
+terraform {
+  required_providers {
+    kubernetes = {
+      source = "hashicorp/kubernetes"
+    }
+    azurecaf = {
+      source = "aztfmod/azurecaf"
+    }
+  }
+}
@@ -0,0 +1,16 @@
+variable "global_settings" {
+  default = {}
+}
+variable "role" {
+  default = {}
+}
+variable "settings" {}
+variable "azuread_service_principals" {
+  default = {}
+}
+variable "azuread_groups" {
+  default = {}
+}
+variable "managed_identities" {
+  default = {}
+}
@@ -0,0 +1,30 @@
+resource "azurecaf_name" "cluster_role_binding" {
+  name          = var.settings.name
+  resource_type = "azurerm_role_assignment"
+  prefixes      = var.global_settings.prefixes
+  random_length = var.global_settings.random_length
+  clean_input   = true
+  passthrough   = var.global_settings.passthrough
+  use_slug      = var.global_settings.use_slug
+}
+
+resource "kubernetes_cluster_role_binding_v1" "cluster_role_binding" {
+  metadata {
+    annotations = try(var.settings.annotations, null)
+    labels      = try(var.settings.labels, null)
+    name        = azurecaf_name.cluster_role_binding.result
+  }
+  role_ref {
+    name      = try(var.cluster_role[var.settings.role_key].name, var.settings.role_name)
+    kind      = "ClusterRole"
+    api_group = "rbac.authorization.k8s.io"
+  }
+  dynamic "subject" {
+    for_each = try(var.settings.subjects, {})
+    content {
+      name      = coalesce(try(subject.value.name, null), try(var.managed_identities[subject.value.lz_key][subject.value.object_key].rbac_id, null), try(var.azuread_service_principals[subject.value.lz_key][subject.value.object_key].rbac_id, null), try(var.azuread_groups[subject.value.lz_key][subject.value.object_key].rbac_id, null))
+      kind      = can(subject.value.kind) ? subject.value.kind : can(try(var.managed_identities[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "User" : can(try(var.azuread_service_principals[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "User" : can(try(var.azuread_groups[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "Group" : null
+      api_group = "rbac.authorization.k8s.io"
+    }
+  }
+}
@@ -0,0 +1,10 @@
+terraform {
+  required_providers {
+    kubernetes = {
+      source = "hashicorp/kubernetes"
+    }
+    azurecaf = {
+      source = "aztfmod/azurecaf"
+    }
+  }
+}
@@ -0,0 +1,19 @@
+variable "global_settings" {
+  default = {}
+}
+variable "cluster_role" {
+  default = {}
+}
+variable "cluster_role_binding" {
+  default = {}
+}
+variable "azuread_service_principals" {
+  default = {}
+}
+variable "azuread_groups" {
+  default = {}
+}
+variable "managed_identities" {
+  default = {}
+}
+variable "settings" {}
@@ -0,0 +1,83 @@
+# Helm chart definition
+helm_charts = {
+  falco = {
+    name             = "falco"
+    create_namespace = false
+    namespace        = "falco-system"
+    repository       = "https://falcosecurity.github.io/charts"
+    chart            = "falco"
+    version          = "3.1.3"
+  }
+}
+
+# namespace creation
+namespaces = {
+  falco = {
+    name = "falco-system"
+  }
+}
+
+# Keyvault integration for the CSI driver
+kv_csi_driver = {
+  workload_kv_reader = {
+    aks_clusters = {
+      lz_key = "aks"
+      key    = "aks_cluster1"
+    }
+    keyvault = {
+      key    = "aks_kv"
+      lz_key = "aks"
+    }
+    role_definition_name = "Key Vault Reader"
+  }
+}
+
+# Kubernetes manifests. More than one manifest per file is not supported; split them into separate files.
+manifests = {
+  agentconfig = {
+    file = "add-ons/aks_applications_v2/examples/files/denyall.yml"
+  }
+}
+
+# Cluster to authenticate against
+aks_clusters = {
+  lz_key = "aks"
+  key    = "cluster_re1"
+}
+
+# Keyvault to fetch secrets from in order to authenticate with the kubernetes provider. The service principal credentials must exist
+# and the service principal must have the cluster admin role to perform the necessary operations.
+keyvaults = {
+  key           = "aks_kv"
+  lz_key        = "aks"
+  secret_prefix = "aks"
+}
+
+# Kubernetes rbac
+cluster_role_binding = {
+  cluster_admin = {
+    name      = "aks-admin-sp"
+    role_name = "cluster-admin"
+    subjects = {
+      aks_admin_sp = {
+        # lz key where the service principal is created
+        lz_key = "aks"
+        # SP key
+        object_key = "aks_admin_sp"
+      }
+    }
+  }
+}
+role_binding = {
+  ns_admin = {
+    name          = "aks-ns-admin-sp"
+    namespace_key = "default"
+    role_name     = "admin"
+    subjects = {
+      demouser = {
+        # user object id
+        name = "e74a2ee6-433c-46b3-b10f-9abac25b1ba8"
+      }
+    }
+  }
+}
@@ -0,0 +1,12 @@
+apiVersion: networking.k8s.io/v1
+kind: NetworkPolicy
+metadata:
+  name: default-deny-all
+  namespace: default
+  labels:
+    app.kubernetes.io/managed-by: caf-manifest
+spec:
+  podSelector: {}
+  policyTypes:
+  - Ingress
+  - Egress
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    azurerm = {
+      source  = "hashicorp/azurerm"
+      version = "~> 2.99"
+    }
+  }
+}
@@ -0,0 +1,5 @@
+resource "azurerm_role_assignment" "for" {
+  principal_id         = var.secret_identity_id
+  role_definition_name = var.settings.role_definition_name
+  scope                = try(var.settings.keyvault.keyvault_id, var.keyvaults[var.settings.keyvault.lz_key][var.settings.keyvault.key].id)
+}
@@ -0,0 +1,11 @@
+variable "global_settings" {
+  default = {}
+}
+variable "keyvaults" {
+  default = {}
+}
+variable "aks_clusters" {
+  default = {}
+}
+variable "settings" {}
+variable "secret_identity_id" {}
@@ -0,0 +1,8 @@
+module "keyvault-csi-driver" {
+  for_each           = var.kv_csi_driver
+  source             = "./keyvault-csi-driver"
+  global_settings    = local.global_settings
+  settings           = each.value
+  secret_identity_id = local.secret_identity_id
+  keyvaults          = local.remote.keyvaults
+}
@@ -0,0 +1,63 @@
+locals {
+  landingzone = {
+    current = {
+      storage_account_name = var.tfstate_storage_account_name
+      container_name       = var.tfstate_container_name
+      resource_group_name  = var.tfstate_resource_group_name
+    }
+    lower = {
+      storage_account_name = var.lower_storage_account_name
+      container_name       = var.lower_container_name
+      resource_group_name  = var.lower_resource_group_name
+    }
+  }
+}
+
+data "terraform_remote_state" "remote" {
+  for_each = try(var.landingzone.tfstates, {})
+
+  backend = var.landingzone.backend_type
+  config  = local.remote_state[try(each.value.backend_type, var.landingzone.backend_type, "azurerm")][each.key]
+}
+
+locals {
+
+  remote_state = {
+    azurerm = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => {
+        container_name       = try(value.workspace, local.landingzone[try(value.level, "current")].container_name)
+        key                  = value.tfstate
+        resource_group_name  = try(value.resource_group_name, local.landingzone[try(value.level, "current")].resource_group_name)
+        storage_account_name = try(value.storage_account_name, local.landingzone[try(value.level, "current")].storage_account_name)
+        subscription_id      = try(value.subscription_id, var.tfstate_subscription_id)
+        tenant_id            = try(value.tenant_id, data.azurerm_client_config.current.tenant_id)
+      }
+    }
+  }
+  global_settings = data.terraform_remote_state.remote[var.landingzone.global_settings_key].outputs.objects[var.landingzone.global_settings_key].global_settings
+  remote = {
+    global_settings = local.global_settings
+    aks_clusters = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].aks_clusters, {}))
+    }
+    managed_identities = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].managed_identities, {}))
+    }
+    azuread_groups = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].azuread_groups, {}))
+    }
+    azuread_service_principals = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].azuread_service_principals, {}))
+    }
+    keyvaults = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].keyvaults, {}))
+    }
+    azure_container_registries = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].azure_container_registries, {}))
+    }
+  }
+  kubelogin_cred = {
+    secret_prefix = try(var.keyvaults.secret_prefix, "sp")
+  }
+  secret_identity_id = data.azurerm_kubernetes_cluster.kubeconfig.key_vault_secrets_provider[0].secret_identity[0].object_id
+}
@@ -0,0 +1,23 @@
+terraform {
+  required_providers {
+    azurerm = {
+      source  = "hashicorp/azurerm"
+      version = "~> 2.99.0"
+    }
+    kubernetes = {
+      source  = "hashicorp/kubernetes"
+      version = "~> 2.19.0"
+    }
+    helm = {
+      source  = "hashicorp/helm"
+      version = "~> 2.9.0"
+    }
+    azurecaf = {
+      source  = "aztfmod/azurecaf"
+      version = "~> 1.2.24"
+    }
+  }
+  required_version = ">= 0.13"
+}
+
+data "azurerm_client_config" "current" {}
@@ -0,0 +1,75 @@
+provider "azurerm" {
+  partner_id = "ca4078f8-9bc4-471b-ab5b-3af6b86a42c8"
+  # partner identifier for CAF Terraform landing zones.
+  features {
+  }
+}
+
+provider "kubernetes" {
+  host                   = yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).clusters[0].cluster.server
+  cluster_ca_certificate = base64decode(yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).clusters[0].cluster["certificate-authority-data"])
+  exec {
+    api_version = "client.authentication.k8s.io/v1beta1"
+    command     = "/usr/local/bin/kubelogin"
+    args = [
+      "get-token",
+      "--login",
+      "spn",
+      "--environment",
+      "AzurePublicCloud",
+      "--tenant-id",
+      data.azurerm_key_vault_secret.tenant_id.value,
+      "--server-id",
+      yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).users[0].user.exec.args[4],
+      "--client-id",
+      data.azurerm_key_vault_secret.client_id.value,
+      "--client-secret",
+      data.azurerm_key_vault_secret.client_secret.value
+    ]
+  }
+}
+
+provider "helm" {
+  kubernetes {
+    host                   = yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).clusters[0].cluster.server
+    cluster_ca_certificate = base64decode(yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).clusters[0].cluster["certificate-authority-data"])
+    exec {
+      api_version = "client.authentication.k8s.io/v1beta1"
+      command     = "/usr/local/bin/kubelogin"
+      args = [
+        "get-token",
+        "--login",
+        "spn",
+        "--environment",
+        "AzurePublicCloud",
+        "--tenant-id",
+        data.azurerm_key_vault_secret.tenant_id.value,
+        "--server-id",
+        yamldecode(data.azurerm_kubernetes_cluster.kubeconfig.kube_config_raw).users[0].user.exec.args[4],
+        "--client-id",
+        data.azurerm_key_vault_secret.client_id.value,
+        "--client-secret",
+        data.azurerm_key_vault_secret.client_secret.value
+      ]
+    }
+  }
+}
+
+# Get kubeconfig from AKS clusters
+data "azurerm_kubernetes_cluster" "kubeconfig" {
+  name                = local.remote.aks_clusters[var.aks_clusters.lz_key][var.aks_clusters.key].cluster_name
+  resource_group_name = local.remote.aks_clusters[var.aks_clusters.lz_key][var.aks_clusters.key].resource_group_name
+}
+
+data "azurerm_key_vault_secret" "client_secret" {
+  key_vault_id = try(var.keyvaults.keyvault_id, local.remote.keyvaults[var.keyvaults.lz_key][var.keyvaults.key].id)
+  name         = try(var.keyvaults.client_secret_name, "${local.kubelogin_cred.secret_prefix}-client-secret")
+}
+data "azurerm_key_vault_secret" "tenant_id" {
+  key_vault_id = try(var.keyvaults.keyvault_id, local.remote.keyvaults[var.keyvaults.lz_key][var.keyvaults.key].id)
+  name         = try(var.keyvaults.tenant_id, "${local.kubelogin_cred.secret_prefix}-tenant-id")
+}
+data "azurerm_key_vault_secret" "client_id" {
+  key_vault_id = try(var.keyvaults.keyvault_id, local.remote.keyvaults[var.keyvaults.lz_key][var.keyvaults.key].id)
+  name         = try(var.keyvaults.client_id, "${local.kubelogin_cred.secret_prefix}-client-id")
+}
@@ -0,0 +1,43 @@
+module "role" {
+  source                     = "./role"
+  for_each                   = var.role
+  global_settings            = local.global_settings
+  settings                   = each.value
+  managed_identities         = local.remote.managed_identities
+  azuread_groups             = local.remote.azuread_groups
+  azuread_service_principals = local.remote.azuread_service_principals
+}
+
+module "role_binding" {
+  source                     = "./role_binding"
+  for_each                   = var.role_binding
+  depends_on                 = [module.app]
+  role                       = var.role
+  global_settings            = local.global_settings
+  settings                   = each.value
+  managed_identities         = local.remote.managed_identities
+  azuread_groups             = local.remote.azuread_groups
+  azuread_service_principals = local.remote.azuread_service_principals
+  namespaces                 = var.namespaces
+}
+
+module "cluster_role" {
+  source                     = "./cluster_role"
+  for_each                   = var.cluster_role
+  global_settings            = local.global_settings
+  settings                   = each.value
+  managed_identities         = local.remote.managed_identities
+  azuread_groups             = local.remote.azuread_groups
+  azuread_service_principals = local.remote.azuread_service_principals
+}
+
+module "cluster_role_binding" {
+  source                     = "./cluster_role_binding"
+  for_each                   = var.cluster_role_binding
+  cluster_role               = var.cluster_role
+  global_settings            = local.global_settings
+  settings                   = each.value
+  managed_identities         = local.remote.managed_identities
+  azuread_groups             = local.remote.azuread_groups
+  azuread_service_principals = local.remote.azuread_service_principals
+}
@@ -0,0 +1,16 @@
+# Azure Kubernetes Service Add-on
+
+The add-on helps you bootstrap an AKS cluster with additional components and access control management. It relies on exec plugin authentication, which is ideal for clusters secured by disabling local authentication.
+
+## Features
+
+- Kubernetes RBAC for Azure identities
+- Azure role assignment for the user identity created by AKS
+- Helm chart deployment
+- Manifest deployment via the kubernetes provider for configuration handling, CRD deployment, etc.
+
+## Pre-Requisites
+
+- The implementation assumes you have a service principal created and that it is part of the AKS admin group (provided in the AKS cluster creation configuration).
+- The user running the deployment (or the impersonated service principal) must have access to read the keyvault and fetch the service principal credentials mentioned above.
+
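For context on the exec plugin authentication the README refers to: the kubernetes and helm providers shell out to kubelogin to obtain an Azure AD token for a service principal instead of using local cluster accounts. Below is a minimal sketch of that pattern, assuming kubelogin is installed and on the PATH; the variable names are illustrative only, while the add-on's own providers.tf (shown above) resolves the same values from Key Vault secrets and the AKS kubeconfig.

provider "kubernetes" {
  host                   = var.cluster_host                  # AKS API server endpoint (illustrative variable)
  cluster_ca_certificate = base64decode(var.cluster_ca_cert) # base64-encoded cluster CA bundle (illustrative variable)

  exec {
    api_version = "client.authentication.k8s.io/v1beta1"
    command     = "kubelogin"
    args = [
      "get-token",
      "--login", "spn",                     # authenticate as a service principal
      "--environment", "AzurePublicCloud",
      "--server-id", var.aks_server_app_id, # Azure AD application id of the AKS server app (illustrative)
      "--tenant-id", var.tenant_id,
      "--client-id", var.client_id,
      "--client-secret", var.client_secret,
    ]
  }
}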
@@ -0,0 +1,10 @@
+terraform {
+  required_providers {
+    kubernetes = {
+      source = "hashicorp/kubernetes"
+    }
+    azurecaf = {
+      source = "aztfmod/azurecaf"
+    }
+  }
+}
@@ -0,0 +1,26 @@
+resource "azurecaf_name" "role" {
+  name          = var.settings.name
+  resource_type = "azurerm_role_definition"
+  prefixes      = var.global_settings.prefixes
+  random_length = var.global_settings.random_length
+  clean_input   = true
+  passthrough   = var.global_settings.passthrough
+  use_slug      = var.global_settings.use_slug
+}
+
+resource "kubernetes_role_v1" "role" {
+  metadata {
+    annotations = try(var.settings.annotations, null)
+    labels      = try(var.settings.labels, null)
+    name        = azurecaf_name.role.result
+  }
+  dynamic "rule" {
+    for_each = try(var.settings.rule, {})
+    content {
+      api_groups     = try(rule.value.api_groups, null)
+      resource_names = try(rule.value.resource_names, null)
+      resources      = try(rule.value.resources, null)
+      verbs          = try(rule.value.verbs, null)
+    }
+  }
+}
@@ -0,0 +1,16 @@
+variable "global_settings" {
+  default = {}
+}
+variable "role" {
+  default = {}
+}
+variable "azuread_service_principals" {
+  default = {}
+}
+variable "azuread_groups" {
+  default = {}
+}
+variable "managed_identities" {
+  default = {}
+}
+variable "settings" {}
@@ -0,0 +1,10 @@
+terraform {
+  required_providers {
+    kubernetes = {
+      source = "hashicorp/kubernetes"
+    }
+    azurecaf = {
+      source = "aztfmod/azurecaf"
+    }
+  }
+}
@@ -0,0 +1,31 @@
+resource "azurecaf_name" "role_binding" {
+  name          = var.settings.name
+  resource_type = "azurerm_role_assignment"
+  prefixes      = var.global_settings.prefixes
+  random_length = var.global_settings.random_length
+  clean_input   = true
+  passthrough   = var.global_settings.passthrough
+  use_slug      = var.global_settings.use_slug
+}
+
+resource "kubernetes_role_binding_v1" "role_binding" {
+  metadata {
+    annotations = try(var.settings.annotations, null)
+    labels      = try(var.settings.labels, null)
+    name        = azurecaf_name.role_binding.result
+    namespace   = try(var.settings.namespace, var.namespaces[var.settings.namespace_key].name)
+  }
+  role_ref {
+    name      = try(var.role[var.settings.role_key].name, var.settings.role_name)
+    kind      = "Role"
+    api_group = "rbac.authorization.k8s.io"
+  }
+  dynamic "subject" {
+    for_each = try(var.settings.subjects, {})
+    content {
+      name      = coalesce(try(subject.value.name, null), try(var.managed_identities[subject.value.lz_key][subject.value.object_key].rbac_id, null), try(var.azuread_service_principals[subject.value.lz_key][subject.value.object_key].rbac_id, null), try(var.azuread_groups[subject.value.lz_key][subject.value.object_key].rbac_id, null))
      kind      = can(subject.value.kind) ? subject.value.kind : can(try(var.managed_identities[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "User" : can(try(var.azuread_service_principals[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "User" : can(try(var.azuread_groups[subject.value.lz_key][subject.value.object_key].rbac_id, null)) ? "Group" : null
+      api_group = "rbac.authorization.k8s.io"
+    }
+  }
+}
@@ -0,0 +1,23 @@
+variable "global_settings" {
+  default = {}
+}
+variable "role" {
+  default = {}
+}
+variable "role_binding" {
+  default = {}
+}
+variable "azuread_service_principals" {
+  default = {}
+}
+variable "azuread_groups" {
+  default = {}
+}
+variable "managed_identities" {
+  default = {}
+}
+variable "settings" {}
+
+variable "namespaces" {
+  default = {}
+}
@@ -0,0 +1,53 @@
+# Map of the remote data state for lower level
+variable "lower_storage_account_name" {}
+variable "lower_container_name" {}
+variable "lower_resource_group_name" {}
+
+variable "tfstate_subscription_id" {
+  description = "This value is populated by the rover. subscription id hosting the remote tfstates"
+}
+variable "tfstate_storage_account_name" {}
+variable "tfstate_container_name" {}
+variable "tfstate_key" {}
+variable "tfstate_resource_group_name" {}
+variable "global_settings" {
+  default = {}
+}
+variable "settings" {
+  default = {}
+}
+variable "landingzone" {}
+variable "rover_version" {
+  default = null
+}
+variable "namespaces" {
+  default = {}
+}
+variable "helm_charts" {
+  default = {}
+}
+variable "aks_clusters" {
+  default = {}
+}
+variable "role" {
+  default = {}
+}
+variable "cluster_role" {
+  default = {}
+}
+variable "role_binding" {
+  default = {}
+}
+variable "cluster_role_binding" {
+  default = {}
+}
+variable "keyvaults" {}
+variable "kv_csi_driver" {
+  default = {}
+}
+variable "secret_identity_id" {
+  default = null
+}
+variable "manifests" {
+  default = {}
+}
@@ -1,7 +1,7 @@

 module "dynamic_keyvault_secrets" {
   source  = "aztfmod/caf/azurerm//modules/security/dynamic_keyvault_secrets"
-  version = "~>5.3.0"
+  version = "~>5.6.8"
   # source = "git::https://github.com/aztfmod/terraform-azurerm-caf.git//modules/security/dynamic_keyvault_secrets?ref=master"


@@ -63,5 +63,9 @@ locals {
     vnets = {
       for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].vnets, {}))
     }
+
+    resource_groups = {
+      for key, value in try(var.landingzone.tfstates, {}) : key => merge(try(data.terraform_remote_state.remote[key].outputs.objects[key].resource_groups, {}))
+    }
   }
 }
@@ -11,7 +11,7 @@
     }
     null = {
       source  = "hashicorp/null"
-      version = "~> 2.1.0"
+      version = "~> 3.1.0"
     }
     external = {
       source = "hashicorp/external"
@@ -19,18 +19,17 @@ terraform {
     }
     azuredevops = {
      source  = "microsoft/azuredevops"
-      version = "~> 0.1.1"
+      version = "~> 0.5.0"
     }
     tls = {
       source  = "hashicorp/tls"
-      version = "~> 2.2.0"
+      version = "~> 3.1.0"
     }
     azurecaf = {
       source  = "aztfmod/azurecaf"
-      version = "~> 1.2.0"
     }
   }
   required_version = ">= 0.13"
 }

 provider "azurerm" {
@@ -43,6 +42,12 @@ provider "azurerm" {
   }
 }

+provider "azurerm" {
+  alias                      = "vhub"
+  skip_provider_registration = true
+  features {}
+}
+
 data "azurerm_client_config" "current" {}


@@ -1,6 +1,10 @@
 module "caf" {
   source  = "aztfmod/caf/azurerm"
-  version = "~>5.4.2"
+  version = "~>5.6.8"

+  providers = {
+    azurerm.vhub = azurerm.vhub
+  }
+
   azuread                 = local.azuread
   current_landingzone_key = var.landingzone.key
@@ -19,11 +23,13 @@ module "caf" {
   managed_identities      = var.managed_identities
   role_mapping            = var.role_mapping
   custom_role_definitions = var.custom_role_definitions
   var_folder_path         = var.var_folder_path
   compute = {
     virtual_machines = var.virtual_machines
   }
   storage = {
     storage_account_blobs = var.storage_account_blobs
     storage_containers    = var.storage_containers
   }

   # Pass the remote objects you need to connect to.
@@ -32,5 +38,6 @@ module "caf" {
     vnets              = local.remote.vnets
     managed_identities = local.remote.managed_identities
     azuread_groups     = local.remote.azuread_groups
+    resource_groups    = local.remote.resource_groups
   }
 }
@@ -1,6 +1,6 @@
 module "dynamic_keyvault_secrets" {
   source  = "aztfmod/caf/azurerm//modules/security/dynamic_keyvault_secrets"
-  version = "5.6.8"
+  version = "5.6.9"

   for_each = {
     for keyvault_key, secrets in try(var.dynamic_keyvault_secrets, {}) : keyvault_key => {
@@ -1,6 +1,6 @@
 module "solution" {
   source  = "aztfmod/caf/azurerm"
-  version = "5.6.8"
+  version = "5.6.9"

   providers = {
     azurerm.vhub = azurerm.vhub
@@ -51,6 +51,7 @@ module "solution" {
   tenant_id       = var.tenant_id
   tfstates        = var.tfstates
   user_type       = var.user_type
   var_folder_path = var.var_folder_path
   webapp          = local.webapp

   diagnostics = {
@@ -45,12 +45,12 @@ provider "azurerm" {
     purge_soft_delete_on_destroy = var.provider_azurerm_features_cognitive_account.purge_soft_delete_on_destroy
   }
   key_vault {
-    purge_soft_delete_on_destroy = var.provider_azurerm_features_keyvault.purge_soft_delete_on_destroy
+    purge_soft_delete_on_destroy = try(var.provider_azurerm_features_keyvault.purge_soft_delete_on_destroy, false)
     # purge_soft_deleted_certificates_on_destroy = var.provider_azurerm_features_keyvault.purge_soft_deleted_certificates_on_destroy
     # purge_soft_deleted_keys_on_destroy = var.provider_azurerm_features_keyvault.purge_soft_deleted_keys_on_destroy
     # purge_soft_deleted_secrets_on_destroy = var.provider_azurerm_features_keyvault.purge_soft_deleted_secrets_on_destroy
     # recover_soft_deleted_certificates = var.provider_azurerm_features_keyvault.recover_soft_deleted_certificates
-    # recover_soft_deleted_key_vaults = var.provider_azurerm_features_keyvault.recover_soft_deleted_key_vaults
+    recover_soft_deleted_key_vaults = try(var.provider_azurerm_features_keyvault.recover_soft_deleted_key_vaults, true)
     # recover_soft_deleted_keys = var.provider_azurerm_features_keyvault.recover_soft_deleted_keys
     # recover_soft_deleted_secrets = var.provider_azurerm_features_keyvault.recover_soft_deleted_secrets
   }
@@ -1,6 +1,6 @@
 variable "provider_azurerm_features_api_management" {
   default = {
-    purge_soft_delete_on_destroy         = true
+    purge_soft_delete_on_destroy         = false
     recover_soft_deleted_api_managements = true
   }
 }
@@ -13,7 +13,7 @@ variable "provider_azurerm_features_application_insights" {

 variable "provider_azurerm_features_cognitive_account" {
   default = {
-    purge_soft_delete_on_destroy = true
+    purge_soft_delete_on_destroy = false
   }
 }

@@ -32,19 +32,19 @@ variable "provider_azurerm_features_keyvault" {

 variable "provider_azurerm_features_log_analytics_workspace" {
   default = {
-    permanently_delete_on_destroy = true
+    permanently_delete_on_destroy = false
   }
 }

 variable "provider_azurerm_features_resource_group" {
   default = {
-    prevent_deletion_if_contains_resources = false
+    prevent_deletion_if_contains_resources = true
   }
 }

 variable "provider_azurerm_features_template_deployment" {
   default = {
-    delete_nested_items_during_deletion = false
+    delete_nested_items_during_deletion = true
   }
 }

@@ -11,7 +11,7 @@
 version: '3.7'
 services:
   rover:
-    image: aztfmod/rover:1.2.1-2206.1703
+    image: aztfmod/rover:1.4.6-2305.1807

     user: vscode
