diff --git a/services/terraform/dev/main.tf b/services/terraform/dev/main.tf
index 7dd1c84ed..4436a648c 100644
--- a/services/terraform/dev/main.tf
+++ b/services/terraform/dev/main.tf
@@ -1,47 +1,52 @@
 variable "localstack_endpoint" {
   type    = string
   default = "http://localhost:4566"
 }
 
 locals {
   aws_settings = ({
     region                      = "us-east-2"
     access_key                  = "fake"
     secret_key                  = "fake"
     skip_credentials_validation = true
     skip_metadata_api_check     = true
     skip_requesting_account_id  = true
     s3_use_path_style           = true
     override_endpoint           = var.localstack_endpoint
   })
 }
 
 provider "aws" {
   region                      = local.aws_settings.region
   access_key                  = local.aws_settings.access_key
   secret_key                  = local.aws_settings.secret_key
   skip_credentials_validation = local.aws_settings.skip_credentials_validation
   skip_metadata_api_check     = local.aws_settings.skip_metadata_api_check
   skip_requesting_account_id  = local.aws_settings.skip_requesting_account_id
   s3_use_path_style           = local.aws_settings.s3_use_path_style
 
   dynamic "endpoints" {
     for_each = local.aws_settings.override_endpoint[*]
     content {
+      opensearch     = endpoints.value
       dynamodb       = endpoints.value
       s3             = endpoints.value
       secretsmanager = endpoints.value
     }
   }
 }
 
 provider "random" {}
 
 # Shared resources between local dev environment and remote AWS
 module "shared" {
   source = "../modules/shared"
 
   is_dev = true
+
+  vpc_id     = null
+  cidr_block = null
+  subnet_ids = []
 }
diff --git a/services/terraform/modules/shared/opensearch.tf b/services/terraform/modules/shared/opensearch.tf
new file mode 100644
index 000000000..e9a4d8662
--- /dev/null
+++ b/services/terraform/modules/shared/opensearch.tf
@@ -0,0 +1,54 @@
+variable "domain" {
+  default = "identity-search-domain"
+}
+
+resource "aws_security_group" "identity-search" {
+  count       = var.is_dev ? 0 : 1
+  name        = "${var.vpc_id}-opensearch-service-${var.domain}"
+  description = "Managed by Terraform"
+  vpc_id      = var.is_dev ? null : var.vpc_id
+
+  ingress {
+    from_port = 443
+    to_port   = 443
+    protocol  = "tcp"
+
+    cidr_blocks = [
+      var.cidr_block
+    ]
+  }
+
+  tags = {
+    Name        = "${var.vpc_id}-opensearch-service-${var.domain}"
+    Environment = var.is_dev ? "development" : "production"
+  }
+}
+
+resource "aws_opensearch_domain" "identity-search" {
+  domain_name    = var.domain
+  engine_version = "OpenSearch_1.0"
+
+  cluster_config {
+    instance_type = "t3.medium.search"
+  }
+
+  vpc_options {
+    subnet_ids = var.subnet_ids
+
+    security_group_ids = var.is_dev ? [] : [aws_security_group.identity-search[0].id]
+  }
+
+  advanced_options = {
+    "rest.action.multi.allow_explicit_index" = "true"
+  }
+
+  ebs_options {
+    ebs_enabled = true
+    volume_size = 10
+  }
+
+  tags = {
+    Name        = var.domain
+    Environment = var.is_dev ? "development" : "production"
+  }
+}
diff --git a/services/terraform/modules/shared/outputs.tf b/services/terraform/modules/shared/outputs.tf
index e06628ff6..2f9ac5c01 100644
--- a/services/terraform/modules/shared/outputs.tf
+++ b/services/terraform/modules/shared/outputs.tf
@@ -1,16 +1,21 @@
 locals {
   exported_dynamodb_tables = [
     aws_dynamodb_table.feature-flags,
     aws_dynamodb_table.backup-service-backup,
     aws_dynamodb_table.reports-service-reports,
     aws_dynamodb_table.tunnelbroker-undelivered-messages,
   ]
 }
 
 # map table names to their resources
 output "dynamodb_tables" {
   value = {
     for table in local.exported_dynamodb_tables :
     table.name => table
   }
 }
+
+
+output "opensearch_domain_identity" {
+  value = aws_opensearch_domain.identity-search
+}
diff --git a/services/terraform/modules/shared/variables.tf b/services/terraform/modules/shared/variables.tf
index eb49ab115..836a8099d 100644
--- a/services/terraform/modules/shared/variables.tf
+++ b/services/terraform/modules/shared/variables.tf
@@ -1,10 +1,16 @@
 variable "is_dev" {
   type    = bool
   default = false
 }
 
 variable "bucket_name_suffix" {
   type        = string
   default     = ""
   description = "Suffix added to all bucket names"
 }
+
+variable "vpc_id" {}
+
+variable "cidr_block" {}
+
+variable "subnet_ids" {}
diff --git a/services/terraform/remote/aws_iam.tf b/services/terraform/remote/aws_iam.tf
index 71f4877ae..e216d279b 100644
--- a/services/terraform/remote/aws_iam.tf
+++ b/services/terraform/remote/aws_iam.tf
@@ -1,196 +1,222 @@
 ### General AWS Utility IAM resources
 
 # Docs: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/instance_IAM_role.html
 resource "aws_iam_role" "ecs_instance_role" {
   name        = "ecsInstanceRole"
   description = "Allows EC2 instances to call AWS services on your behalf."
 
   assume_role_policy = jsonencode({
     Version = "2012-10-17"
     Statement = [
       {
         Action = "sts:AssumeRole"
         Effect = "Allow"
         Principal = {
           Service = "ec2.amazonaws.com"
         }
       }
     ]
   })
 
   managed_policy_arns = [
     "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceforEC2Role",
     # Let instances download Docker images from ECR
     "arn:aws:iam::aws:policy/AmazonEC2ContainerRegistryReadOnly"
   ]
 }
 
 # ECS Task execution role
 # Docs: https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html
 resource "aws_iam_role" "ecs_task_execution" {
   name = "ecsTaskExecutionRole"
 
   assume_role_policy = jsonencode({
     Version = "2008-10-17"
     Statement = [
       {
         Sid    = ""
         Action = "sts:AssumeRole"
         Effect = "Allow"
         Principal = {
           Service = "ecs-tasks.amazonaws.com"
         }
       }
     ]
   })
 
   managed_policy_arns = [
     "arn:aws:iam::aws:policy/AmazonSSMReadOnlyAccess",
     "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy",
     # Let ECS write logs to CloudWatch
     "arn:aws:iam::aws:policy/CloudWatchLogsFullAccess",
     # Let ECS tasks access secrets to expose them as env vars
     "arn:aws:iam::aws:policy/SecretsManagerReadWrite",
   ]
 }
 
 # Assume Role Policy Document for EC2 and ECS
 # This policy allows ECS and EC2 use roles that it is assigned to
 data "aws_iam_policy_document" "assume_role_ecs_ec2" {
   statement {
     effect = "Allow"
     actions = [
       "sts:AssumeRole",
     ]
     principals {
       type = "Service"
       identifiers = [
         "ec2.amazonaws.com",
         "ecs-tasks.amazonaws.com"
       ]
     }
   }
 }
 
 # Allows ECS Exec to SSH into service task containers
 resource "aws_iam_policy" "allow_ecs_exec" {
   name        = "allow-ecs-exec"
   description = "Adds SSM permissions to enable ECS Exec"
 
   policy = jsonencode({
     Version = "2012-10-17"
     Statement = [
       {
         Effect = "Allow"
         Action = [
           "ssmmessages:CreateControlChannel",
           "ssmmessages:CreateDataChannel",
           "ssmmessages:OpenControlChannel",
           "ssmmessages:OpenDataChannel"
         ]
         Resource = "*"
       }
     ]
   })
 }
 
 ### App IAM resources
 
 # Our app role - this is to give access to DynamoDB etc
 # Has trust policy with EC2 and ECS
 # Also allows to SSH into containers
 resource "aws_iam_role" "services_ddb_full_access" {
   name        = "dynamodb-s3-full-access"
   description = "Full RW access to DDB and S3. Allows to SSH into ECS containers"
 
   assume_role_policy = data.aws_iam_policy_document.assume_role_ecs_ec2.json
   managed_policy_arns = [
     aws_iam_policy.allow_ecs_exec.arn,
     "arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess",
     "arn:aws:iam::aws:policy/AmazonS3FullAccess",
   ]
 }
 
 # Feature Flags IAM
 data "aws_iam_policy_document" "read_feature_flags" {
   statement {
     sid    = "FeatureFlagsDDBReadAccess"
     effect = "Allow"
     actions = [
       "dynamodb:BatchGetItem",
       "dynamodb:GetItem",
       "dynamodb:Query",
       "dynamodb:Scan",
     ]
     resources = [
       module.shared.dynamodb_tables["feature-flags"].arn
     ]
   }
 }
 
 resource "aws_iam_policy" "read_feature_flags" {
   name        = "feature-flags-ddb-read-access"
   policy      = data.aws_iam_policy_document.read_feature_flags.json
   description = "Allows full read access to feature-flags DynamoDB table"
 }
 
 resource "aws_iam_role" "feature_flags_service" {
   name               = "feature-flags-service-role"
   assume_role_policy = data.aws_iam_policy_document.assume_role_ecs_ec2.json
   managed_policy_arns = [
     aws_iam_policy.read_feature_flags.arn
   ]
 }
 
 # Backup Service IAM
 data "aws_iam_policy_document" "manage_backup_ddb" {
   statement {
     sid    = "BackupFullDDBAccess"
     effect = "Allow"
     actions = [
       "dynamodb:*",
     ]
     resources = [
       module.shared.dynamodb_tables["backup-service-backup"].arn,
       "${module.shared.dynamodb_tables["backup-service-backup"].arn}/index/*"
     ]
   }
 }
 
 resource "aws_iam_policy" "manage_backup_ddb" {
   name        = "backup-ddb-full-access"
   policy      = data.aws_iam_policy_document.manage_backup_ddb.json
   description = "Allows full access to backup DynamoDB table"
 }
 
 resource "aws_iam_role" "backup_service" {
   name               = "backup-service-role"
   assume_role_policy = data.aws_iam_policy_document.assume_role_ecs_ec2.json
   managed_policy_arns = [
     aws_iam_policy.allow_ecs_exec.arn,
     aws_iam_policy.manage_backup_ddb.arn
   ]
 }
 
 # Reports Service IAM
 data "aws_iam_policy_document" "manage_reports_ddb" {
   statement {
     sid    = "ReportsFullDDBAccess"
     effect = "Allow"
     actions = [
       "dynamodb:*",
     ]
     resources = [
       module.shared.dynamodb_tables["reports-service-reports"].arn
     ]
   }
 }
 
 resource "aws_iam_policy" "manage_reports_ddb" {
   name        = "reports-ddb-full-access"
   policy      = data.aws_iam_policy_document.manage_reports_ddb.json
   description = "Allows full access to reports DynamoDB table"
 }
 
 resource "aws_iam_role" "reports_service" {
   name               = "reports-service-role"
   assume_role_policy = data.aws_iam_policy_document.assume_role_ecs_ec2.json
   managed_policy_arns = [
     aws_iam_policy.allow_ecs_exec.arn,
     aws_iam_policy.manage_reports_ddb.arn
   ]
 }
+
+data "aws_iam_policy_document" "opensearch_domain_access" {
+  statement {
+    effect = "Allow"
+
+    principals {
+      type        = "*"
+      identifiers = ["*"]
+    }
+
+    actions = [
+      "es:ESHttpHead",
+      "es:ESHttpPost",
+      "es:ESHttpGet",
+      "es:ESHttpDelete",
+      "es:ESHttpPut",
+    ]
+
+    resources = ["${module.shared.opensearch_domain_identity.arn}/*"]
+  }
+}
+
+resource "aws_opensearch_domain_policy" "opensearch_domain_access" {
+  domain_name     = module.shared.opensearch_domain_identity.domain_name
+  access_policies = data.aws_iam_policy_document.opensearch_domain_access.json
+}
diff --git a/services/terraform/remote/main.tf b/services/terraform/remote/main.tf
index e6fce03eb..f8d3e0b11 100644
--- a/services/terraform/remote/main.tf
+++ b/services/terraform/remote/main.tf
@@ -1,61 +1,67 @@
 terraform {
   backend "s3" {
     region         = "us-east-2"
     key            = "terraform.tfstate"
     bucket         = "commapp-terraform"
     dynamodb_table = "terraform-lock"
     encrypt        = true
   }
 }
 
 provider "random" {}
 
 provider "sops" {}
 
 data "sops_file" "secrets_json" {
   source_file = "secrets.json"
 }
 
 locals {
   environment = terraform.workspace
   is_staging  = local.environment == "staging"
 
   secrets            = jsondecode(data.sops_file.secrets_json.raw)
   target_account_id  = lookup(local.secrets.accountIDs, local.environment)
   terraform_role_arn = "arn:aws:iam::${local.target_account_id}:role/Terraform"
 }
 
 provider "aws" {
   region = "us-east-2"
   assume_role {
     role_arn    = local.terraform_role_arn
     external_id = "terraform"
   }
 
   # automatically add these tags to all resources
   default_tags {
     tags = {
       # Helps to distinguish which resources are managed by Terraform
       managed_by = "terraform"
     }
   }
 }
 
 locals {
   # S3 bucket names are globally unique so we add a suffix to staging buckets
   s3_bucket_name_suffix = local.is_staging ? "-staging" : ""
 }
 
 # Shared resources between local dev environment and remote AWS
 module "shared" {
   source = "../modules/shared"
 
   bucket_name_suffix = local.s3_bucket_name_suffix
+
+  vpc_id     = aws_vpc.default.id
+  cidr_block = aws_vpc.default.cidr_block
+  subnet_ids = [
+    aws_subnet.public_a.id,
+  ]
 }
 
 check "workspace_check" {
   assert {
     condition     = terraform.workspace == "staging" || terraform.workspace == "production"
     error_message = "Terraform workspace must be either 'staging' or 'production'!"
   }
 }