
I work with AWS and mainly use Terraform for infrastructure. I want to implement Amazon AppFlow to move data from Salesforce to an S3 bucket. In the console, AppFlow is a wizard that has to be set up step by step.

I assume you cannot use Terraform to implement this, right? Is this thinking correct?


1 Answer


Yes, you can use Terraform to deploy AppFlow resources. There are two providers you can use: the AWS Provider or the AWS Cloud Control (awscc) Provider. I have had more luck with AWS Cloud Control so far, as it is designed to pick up new resource types sooner. It supports connectors, connector profiles, and flows, including custom connectors. The AWS Provider supports connector profiles and flows (its aws_appflow_flow resource is used in the example below), but I have found it doesn't have good support for custom connectors yet.

Right now, I'd recommend the Cloud Control provider.
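
You can run the awscc provider alongside the regular AWS provider. A minimal setup sketch (the region is a placeholder; pin versions as you see fit):

terraform {
  required_providers {
    aws = {
      source = "hashicorp/aws"
    }
    awscc = {
      source = "hashicorp/awscc"
    }
  }
}

# Both providers use the standard AWS credential chain
provider "aws" {
  region = "us-east-1"
}

provider "awscc" {
  region = "us-east-1"
}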

Here is a good introduction.

https://www.hashicorp.com/resources/using-the-terraform-aws-cloud-control-provider

And here is the AWS Cloud Control Provider's AppFlow connector resource.

https://registry.terraform.io/providers/hashicorp/awscc/latest/docs/resources/appflow_connector
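
As a rough sketch, registering a custom connector (the thing I found the AWS Provider can't do well yet) looks like this in awscc. The attribute names mirror the CloudFormation AWS::AppFlow::Connector schema, and the label and Lambda ARN are placeholders, so check them against the resource docs above:

resource "awscc_appflow_connector" "custom" {
  connector_label             = "my-custom-connector" # placeholder label
  connector_provisioning_type = "LAMBDA"

  connector_provisioning_config = {
    lambda = {
      # Placeholder ARN of the Lambda that implements the Custom Connector SDK
      lambda_arn = "arn:aws:lambda:us-east-1:111122223333:function:my-connector"
    }
  }

  description = "Custom connector registered via Cloud Control"
}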

And here are the AWS Provider's AppFlow resources, followed by a working S3-to-S3 example flow adapted from those docs:

https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/appflow_connector_profile

resource "aws_s3_bucket" "example_source" {
  bucket = "example_source"
}

resource "aws_s3_bucket_policy" "example_source" {
  bucket = aws_s3_bucket.example_source.id
  policy = <<EOF
{
    "Statement": [
        {
            "Effect": "Allow",
            "Sid": "AllowAppFlowSourceActions",
            "Principal": {
                "Service": "appflow.amazonaws.com"
            },
            "Action": [
                "s3:ListBucket",
                "s3:GetObject"
            ],
            "Resource": [
                "arn:aws:s3:::example_source",
                "arn:aws:s3:::example_source/*"
            ]
        }
    ],
    "Version": "2012-10-17"
}
EOF
}

resource "aws_s3_object" "example" {
  bucket = aws_s3_bucket.example_source.id
  key    = "example_source.csv"
  source = "example_source.csv"
}

resource "aws_s3_bucket" "example_destination" {
  bucket = "example_destination"
}

resource "aws_s3_bucket_policy" "example_destination" {
  bucket = aws_s3_bucket.example_destination.id
  policy = <<EOF
{
    "Statement": [
        {
            "Effect": "Allow",
            "Sid": "AllowAppFlowDestinationActions",
            "Principal": {
                "Service": "appflow.amazonaws.com"
            },
            "Action": [
                "s3:PutObject",
                "s3:AbortMultipartUpload",
                "s3:ListMultipartUploadParts",
                "s3:ListBucketMultipartUploads",
                "s3:GetBucketAcl",
                "s3:PutObjectAcl"
            ],
            "Resource": [
                "arn:aws:s3:::example_destination",
                "arn:aws:s3:::example_destination/*"
            ]
        }
    ],
    "Version": "2012-10-17"
}
EOF
}

resource "aws_appflow_flow" "example" {
  name = "example"

  source_flow_config {
    connector_type = "S3"
    source_connector_properties {
      s3 {
        bucket_name   = aws_s3_bucket_policy.example_source.bucket
        bucket_prefix = "example"
      }
    }
  }

  destination_flow_config {
    connector_type = "S3"
    destination_connector_properties {
      s3 {
        bucket_name = aws_s3_bucket_policy.example_destination.bucket

        s3_output_format_config {
          prefix_config {
            prefix_type = "PATH"
          }
        }
      }
    }
  }

  task {
    source_fields     = ["exampleField"]
    destination_field = "exampleField"
    task_type         = "Map"

    connector_operator {
      s3 = "NO_OP"
    }
  }

  trigger_config {
    trigger_type = "OnDemand"
  }
}
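
Since your actual source is Salesforce rather than S3, here is a sketch of what changes. The Salesforce connection is OAuth-based, so the tokens and instance URL below are placeholders (a common approach is to create the connection once in the console and just reference its name in connector_profile_name); the object name and mapped field are examples as well:

variable "salesforce_access_token" {
  type      = string
  sensitive = true
}

variable "salesforce_refresh_token" {
  type      = string
  sensitive = true
}

resource "aws_appflow_connector_profile" "salesforce" {
  name            = "example-salesforce"
  connector_type  = "Salesforce"
  connection_mode = "Public"

  connector_profile_config {
    connector_profile_credentials {
      salesforce {
        # Placeholder OAuth tokens from your Salesforce connected app
        access_token  = var.salesforce_access_token
        refresh_token = var.salesforce_refresh_token
      }
    }
    connector_profile_properties {
      salesforce {
        instance_url = "https://example.my.salesforce.com"
      }
    }
  }
}

resource "aws_appflow_flow" "salesforce_to_s3" {
  name = "salesforce-to-s3"

  source_flow_config {
    connector_type         = "Salesforce"
    connector_profile_name = aws_appflow_connector_profile.salesforce.name
    source_connector_properties {
      salesforce {
        object = "Account" # the Salesforce object to pull
      }
    }
  }

  destination_flow_config {
    connector_type = "S3"
    destination_connector_properties {
      s3 {
        bucket_name = aws_s3_bucket_policy.example_destination.bucket

        s3_output_format_config {
          prefix_config {
            prefix_type = "PATH"
          }
        }
      }
    }
  }

  task {
    source_fields     = ["Id"]
    destination_field = "Id"
    task_type         = "Map"

    connector_operator {
      salesforce = "NO_OP"
    }
  }

  trigger_config {
    trigger_type = "OnDemand"
  }
}

One caveat: flows the console builds for Salesforce usually also include a Filter task with the PROJECTION operator listing the fields to pull, so if a run fails, compare your tasks against a console-created flow.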