aws.appflow.Flow

AWS v6.54.0 published on Friday, Sep 27, 2024 by Pulumi

    Provides an AppFlow flow resource.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const exampleSourceBucketV2 = new aws.s3.BucketV2("example_source", {bucket: "example-source"});
    const exampleSource = aws.iam.getPolicyDocument({
        statements: [{
            sid: "AllowAppFlowSourceActions",
            effect: "Allow",
            principals: [{
                type: "Service",
                identifiers: ["appflow.amazonaws.com"],
            }],
            actions: [
                "s3:ListBucket",
                "s3:GetObject",
            ],
            resources: [
                "arn:aws:s3:::example-source",
                "arn:aws:s3:::example-source/*",
            ],
        }],
    });
    const exampleSourceBucketPolicy = new aws.s3.BucketPolicy("example_source", {
        bucket: exampleSourceBucketV2.id,
        policy: exampleSource.then(exampleSource => exampleSource.json),
    });
    const example = new aws.s3.BucketObjectv2("example", {
        bucket: exampleSourceBucketV2.id,
        key: "example_source.csv",
        source: new pulumi.asset.FileAsset("example_source.csv"),
    });
    const exampleDestinationBucketV2 = new aws.s3.BucketV2("example_destination", {bucket: "example-destination"});
    const exampleDestination = aws.iam.getPolicyDocument({
        statements: [{
            sid: "AllowAppFlowDestinationActions",
            effect: "Allow",
            principals: [{
                type: "Service",
                identifiers: ["appflow.amazonaws.com"],
            }],
            actions: [
                "s3:PutObject",
                "s3:AbortMultipartUpload",
                "s3:ListMultipartUploadParts",
                "s3:ListBucketMultipartUploads",
                "s3:GetBucketAcl",
                "s3:PutObjectAcl",
            ],
            resources: [
                "arn:aws:s3:::example-destination",
                "arn:aws:s3:::example-destination/*",
            ],
        }],
    });
    const exampleDestinationBucketPolicy = new aws.s3.BucketPolicy("example_destination", {
        bucket: exampleDestinationBucketV2.id,
        policy: exampleDestination.then(exampleDestination => exampleDestination.json),
    });
    const exampleFlow = new aws.appflow.Flow("example", {
        name: "example",
        sourceFlowConfig: {
            connectorType: "S3",
            sourceConnectorProperties: {
                s3: {
                    bucketName: exampleSourceBucketPolicy.bucket,
                    bucketPrefix: "example",
                },
            },
        },
        destinationFlowConfigs: [{
            connectorType: "S3",
            destinationConnectorProperties: {
                s3: {
                    bucketName: exampleDestinationBucketPolicy.bucket,
                    s3OutputFormatConfig: {
                        prefixConfig: {
                            prefixType: "PATH",
                        },
                    },
                },
            },
        }],
        tasks: [{
            sourceFields: ["exampleField"],
            destinationField: "exampleField",
            taskType: "Map",
            connectorOperators: [{
                s3: "NO_OP",
            }],
        }],
        triggerConfig: {
            triggerType: "OnDemand",
        },
    });
    
    import pulumi
    import pulumi_aws as aws
    
    example_source_bucket_v2 = aws.s3.BucketV2("example_source", bucket="example-source")
    example_source = aws.iam.get_policy_document(statements=[{
        "sid": "AllowAppFlowSourceActions",
        "effect": "Allow",
        "principals": [{
            "type": "Service",
            "identifiers": ["appflow.amazonaws.com"],
        }],
        "actions": [
            "s3:ListBucket",
            "s3:GetObject",
        ],
        "resources": [
            "arn:aws:s3:::example-source",
            "arn:aws:s3:::example-source/*",
        ],
    }])
    example_source_bucket_policy = aws.s3.BucketPolicy("example_source",
        bucket=example_source_bucket_v2.id,
        policy=example_source.json)
    example = aws.s3.BucketObjectv2("example",
        bucket=example_source_bucket_v2.id,
        key="example_source.csv",
        source=pulumi.FileAsset("example_source.csv"))
    example_destination_bucket_v2 = aws.s3.BucketV2("example_destination", bucket="example-destination")
    example_destination = aws.iam.get_policy_document(statements=[{
        "sid": "AllowAppFlowDestinationActions",
        "effect": "Allow",
        "principals": [{
            "type": "Service",
            "identifiers": ["appflow.amazonaws.com"],
        }],
        "actions": [
            "s3:PutObject",
            "s3:AbortMultipartUpload",
            "s3:ListMultipartUploadParts",
            "s3:ListBucketMultipartUploads",
            "s3:GetBucketAcl",
            "s3:PutObjectAcl",
        ],
        "resources": [
            "arn:aws:s3:::example-destination",
            "arn:aws:s3:::example-destination/*",
        ],
    }])
    example_destination_bucket_policy = aws.s3.BucketPolicy("example_destination",
        bucket=example_destination_bucket_v2.id,
        policy=example_destination.json)
    example_flow = aws.appflow.Flow("example",
        name="example",
        source_flow_config={
            "connector_type": "S3",
            "source_connector_properties": {
                "s3": {
                    "bucket_name": example_source_bucket_policy.bucket,
                    "bucket_prefix": "example",
                },
            },
        },
        destination_flow_configs=[{
            "connector_type": "S3",
            "destination_connector_properties": {
                "s3": {
                    "bucket_name": example_destination_bucket_policy.bucket,
                    "s3_output_format_config": {
                        "prefix_config": {
                            "prefix_type": "PATH",
                        },
                    },
                },
            },
        }],
        tasks=[{
            "source_fields": ["exampleField"],
            "destination_field": "exampleField",
            "task_type": "Map",
            "connector_operators": [{
                "s3": "NO_OP",
            }],
        }],
        trigger_config={
            "trigger_type": "OnDemand",
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/appflow"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/s3"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleSourceBucketV2, err := s3.NewBucketV2(ctx, "example_source", &s3.BucketV2Args{
    			Bucket: pulumi.String("example-source"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleSource, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
    			Statements: []iam.GetPolicyDocumentStatement{
    				{
    					Sid:    pulumi.StringRef("AllowAppFlowSourceActions"),
    					Effect: pulumi.StringRef("Allow"),
    					Principals: []iam.GetPolicyDocumentStatementPrincipal{
    						{
    							Type: "Service",
    							Identifiers: []string{
    								"appflow.amazonaws.com",
    							},
    						},
    					},
    					Actions: []string{
    						"s3:ListBucket",
    						"s3:GetObject",
    					},
    					Resources: []string{
    						"arn:aws:s3:::example-source",
    						"arn:aws:s3:::example-source/*",
    					},
    				},
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		exampleSourceBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_source", &s3.BucketPolicyArgs{
    			Bucket: exampleSourceBucketV2.ID(),
    			Policy: pulumi.String(exampleSource.Json),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = s3.NewBucketObjectv2(ctx, "example", &s3.BucketObjectv2Args{
    			Bucket: exampleSourceBucketV2.ID(),
    			Key:    pulumi.String("example_source.csv"),
    			Source: pulumi.NewFileAsset("example_source.csv"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleDestinationBucketV2, err := s3.NewBucketV2(ctx, "example_destination", &s3.BucketV2Args{
    			Bucket: pulumi.String("example-destination"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleDestination, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
    			Statements: []iam.GetPolicyDocumentStatement{
    				{
    					Sid:    pulumi.StringRef("AllowAppFlowDestinationActions"),
    					Effect: pulumi.StringRef("Allow"),
    					Principals: []iam.GetPolicyDocumentStatementPrincipal{
    						{
    							Type: "Service",
    							Identifiers: []string{
    								"appflow.amazonaws.com",
    							},
    						},
    					},
    					Actions: []string{
    						"s3:PutObject",
    						"s3:AbortMultipartUpload",
    						"s3:ListMultipartUploadParts",
    						"s3:ListBucketMultipartUploads",
    						"s3:GetBucketAcl",
    						"s3:PutObjectAcl",
    					},
    					Resources: []string{
    						"arn:aws:s3:::example-destination",
    						"arn:aws:s3:::example-destination/*",
    					},
    				},
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		exampleDestinationBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_destination", &s3.BucketPolicyArgs{
    			Bucket: exampleDestinationBucketV2.ID(),
    			Policy: pulumi.String(exampleDestination.Json),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = appflow.NewFlow(ctx, "example", &appflow.FlowArgs{
    			Name: pulumi.String("example"),
    			SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
    				ConnectorType: pulumi.String("S3"),
    				SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
    					S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
    						BucketName:   exampleSourceBucketPolicy.Bucket,
    						BucketPrefix: pulumi.String("example"),
    					},
    				},
    			},
    			DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
    				&appflow.FlowDestinationFlowConfigArgs{
    					ConnectorType: pulumi.String("S3"),
    					DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
    						S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
    							BucketName: exampleDestinationBucketPolicy.Bucket,
    							S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
    								PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
    									PrefixType: pulumi.String("PATH"),
    								},
    							},
    						},
    					},
    				},
    			},
    			Tasks: appflow.FlowTaskArray{
    				&appflow.FlowTaskArgs{
    					SourceFields: pulumi.StringArray{
    						pulumi.String("exampleField"),
    					},
    					DestinationField: pulumi.String("exampleField"),
    					TaskType:         pulumi.String("Map"),
    					ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
    						&appflow.FlowTaskConnectorOperatorArgs{
    							S3: pulumi.String("NO_OP"),
    						},
    					},
    				},
    			},
    			TriggerConfig: &appflow.FlowTriggerConfigArgs{
    				TriggerType: pulumi.String("OnDemand"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleSourceBucketV2 = new Aws.S3.BucketV2("example_source", new()
        {
            Bucket = "example-source",
        });
    
        var exampleSource = Aws.Iam.GetPolicyDocument.Invoke(new()
        {
            Statements = new[]
            {
                new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
                {
                    Sid = "AllowAppFlowSourceActions",
                    Effect = "Allow",
                    Principals = new[]
                    {
                        new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                        {
                            Type = "Service",
                            Identifiers = new[]
                            {
                                "appflow.amazonaws.com",
                            },
                        },
                    },
                    Actions = new[]
                    {
                        "s3:ListBucket",
                        "s3:GetObject",
                    },
                    Resources = new[]
                    {
                        "arn:aws:s3:::example-source",
                        "arn:aws:s3:::example-source/*",
                    },
                },
            },
        });
    
        var exampleSourceBucketPolicy = new Aws.S3.BucketPolicy("example_source", new()
        {
            Bucket = exampleSourceBucketV2.Id,
            Policy = exampleSource.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
        });
    
        var example = new Aws.S3.BucketObjectv2("example", new()
        {
            Bucket = exampleSourceBucketV2.Id,
            Key = "example_source.csv",
            Source = new FileAsset("example_source.csv"),
        });
    
        var exampleDestinationBucketV2 = new Aws.S3.BucketV2("example_destination", new()
        {
            Bucket = "example-destination",
        });
    
        var exampleDestination = Aws.Iam.GetPolicyDocument.Invoke(new()
        {
            Statements = new[]
            {
                new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
                {
                    Sid = "AllowAppFlowDestinationActions",
                    Effect = "Allow",
                    Principals = new[]
                    {
                        new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                        {
                            Type = "Service",
                            Identifiers = new[]
                            {
                                "appflow.amazonaws.com",
                            },
                        },
                    },
                    Actions = new[]
                    {
                        "s3:PutObject",
                        "s3:AbortMultipartUpload",
                        "s3:ListMultipartUploadParts",
                        "s3:ListBucketMultipartUploads",
                        "s3:GetBucketAcl",
                        "s3:PutObjectAcl",
                    },
                    Resources = new[]
                    {
                        "arn:aws:s3:::example-destination",
                        "arn:aws:s3:::example-destination/*",
                    },
                },
            },
        });
    
        var exampleDestinationBucketPolicy = new Aws.S3.BucketPolicy("example_destination", new()
        {
            Bucket = exampleDestinationBucketV2.Id,
            Policy = exampleDestination.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
        });
    
        var exampleFlow = new Aws.AppFlow.Flow("example", new()
        {
            Name = "example",
            SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
            {
                ConnectorType = "S3",
                SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
                {
                    S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
                    {
                        BucketName = exampleSourceBucketPolicy.Bucket,
                        BucketPrefix = "example",
                    },
                },
            },
            DestinationFlowConfigs = new[]
            {
                new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
                {
                    ConnectorType = "S3",
                    DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
                    {
                        S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                        {
                            BucketName = exampleDestinationBucketPolicy.Bucket,
                            S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                            {
                                PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                                {
                                    PrefixType = "PATH",
                                },
                            },
                        },
                    },
                },
            },
            Tasks = new[]
            {
                new Aws.AppFlow.Inputs.FlowTaskArgs
                {
                    SourceFields = new[]
                    {
                        "exampleField",
                    },
                    DestinationField = "exampleField",
                    TaskType = "Map",
                    ConnectorOperators = new[]
                    {
                        new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                        {
                            S3 = "NO_OP",
                        },
                    },
                },
            },
            TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
            {
                TriggerType = "OnDemand",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.s3.BucketV2;
    import com.pulumi.aws.s3.BucketV2Args;
    import com.pulumi.aws.iam.IamFunctions;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
    import com.pulumi.aws.s3.BucketPolicy;
    import com.pulumi.aws.s3.BucketPolicyArgs;
    import com.pulumi.aws.s3.BucketObjectv2;
    import com.pulumi.aws.s3.BucketObjectv2Args;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTaskArgs;
    import com.pulumi.aws.appflow.inputs.FlowTaskConnectorOperatorArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.asset.FileAsset;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleSourceBucketV2 = new BucketV2("exampleSourceBucketV2", BucketV2Args.builder()
                .bucket("example-source")
                .build());
    
            final var exampleSource = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
                .statements(GetPolicyDocumentStatementArgs.builder()
                    .sid("AllowAppFlowSourceActions")
                    .effect("Allow")
                    .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                        .type("Service")
                        .identifiers("appflow.amazonaws.com")
                        .build())
                    .actions(                
                        "s3:ListBucket",
                        "s3:GetObject")
                    .resources(                
                        "arn:aws:s3:::example-source",
                        "arn:aws:s3:::example-source/*")
                    .build())
                .build());
    
            var exampleSourceBucketPolicy = new BucketPolicy("exampleSourceBucketPolicy", BucketPolicyArgs.builder()
                .bucket(exampleSourceBucketV2.id())
                .policy(exampleSource.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
                .build());
    
            var example = new BucketObjectv2("example", BucketObjectv2Args.builder()
                .bucket(exampleSourceBucketV2.id())
                .key("example_source.csv")
                .source(new FileAsset("example_source.csv"))
                .build());
    
            var exampleDestinationBucketV2 = new BucketV2("exampleDestinationBucketV2", BucketV2Args.builder()
                .bucket("example-destination")
                .build());
    
            final var exampleDestination = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
                .statements(GetPolicyDocumentStatementArgs.builder()
                    .sid("AllowAppFlowDestinationActions")
                    .effect("Allow")
                    .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                        .type("Service")
                        .identifiers("appflow.amazonaws.com")
                        .build())
                    .actions(                
                        "s3:PutObject",
                        "s3:AbortMultipartUpload",
                        "s3:ListMultipartUploadParts",
                        "s3:ListBucketMultipartUploads",
                        "s3:GetBucketAcl",
                        "s3:PutObjectAcl")
                    .resources(                
                        "arn:aws:s3:::example-destination",
                        "arn:aws:s3:::example-destination/*")
                    .build())
                .build());
    
            var exampleDestinationBucketPolicy = new BucketPolicy("exampleDestinationBucketPolicy", BucketPolicyArgs.builder()
                .bucket(exampleDestinationBucketV2.id())
                .policy(exampleDestination.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
                .build());
    
            var exampleFlow = new Flow("exampleFlow", FlowArgs.builder()
                .name("example")
                .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
                    .connectorType("S3")
                    .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
                        .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                            .bucketName(exampleSourceBucketPolicy.bucket())
                            .bucketPrefix("example")
                            .build())
                        .build())
                    .build())
                .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
                    .connectorType("S3")
                    .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
                        .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                            .bucketName(exampleDestinationBucketPolicy.bucket())
                            .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                                .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                                    .prefixType("PATH")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .build())
                .tasks(FlowTaskArgs.builder()
                    .sourceFields("exampleField")
                    .destinationField("exampleField")
                    .taskType("Map")
                    .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
                        .s3("NO_OP")
                        .build())
                    .build())
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerType("OnDemand")
                    .build())
                .build());
    
        }
    }
    
    resources:
      exampleSourceBucketV2:
        type: aws:s3:BucketV2
        name: example_source
        properties:
          bucket: example-source
      exampleSourceBucketPolicy:
        type: aws:s3:BucketPolicy
        name: example_source
        properties:
          bucket: ${exampleSourceBucketV2.id}
          policy: ${exampleSource.json}
      example:
        type: aws:s3:BucketObjectv2
        properties:
          bucket: ${exampleSourceBucketV2.id}
          key: example_source.csv
          source:
            fn::FileAsset: example_source.csv
      exampleDestinationBucketV2:
        type: aws:s3:BucketV2
        name: example_destination
        properties:
          bucket: example-destination
      exampleDestinationBucketPolicy:
        type: aws:s3:BucketPolicy
        name: example_destination
        properties:
          bucket: ${exampleDestinationBucketV2.id}
          policy: ${exampleDestination.json}
      exampleFlow:
        type: aws:appflow:Flow
        name: example
        properties:
          name: example
          sourceFlowConfig:
            connectorType: S3
            sourceConnectorProperties:
              s3:
                bucketName: ${exampleSourceBucketPolicy.bucket}
                bucketPrefix: example
          destinationFlowConfigs:
            - connectorType: S3
              destinationConnectorProperties:
                s3:
                  bucketName: ${exampleDestinationBucketPolicy.bucket}
                  s3OutputFormatConfig:
                    prefixConfig:
                      prefixType: PATH
          tasks:
            - sourceFields:
                - exampleField
              destinationField: exampleField
              taskType: Map
              connectorOperators:
                - s3: NO_OP
          triggerConfig:
            triggerType: OnDemand
    variables:
      exampleSource:
        fn::invoke:
          Function: aws:iam:getPolicyDocument
          Arguments:
            statements:
              - sid: AllowAppFlowSourceActions
                effect: Allow
                principals:
                  - type: Service
                    identifiers:
                      - appflow.amazonaws.com
                actions:
                  - s3:ListBucket
                  - s3:GetObject
                resources:
                  - arn:aws:s3:::example-source
                  - arn:aws:s3:::example-source/*
      exampleDestination:
        fn::invoke:
          Function: aws:iam:getPolicyDocument
          Arguments:
            statements:
              - sid: AllowAppFlowDestinationActions
                effect: Allow
                principals:
                  - type: Service
                    identifiers:
                      - appflow.amazonaws.com
                actions:
                  - s3:PutObject
                  - s3:AbortMultipartUpload
                  - s3:ListMultipartUploadParts
                  - s3:ListBucketMultipartUploads
                  - s3:GetBucketAcl
                  - s3:PutObjectAcl
                resources:
                  - arn:aws:s3:::example-destination
                  - arn:aws:s3:::example-destination/*
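
    The flow reads its bucket names from the bucket policies' bucket outputs rather than from the bucket resources directly, so Pulumi provisions both bucket policies before it creates the flow. Once the flow exists, its outputs can be exported from the program; a minimal TypeScript addition, assuming the exampleFlow resource from the TypeScript example above (arn holds the flow's ARN):

    export const exampleFlowArn = exampleFlow.arn;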
    

    Create Flow Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Flow(name: string, args: FlowArgs, opts?: CustomResourceOptions);
    @overload
    def Flow(resource_name: str,
             args: FlowArgs,
             opts: Optional[ResourceOptions] = None)
    
    @overload
    def Flow(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
             source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
             tasks: Optional[Sequence[FlowTaskArgs]] = None,
             trigger_config: Optional[FlowTriggerConfigArgs] = None,
             description: Optional[str] = None,
             kms_arn: Optional[str] = None,
             metadata_catalog_config: Optional[FlowMetadataCatalogConfigArgs] = None,
             name: Optional[str] = None,
             tags: Optional[Mapping[str, str]] = None)
    func NewFlow(ctx *Context, name string, args FlowArgs, opts ...ResourceOption) (*Flow, error)
    public Flow(string name, FlowArgs args, CustomResourceOptions? opts = null)
    public Flow(String name, FlowArgs args)
    public Flow(String name, FlowArgs args, CustomResourceOptions options)
    
    type: aws:appflow:Flow
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    Node.js / TypeScript
    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python
    resource_name str
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#
    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java
    name String
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior (see the sketch after this list).
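
    The opts/options parameter accepts resource options, passed as the final constructor argument. A minimal TypeScript sketch, assuming flowArgs holds a FlowArgs value like the one in the example above and that exampleSourceBucketPolicy is in scope (both names are illustrative):

    import * as aws from "@pulumi/aws";

    // Resource options go after the resource arguments.
    const flow = new aws.appflow.Flow("example", flowArgs, {
        protect: true,                          // guard the flow against accidental deletion
        dependsOn: [exampleSourceBucketPolicy], // add an explicit ordering dependency if needed
    });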

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var flowResource = new Aws.AppFlow.Flow("flowResource", new()
    {
        DestinationFlowConfigs = new[]
        {
            new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
            {
                ConnectorType = "string",
                DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
                {
                    CustomConnector = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
                    {
                        EntityName = "string",
                        CustomProperties = 
                        {
                            { "string", "string" },
                        },
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                    CustomerProfiles = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
                    {
                        DomainName = "string",
                        ObjectTypeName = "string",
                    },
                    EventBridge = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Honeycode = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    LookoutMetrics = null,
                    Marketo = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Redshift = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
                    {
                        IntermediateBucketName = "string",
                        Object = "string",
                        BucketPrefix = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                        {
                            AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
                            {
                                AggregationType = "string",
                                TargetFileSize = 0,
                            },
                            FileType = "string",
                            PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                            {
                                PrefixFormat = "string",
                                PrefixHierarchies = new[]
                                {
                                    "string",
                                },
                                PrefixType = "string",
                            },
                            PreserveSourceDataTyping = false,
                        },
                    },
                    Salesforce = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                    SapoData = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
                    {
                        ObjectPath = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        SuccessResponseHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                        },
                        WriteOperationType = "string",
                    },
                    Snowflake = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
                    {
                        IntermediateBucketName = "string",
                        Object = "string",
                        BucketPrefix = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Upsolver = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
                    {
                        BucketName = "string",
                        S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
                        {
                            PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
                            {
                                PrefixType = "string",
                                PrefixFormat = "string",
                                PrefixHierarchies = new[]
                                {
                                    "string",
                                },
                            },
                            AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
                            {
                                AggregationType = "string",
                            },
                            FileType = "string",
                        },
                        BucketPrefix = "string",
                    },
                    Zendesk = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                },
                ApiVersion = "string",
                ConnectorProfileName = "string",
            },
        },
        SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
        {
            ConnectorType = "string",
            SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
            {
                Amplitude = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
                {
                    Object = "string",
                },
                CustomConnector = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
                {
                    EntityName = "string",
                    CustomProperties = 
                    {
                        { "string", "string" },
                    },
                },
                Datadog = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
                {
                    Object = "string",
                },
                Dynatrace = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
                {
                    Object = "string",
                },
                GoogleAnalytics = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
                {
                    Object = "string",
                },
                InforNexus = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
                {
                    Object = "string",
                },
                Marketo = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
                {
                    Object = "string",
                },
                S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
                {
                    BucketName = "string",
                    BucketPrefix = "string",
                    S3InputFormatConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
                    {
                        S3InputFileType = "string",
                    },
                },
                Salesforce = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
                {
                    Object = "string",
                    EnableDynamicFieldUpdate = false,
                    IncludeDeletedRecords = false,
                },
                SapoData = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
                {
                    ObjectPath = "string",
                },
                ServiceNow = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
                {
                    Object = "string",
                },
                Singular = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
                {
                    Object = "string",
                },
                Slack = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
                {
                    Object = "string",
                },
                Trendmicro = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
                {
                    Object = "string",
                },
                Veeva = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
                {
                    Object = "string",
                    DocumentType = "string",
                    IncludeAllVersions = false,
                    IncludeRenditions = false,
                    IncludeSourceFiles = false,
                },
                Zendesk = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
                {
                    Object = "string",
                },
            },
            ApiVersion = "string",
            ConnectorProfileName = "string",
            IncrementalPullConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigIncrementalPullConfigArgs
            {
                DatetimeTypeFieldName = "string",
            },
        },
        Tasks = new[]
        {
            new Aws.AppFlow.Inputs.FlowTaskArgs
            {
                TaskType = "string",
                ConnectorOperators = new[]
                {
                    new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                    {
                        Amplitude = "string",
                        CustomConnector = "string",
                        Datadog = "string",
                        Dynatrace = "string",
                        GoogleAnalytics = "string",
                        InforNexus = "string",
                        Marketo = "string",
                        S3 = "string",
                        Salesforce = "string",
                        SapoData = "string",
                        ServiceNow = "string",
                        Singular = "string",
                        Slack = "string",
                        Trendmicro = "string",
                        Veeva = "string",
                        Zendesk = "string",
                    },
                },
                DestinationField = "string",
                SourceFields = new[]
                {
                    "string",
                },
                TaskProperties = 
                {
                    { "string", "string" },
                },
            },
        },
        TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
        {
            TriggerType = "string",
            TriggerProperties = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesArgs
            {
                Scheduled = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs
                {
                    ScheduleExpression = "string",
                    DataPullMode = "string",
                    FirstExecutionFrom = "string",
                    ScheduleEndTime = "string",
                    ScheduleOffset = 0,
                    ScheduleStartTime = "string",
                    Timezone = "string",
                },
            },
        },
        Description = "string",
        KmsArn = "string",
        MetadataCatalogConfig = new Aws.AppFlow.Inputs.FlowMetadataCatalogConfigArgs
        {
            GlueDataCatalog = new Aws.AppFlow.Inputs.FlowMetadataCatalogConfigGlueDataCatalogArgs
            {
                DatabaseName = "string",
                RoleArn = "string",
                TablePrefix = "string",
            },
        },
        Name = "string",
        Tags = 
        {
            { "string", "string" },
        },
    });
    
    example, err := appflow.NewFlow(ctx, "flowResource", &appflow.FlowArgs{
    	DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
    		&appflow.FlowDestinationFlowConfigArgs{
    			ConnectorType: pulumi.String("string"),
    			DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
    				CustomConnector: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs{
    					EntityName: pulumi.String("string"),
    					CustomProperties: pulumi.StringMap{
    						"string": pulumi.String("string"),
    					},
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				CustomerProfiles: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs{
    					DomainName:     pulumi.String("string"),
    					ObjectTypeName: pulumi.String("string"),
    				},
    				EventBridge: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Honeycode: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				LookoutMetrics: nil,
    				Marketo: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Redshift: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs{
    					IntermediateBucketName: pulumi.String("string"),
    					Object:                 pulumi.String("string"),
    					BucketPrefix:           pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
    					BucketName:   pulumi.String("string"),
    					BucketPrefix: pulumi.String("string"),
    					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
    						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs{
    							AggregationType: pulumi.String("string"),
    							TargetFileSize:  pulumi.Int(0),
    						},
    						FileType: pulumi.String("string"),
    						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
    							PrefixFormat: pulumi.String("string"),
    							PrefixHierarchies: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    							PrefixType: pulumi.String("string"),
    						},
    						PreserveSourceDataTyping: pulumi.Bool(false),
    					},
    				},
    				Salesforce: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				SapoData: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs{
    					ObjectPath: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					SuccessResponseHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs{
    						BucketName:   pulumi.String("string"),
    						BucketPrefix: pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				Snowflake: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs{
    					IntermediateBucketName: pulumi.String("string"),
    					Object:                 pulumi.String("string"),
    					BucketPrefix:           pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Upsolver: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs{
    					BucketName: pulumi.String("string"),
    					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs{
    						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs{
    							PrefixType:   pulumi.String("string"),
    							PrefixFormat: pulumi.String("string"),
    							PrefixHierarchies: pulumi.StringArray{
    								pulumi.String("string"),
    							},
    						},
    						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs{
    							AggregationType: pulumi.String("string"),
    						},
    						FileType: pulumi.String("string"),
    					},
    					BucketPrefix: pulumi.String("string"),
    				},
    				Zendesk: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    			},
    			ApiVersion:           pulumi.String("string"),
    			ConnectorProfileName: pulumi.String("string"),
    		},
    	},
    	SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
    		ConnectorType: pulumi.String("string"),
    		SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
    			Amplitude: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs{
    				Object: pulumi.String("string"),
    			},
    			CustomConnector: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs{
    				EntityName: pulumi.String("string"),
    				CustomProperties: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    			},
    			Datadog: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs{
    				Object: pulumi.String("string"),
    			},
    			Dynatrace: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs{
    				Object: pulumi.String("string"),
    			},
    			GoogleAnalytics: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs{
    				Object: pulumi.String("string"),
    			},
    			InforNexus: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs{
    				Object: pulumi.String("string"),
    			},
    			Marketo: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs{
    				Object: pulumi.String("string"),
    			},
    			S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
    				BucketName:   pulumi.String("string"),
    				BucketPrefix: pulumi.String("string"),
    				S3InputFormatConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs{
    					S3InputFileType: pulumi.String("string"),
    				},
    			},
    			Salesforce: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs{
    				Object:                   pulumi.String("string"),
    				EnableDynamicFieldUpdate: pulumi.Bool(false),
    				IncludeDeletedRecords:    pulumi.Bool(false),
    			},
    			SapoData: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs{
    				ObjectPath: pulumi.String("string"),
    			},
    			ServiceNow: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs{
    				Object: pulumi.String("string"),
    			},
    			Singular: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs{
    				Object: pulumi.String("string"),
    			},
    			Slack: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs{
    				Object: pulumi.String("string"),
    			},
    			Trendmicro: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs{
    				Object: pulumi.String("string"),
    			},
    			Veeva: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs{
    				Object:             pulumi.String("string"),
    				DocumentType:       pulumi.String("string"),
    				IncludeAllVersions: pulumi.Bool(false),
    				IncludeRenditions:  pulumi.Bool(false),
    				IncludeSourceFiles: pulumi.Bool(false),
    			},
    			Zendesk: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs{
    				Object: pulumi.String("string"),
    			},
    		},
    		ApiVersion:           pulumi.String("string"),
    		ConnectorProfileName: pulumi.String("string"),
    		IncrementalPullConfig: &appflow.FlowSourceFlowConfigIncrementalPullConfigArgs{
    			DatetimeTypeFieldName: pulumi.String("string"),
    		},
    	},
    	Tasks: appflow.FlowTaskArray{
    		&appflow.FlowTaskArgs{
    			TaskType: pulumi.String("string"),
    			ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
    				&appflow.FlowTaskConnectorOperatorArgs{
    					Amplitude:       pulumi.String("string"),
    					CustomConnector: pulumi.String("string"),
    					Datadog:         pulumi.String("string"),
    					Dynatrace:       pulumi.String("string"),
    					GoogleAnalytics: pulumi.String("string"),
    					InforNexus:      pulumi.String("string"),
    					Marketo:         pulumi.String("string"),
    					S3:              pulumi.String("string"),
    					Salesforce:      pulumi.String("string"),
    					SapoData:        pulumi.String("string"),
    					ServiceNow:      pulumi.String("string"),
    					Singular:        pulumi.String("string"),
    					Slack:           pulumi.String("string"),
    					Trendmicro:      pulumi.String("string"),
    					Veeva:           pulumi.String("string"),
    					Zendesk:         pulumi.String("string"),
    				},
    			},
    			DestinationField: pulumi.String("string"),
    			SourceFields: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			TaskProperties: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    	},
    	TriggerConfig: &appflow.FlowTriggerConfigArgs{
    		TriggerType: pulumi.String("string"),
    		TriggerProperties: &appflow.FlowTriggerConfigTriggerPropertiesArgs{
    			Scheduled: &appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs{
    				ScheduleExpression: pulumi.String("string"),
    				DataPullMode:       pulumi.String("string"),
    				FirstExecutionFrom: pulumi.String("string"),
    				ScheduleEndTime:    pulumi.String("string"),
    				ScheduleOffset:     pulumi.Int(0),
    				ScheduleStartTime:  pulumi.String("string"),
    				Timezone:           pulumi.String("string"),
    			},
    		},
    	},
    	Description: pulumi.String("string"),
    	KmsArn:      pulumi.String("string"),
    	MetadataCatalogConfig: &appflow.FlowMetadataCatalogConfigArgs{
    		GlueDataCatalog: &appflow.FlowMetadataCatalogConfigGlueDataCatalogArgs{
    			DatabaseName: pulumi.String("string"),
    			RoleArn:      pulumi.String("string"),
    			TablePrefix:  pulumi.String("string"),
    		},
    	},
    	Name: pulumi.String("string"),
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var flowResource = new Flow("flowResource", FlowArgs.builder()
        .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
            .connectorType("string")
            .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
                .customConnector(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs.builder()
                    .entityName("string")
                    .customProperties(Map.of("string", "string"))
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .customerProfiles(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs.builder()
                    .domainName("string")
                    .objectTypeName("string")
                    .build())
                .eventBridge(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .honeycode(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .lookoutMetrics()
                .marketo(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .redshift(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs.builder()
                    .intermediateBucketName("string")
                    .object("string")
                    .bucketPrefix("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                        .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs.builder()
                            .aggregationType("string")
                            .targetFileSize(0)
                            .build())
                        .fileType("string")
                        .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                            .prefixFormat("string")
                            .prefixHierarchies("string")
                            .prefixType("string")
                            .build())
                        .preserveSourceDataTyping(false)
                        .build())
                    .build())
                .salesforce(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .sapoData(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs.builder()
                    .objectPath("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .successResponseHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .build())
                    .writeOperationType("string")
                    .build())
                .snowflake(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs.builder()
                    .intermediateBucketName("string")
                    .object("string")
                    .bucketPrefix("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .upsolver(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs.builder()
                    .bucketName("string")
                    .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs.builder()
                        .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs.builder()
                            .prefixType("string")
                            .prefixFormat("string")
                            .prefixHierarchies("string")
                            .build())
                        .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs.builder()
                            .aggregationType("string")
                            .build())
                        .fileType("string")
                        .build())
                    .bucketPrefix("string")
                    .build())
                .zendesk(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .build())
            .apiVersion("string")
            .connectorProfileName("string")
            .build())
        .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
            .connectorType("string")
            .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
                .amplitude(FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs.builder()
                    .object("string")
                    .build())
                .customConnector(FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs.builder()
                    .entityName("string")
                    .customProperties(Map.of("string", "string"))
                    .build())
                .datadog(FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs.builder()
                    .object("string")
                    .build())
                .dynatrace(FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs.builder()
                    .object("string")
                    .build())
                .googleAnalytics(FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs.builder()
                    .object("string")
                    .build())
                .inforNexus(FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs.builder()
                    .object("string")
                    .build())
                .marketo(FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs.builder()
                    .object("string")
                    .build())
                .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .s3InputFormatConfig(FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs.builder()
                        .s3InputFileType("string")
                        .build())
                    .build())
                .salesforce(FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs.builder()
                    .object("string")
                    .enableDynamicFieldUpdate(false)
                    .includeDeletedRecords(false)
                    .build())
                .sapoData(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs.builder()
                    .objectPath("string")
                    .build())
                .serviceNow(FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs.builder()
                    .object("string")
                    .build())
                .singular(FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs.builder()
                    .object("string")
                    .build())
                .slack(FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs.builder()
                    .object("string")
                    .build())
                .trendmicro(FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs.builder()
                    .object("string")
                    .build())
                .veeva(FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs.builder()
                    .object("string")
                    .documentType("string")
                    .includeAllVersions(false)
                    .includeRenditions(false)
                    .includeSourceFiles(false)
                    .build())
                .zendesk(FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs.builder()
                    .object("string")
                    .build())
                .build())
            .apiVersion("string")
            .connectorProfileName("string")
            .incrementalPullConfig(FlowSourceFlowConfigIncrementalPullConfigArgs.builder()
                .datetimeTypeFieldName("string")
                .build())
            .build())
        .tasks(FlowTaskArgs.builder()
            .taskType("string")
            .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
                .amplitude("string")
                .customConnector("string")
                .datadog("string")
                .dynatrace("string")
                .googleAnalytics("string")
                .inforNexus("string")
                .marketo("string")
                .s3("string")
                .salesforce("string")
                .sapoData("string")
                .serviceNow("string")
                .singular("string")
                .slack("string")
                .trendmicro("string")
                .veeva("string")
                .zendesk("string")
                .build())
            .destinationField("string")
            .sourceFields("string")
            .taskProperties(Map.of("string", "string"))
            .build())
        .triggerConfig(FlowTriggerConfigArgs.builder()
            .triggerType("string")
            .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                    .scheduleExpression("string")
                    .dataPullMode("string")
                    .firstExecutionFrom("string")
                    .scheduleEndTime("string")
                    .scheduleOffset(0)
                    .scheduleStartTime("string")
                    .timezone("string")
                    .build())
                .build())
            .build())
        .description("string")
        .kmsArn("string")
        .metadataCatalogConfig(FlowMetadataCatalogConfigArgs.builder()
            .glueDataCatalog(FlowMetadataCatalogConfigGlueDataCatalogArgs.builder()
                .databaseName("string")
                .roleArn("string")
                .tablePrefix("string")
                .build())
            .build())
        .name("string")
        .tags(Map.of("string", "string"))
        .build());
    
    flow_resource = aws.appflow.Flow("flowResource",
        destination_flow_configs=[{
            "connectorType": "string",
            "destinationConnectorProperties": {
                "customConnector": {
                    "entityName": "string",
                    "customProperties": {
                        "string": "string",
                    },
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                    "idFieldNames": ["string"],
                    "writeOperationType": "string",
                },
                "customerProfiles": {
                    "domainName": "string",
                    "objectTypeName": "string",
                },
                "eventBridge": {
                    "object": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                },
                "honeycode": {
                    "object": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                },
                "lookoutMetrics": {},
                "marketo": {
                    "object": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                },
                "redshift": {
                    "intermediateBucketName": "string",
                    "object": "string",
                    "bucketPrefix": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                },
                "s3": {
                    "bucketName": "string",
                    "bucketPrefix": "string",
                    "s3OutputFormatConfig": {
                        "aggregationConfig": {
                            "aggregationType": "string",
                            "targetFileSize": 0,
                        },
                        "fileType": "string",
                        "prefixConfig": {
                            "prefixFormat": "string",
                            "prefixHierarchies": ["string"],
                            "prefixType": "string",
                        },
                        "preserveSourceDataTyping": False,
                    },
                },
                "salesforce": {
                    "object": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                    "idFieldNames": ["string"],
                    "writeOperationType": "string",
                },
                "sapoData": {
                    "objectPath": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                    "idFieldNames": ["string"],
                    "successResponseHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                    },
                    "writeOperationType": "string",
                },
                "snowflake": {
                    "intermediateBucketName": "string",
                    "object": "string",
                    "bucketPrefix": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                },
                "upsolver": {
                    "bucketName": "string",
                    "s3OutputFormatConfig": {
                        "prefixConfig": {
                            "prefixType": "string",
                            "prefixFormat": "string",
                            "prefixHierarchies": ["string"],
                        },
                        "aggregationConfig": {
                            "aggregationType": "string",
                        },
                        "fileType": "string",
                    },
                    "bucketPrefix": "string",
                },
                "zendesk": {
                    "object": "string",
                    "errorHandlingConfig": {
                        "bucketName": "string",
                        "bucketPrefix": "string",
                        "failOnFirstDestinationError": False,
                    },
                    "idFieldNames": ["string"],
                    "writeOperationType": "string",
                },
            },
            "apiVersion": "string",
            "connectorProfileName": "string",
        }],
        source_flow_config={
            "connectorType": "string",
            "sourceConnectorProperties": {
                "amplitude": {
                    "object": "string",
                },
                "customConnector": {
                    "entityName": "string",
                    "customProperties": {
                        "string": "string",
                    },
                },
                "datadog": {
                    "object": "string",
                },
                "dynatrace": {
                    "object": "string",
                },
                "googleAnalytics": {
                    "object": "string",
                },
                "inforNexus": {
                    "object": "string",
                },
                "marketo": {
                    "object": "string",
                },
                "s3": {
                    "bucketName": "string",
                    "bucketPrefix": "string",
                    "s3InputFormatConfig": {
                        "s3InputFileType": "string",
                    },
                },
                "salesforce": {
                    "object": "string",
                    "enableDynamicFieldUpdate": False,
                    "includeDeletedRecords": False,
                },
                "sapoData": {
                    "objectPath": "string",
                },
                "serviceNow": {
                    "object": "string",
                },
                "singular": {
                    "object": "string",
                },
                "slack": {
                    "object": "string",
                },
                "trendmicro": {
                    "object": "string",
                },
                "veeva": {
                    "object": "string",
                    "documentType": "string",
                    "includeAllVersions": False,
                    "includeRenditions": False,
                    "includeSourceFiles": False,
                },
                "zendesk": {
                    "object": "string",
                },
            },
            "apiVersion": "string",
            "connectorProfileName": "string",
            "incrementalPullConfig": {
                "datetimeTypeFieldName": "string",
            },
        },
        tasks=[{
            "taskType": "string",
            "connectorOperators": [{
                "amplitude": "string",
                "customConnector": "string",
                "datadog": "string",
                "dynatrace": "string",
                "googleAnalytics": "string",
                "inforNexus": "string",
                "marketo": "string",
                "s3": "string",
                "salesforce": "string",
                "sapoData": "string",
                "serviceNow": "string",
                "singular": "string",
                "slack": "string",
                "trendmicro": "string",
                "veeva": "string",
                "zendesk": "string",
            }],
            "destinationField": "string",
            "sourceFields": ["string"],
            "taskProperties": {
                "string": "string",
            },
        }],
        trigger_config={
            "triggerType": "string",
            "triggerProperties": {
                "scheduled": {
                    "scheduleExpression": "string",
                    "dataPullMode": "string",
                    "firstExecutionFrom": "string",
                    "scheduleEndTime": "string",
                    "scheduleOffset": 0,
                    "scheduleStartTime": "string",
                    "timezone": "string",
                },
            },
        },
        description="string",
        kms_arn="string",
        metadata_catalog_config={
            "glueDataCatalog": {
                "databaseName": "string",
                "roleArn": "string",
                "tablePrefix": "string",
            },
        },
        name="string",
        tags={
            "string": "string",
        })
    
    const flowResource = new aws.appflow.Flow("flowResource", {
        destinationFlowConfigs: [{
            connectorType: "string",
            destinationConnectorProperties: {
                customConnector: {
                    entityName: "string",
                    customProperties: {
                        string: "string",
                    },
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
                customerProfiles: {
                    domainName: "string",
                    objectTypeName: "string",
                },
                eventBridge: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                honeycode: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                lookoutMetrics: {},
                marketo: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                redshift: {
                    intermediateBucketName: "string",
                    object: "string",
                    bucketPrefix: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                s3: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    s3OutputFormatConfig: {
                        aggregationConfig: {
                            aggregationType: "string",
                            targetFileSize: 0,
                        },
                        fileType: "string",
                        prefixConfig: {
                            prefixFormat: "string",
                            prefixHierarchies: ["string"],
                            prefixType: "string",
                        },
                        preserveSourceDataTyping: false,
                    },
                },
                salesforce: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
                sapoData: {
                    objectPath: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    successResponseHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                    },
                    writeOperationType: "string",
                },
                snowflake: {
                    intermediateBucketName: "string",
                    object: "string",
                    bucketPrefix: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                upsolver: {
                    bucketName: "string",
                    s3OutputFormatConfig: {
                        prefixConfig: {
                            prefixType: "string",
                            prefixFormat: "string",
                            prefixHierarchies: ["string"],
                        },
                        aggregationConfig: {
                            aggregationType: "string",
                        },
                        fileType: "string",
                    },
                    bucketPrefix: "string",
                },
                zendesk: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
            },
            apiVersion: "string",
            connectorProfileName: "string",
        }],
        sourceFlowConfig: {
            connectorType: "string",
            sourceConnectorProperties: {
                amplitude: {
                    object: "string",
                },
                customConnector: {
                    entityName: "string",
                    customProperties: {
                        string: "string",
                    },
                },
                datadog: {
                    object: "string",
                },
                dynatrace: {
                    object: "string",
                },
                googleAnalytics: {
                    object: "string",
                },
                inforNexus: {
                    object: "string",
                },
                marketo: {
                    object: "string",
                },
                s3: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    s3InputFormatConfig: {
                        s3InputFileType: "string",
                    },
                },
                salesforce: {
                    object: "string",
                    enableDynamicFieldUpdate: false,
                    includeDeletedRecords: false,
                },
                sapoData: {
                    objectPath: "string",
                },
                serviceNow: {
                    object: "string",
                },
                singular: {
                    object: "string",
                },
                slack: {
                    object: "string",
                },
                trendmicro: {
                    object: "string",
                },
                veeva: {
                    object: "string",
                    documentType: "string",
                    includeAllVersions: false,
                    includeRenditions: false,
                    includeSourceFiles: false,
                },
                zendesk: {
                    object: "string",
                },
            },
            apiVersion: "string",
            connectorProfileName: "string",
            incrementalPullConfig: {
                datetimeTypeFieldName: "string",
            },
        },
        tasks: [{
            taskType: "string",
            connectorOperators: [{
                amplitude: "string",
                customConnector: "string",
                datadog: "string",
                dynatrace: "string",
                googleAnalytics: "string",
                inforNexus: "string",
                marketo: "string",
                s3: "string",
                salesforce: "string",
                sapoData: "string",
                serviceNow: "string",
                singular: "string",
                slack: "string",
                trendmicro: "string",
                veeva: "string",
                zendesk: "string",
            }],
            destinationField: "string",
            sourceFields: ["string"],
            taskProperties: {
                string: "string",
            },
        }],
        triggerConfig: {
            triggerType: "string",
            triggerProperties: {
                scheduled: {
                    scheduleExpression: "string",
                    dataPullMode: "string",
                    firstExecutionFrom: "string",
                    scheduleEndTime: "string",
                    scheduleOffset: 0,
                    scheduleStartTime: "string",
                    timezone: "string",
                },
            },
        },
        description: "string",
        kmsArn: "string",
        metadataCatalogConfig: {
            glueDataCatalog: {
                databaseName: "string",
                roleArn: "string",
                tablePrefix: "string",
            },
        },
        name: "string",
        tags: {
            string: "string",
        },
    });
    
    type: aws:appflow:Flow
    properties:
        description: string
        destinationFlowConfigs:
            - apiVersion: string
              connectorProfileName: string
              connectorType: string
              destinationConnectorProperties:
                customConnector:
                    customProperties:
                        string: string
                    entityName: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    writeOperationType: string
                customerProfiles:
                    domainName: string
                    objectTypeName: string
                eventBridge:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                honeycode:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                lookoutMetrics: {}
                marketo:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                redshift:
                    bucketPrefix: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    intermediateBucketName: string
                    object: string
                s3:
                    bucketName: string
                    bucketPrefix: string
                    s3OutputFormatConfig:
                        aggregationConfig:
                            aggregationType: string
                            targetFileSize: 0
                        fileType: string
                        prefixConfig:
                            prefixFormat: string
                            prefixHierarchies:
                                - string
                            prefixType: string
                        preserveSourceDataTyping: false
                salesforce:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    object: string
                    writeOperationType: string
                sapoData:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    objectPath: string
                    successResponseHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                    writeOperationType: string
                snowflake:
                    bucketPrefix: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    intermediateBucketName: string
                    object: string
                upsolver:
                    bucketName: string
                    bucketPrefix: string
                    s3OutputFormatConfig:
                        aggregationConfig:
                            aggregationType: string
                        fileType: string
                        prefixConfig:
                            prefixFormat: string
                            prefixHierarchies:
                                - string
                            prefixType: string
                zendesk:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    object: string
                    writeOperationType: string
        kmsArn: string
        metadataCatalogConfig:
            glueDataCatalog:
                databaseName: string
                roleArn: string
                tablePrefix: string
        name: string
        sourceFlowConfig:
            apiVersion: string
            connectorProfileName: string
            connectorType: string
            incrementalPullConfig:
                datetimeTypeFieldName: string
            sourceConnectorProperties:
                amplitude:
                    object: string
                customConnector:
                    customProperties:
                        string: string
                    entityName: string
                datadog:
                    object: string
                dynatrace:
                    object: string
                googleAnalytics:
                    object: string
                inforNexus:
                    object: string
                marketo:
                    object: string
                s3:
                    bucketName: string
                    bucketPrefix: string
                    s3InputFormatConfig:
                        s3InputFileType: string
                salesforce:
                    enableDynamicFieldUpdate: false
                    includeDeletedRecords: false
                    object: string
                sapoData:
                    objectPath: string
                serviceNow:
                    object: string
                singular:
                    object: string
                slack:
                    object: string
                trendmicro:
                    object: string
                veeva:
                    documentType: string
                    includeAllVersions: false
                    includeRenditions: false
                    includeSourceFiles: false
                    object: string
                zendesk:
                    object: string
        tags:
            string: string
        tasks:
            - connectorOperators:
                - amplitude: string
                  customConnector: string
                  datadog: string
                  dynatrace: string
                  googleAnalytics: string
                  inforNexus: string
                  marketo: string
                  s3: string
                  salesforce: string
                  sapoData: string
                  serviceNow: string
                  singular: string
                  slack: string
                  trendmicro: string
                  veeva: string
                  zendesk: string
              destinationField: string
              sourceFields:
                - string
              taskProperties:
                string: string
              taskType: string
        triggerConfig:
            triggerProperties:
                scheduled:
                    dataPullMode: string
                    firstExecutionFrom: string
                    scheduleEndTime: string
                    scheduleExpression: string
                    scheduleOffset: 0
                    scheduleStartTime: string
                    timezone: string
            triggerType: string
    

    Flow Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Flow resource accepts the following input properties:

    DestinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    SourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs (a minimal scheduled-trigger sketch follows this property list).
    Description string
    Description of the flow you want to create.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    MetadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    Name string
    Name of the flow.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
    DestinationFlowConfigs []FlowDestinationFlowConfigArgs
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    SourceFlowConfig FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tasks []FlowTaskArgs
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    Description string
    Description of the flow you want to create.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    MetadataCatalogConfig FlowMetadataCatalogConfigArgs
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    Name string
    Name of the flow.
    Tags map[string]string
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    description String
    Description of the flow you want to create.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name String
    Name of the flow.
    tags Map<String,String>
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs FlowDestinationFlowConfig[]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks FlowTask[]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    description string
    Description of the flow you want to create.
    kmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name string
    Name of the flow.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
    destination_flow_configs Sequence[FlowDestinationFlowConfigArgs]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    source_flow_config FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks Sequence[FlowTaskArgs]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    trigger_config FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    description str
    Description of the flow you want to create.
    kms_arn str
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadata_catalog_config FlowMetadataCatalogConfigArgs
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name str
    Name of the flow.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs List<Property Map>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig Property Map
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks List<Property Map>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig Property Map
    A Trigger that determines how and when the flow runs.
    description String
    Description of the flow you want to create.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig Property Map
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name String
    Name of the flow.
    tags Map<String>
    Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
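
    The TriggerConfig and MetadataCatalogConfig inputs listed above are plain nested objects. The following is a minimal TypeScript sketch of a scheduled incremental trigger and a Glue Data Catalog configuration; the rate expression, database name, and role ARN are illustrative placeholders rather than values from this page, and the aws.types.input.appflow.* annotations assume the Node SDK's usual input-type namespace and can be dropped if preferred.

    import * as aws from "@pulumi/aws";

    // Hypothetical scheduled trigger: run on a fixed rate and pull only records that
    // changed since the previous run. "Scheduled" and "Incremental" are the documented
    // trigger type and data pull mode; the rate syntax is illustrative, so check the
    // AppFlow scheduling documentation for the exact format.
    const scheduledTrigger: aws.types.input.appflow.FlowTriggerConfig = {
        triggerType: "Scheduled",
        triggerProperties: {
            scheduled: {
                scheduleExpression: "rate(1hours)",
                dataPullMode: "Incremental",
                timezone: "UTC",
            },
        },
    };

    // Hypothetical Glue Data Catalog configuration; the database name and role ARN are
    // placeholders for a Glue database and an IAM role that AppFlow can assume.
    const catalogConfig: aws.types.input.appflow.FlowMetadataCatalogConfig = {
        glueDataCatalog: {
            databaseName: "appflow_catalog",
            roleArn: "arn:aws:iam::123456789012:role/appflow-glue-access",
            tablePrefix: "flow_",
        },
    };

    These objects would be passed as the triggerConfig and metadataCatalogConfig arguments of an aws.appflow.Flow. For connectors that support incremental pulls, the source side's incrementalPullConfig.datetimeTypeFieldName names the timestamp field AppFlow uses to detect new or changed records.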

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Flow resource produces the following output properties:

    Arn string
    Flow's ARN.
    FlowStatus string
    The current status of the flow.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Arn string
    Flow's ARN.
    FlowStatus string
    The current status of the flow.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    Flow's ARN.
    flowStatus String
    The current status of the flow.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn string
    Flow's ARN.
    flowStatus string
    The current status of the flow.
    id string
    The provider-assigned unique ID for this managed resource.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn str
    Flow's ARN.
    flow_status str
    The current status of the flow.
    id str
    The provider-assigned unique ID for this managed resource.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    Flow's ARN.
    flowStatus String
    The current status of the flow.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.
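
    For example, once a flow has been declared elsewhere in the program (a constant assumed here to be named myFlow), these output properties can be surfaced as stack outputs; the following is a minimal TypeScript sketch:

    // Assumes `myFlow` is an aws.appflow.Flow declared elsewhere in the program.
    export const flowArn = myFlow.arn;            // Flow's ARN
    export const flowStatus = myFlow.flowStatus;  // Current status of the flow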

    Look up Existing Flow Resource

    Get an existing Flow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: FlowState, opts?: CustomResourceOptions): Flow
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            arn: Optional[str] = None,
            description: Optional[str] = None,
            destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
            flow_status: Optional[str] = None,
            kms_arn: Optional[str] = None,
            metadata_catalog_config: Optional[FlowMetadataCatalogConfigArgs] = None,
            name: Optional[str] = None,
            source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tags_all: Optional[Mapping[str, str]] = None,
            tasks: Optional[Sequence[FlowTaskArgs]] = None,
            trigger_config: Optional[FlowTriggerConfigArgs] = None) -> Flow
    func GetFlow(ctx *Context, name string, id IDInput, state *FlowState, opts ...ResourceOption) (*Flow, error)
    public static Flow Get(string name, Input<string> id, FlowState? state, CustomResourceOptions? opts = null)
    public static Flow get(String name, Output<String> id, FlowState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
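
    As a hedged TypeScript sketch, an existing flow can be brought into a program with the static get method; the flow's ARN serves as the provider ID, and the account, Region, and flow name below are assumed values:

    import * as aws from "@pulumi/aws";

    // Hypothetical lookup of an existing flow by its provider ID (the flow ARN).
    const existing = aws.appflow.Flow.get(
        "existing-flow",
        "arn:aws:appflow:us-east-1:123456789012:flow/example-flow",
    );

    export const existingFlowStatus = existing.flowStatus;
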
    The following state arguments are supported:
    Arn string
    Flow's ARN.
    Description string
    Description of the flow you want to create.
    DestinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    FlowStatus string
    The current status of the flow.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    MetadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    Name string
    Name of the flow.
    SourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfig
A Trigger that determines how and when the flow runs.
    Arn string
    Flow's ARN.
    Description string
    Description of the flow you want to create.
    DestinationFlowConfigs []FlowDestinationFlowConfigArgs
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    FlowStatus string
    The current status of the flow.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    MetadataCatalogConfig FlowMetadataCatalogConfigArgs
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    Name string
    Name of the flow.
    SourceFlowConfig FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tags map[string]string
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Tasks []FlowTaskArgs
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfigArgs
A Trigger that determines how and when the flow runs.
    arn String
    Flow's ARN.
    description String
    Description of the flow you want to create.
    destinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus String
    The current status of the flow.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name String
    Name of the flow.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Map<String,String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
A Trigger that determines how and when the flow runs.
    arn string
    Flow's ARN.
    description string
    Description of the flow you want to create.
    destinationFlowConfigs FlowDestinationFlowConfig[]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus string
    The current status of the flow.
    kmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig FlowMetadataCatalogConfig
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name string
    Name of the flow.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks FlowTask[]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
A Trigger that determines how and when the flow runs.
    arn str
    Flow's ARN.
    description str
    Description of the flow you want to create.
    destination_flow_configs Sequence[FlowDestinationFlowConfigArgs]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flow_status str
    The current status of the flow.
    kms_arn str
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadata_catalog_config FlowMetadataCatalogConfigArgs
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name str
    Name of the flow.
    source_flow_config FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks Sequence[FlowTaskArgs]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    trigger_config FlowTriggerConfigArgs
A Trigger that determines how and when the flow runs.
    arn String
    Flow's ARN.
    description String
    Description of the flow you want to create.
    destinationFlowConfigs List<Property Map>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus String
    The current status of the flow.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    metadataCatalogConfig Property Map
    A Catalog that determines the configuration that Amazon AppFlow uses when it catalogs the data that’s transferred by the associated flow. When Amazon AppFlow catalogs the data from a flow, it stores metadata in a data catalog.
    name String
    Name of the flow.
    sourceFlowConfig Property Map
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Map<String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks List<Property Map>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig Property Map
A Trigger that determines how and when the flow runs.

    Supporting Types

    FlowDestinationFlowConfig, FlowDestinationFlowConfigArgs

    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    ApiVersion string
    API version that the destination connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    ApiVersion string
    API version that the destination connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion String
    API version that the destination connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion string
    API version that the destination connector uses.
    connectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connector_type str
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destination_connector_properties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    api_version str
    API version that the destination connector uses.
    connector_profile_name str
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties Property Map
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion String
    API version that the destination connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
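
    As a sketch, a single destinationFlowConfigs entry pairs a connectorType with the matching block inside destinationConnectorProperties; the bucket name, prefix, and file type below are assumed values:

    // Hypothetical destination flow config entry for an S3 destination.
    const s3Destination = {
        connectorType: "S3",
        destinationConnectorProperties: {
            s3: {
                bucketName: "my-destination-bucket",   // assumed bucket
                bucketPrefix: "appflow-output",        // assumed prefix
                s3OutputFormatConfig: {
                    fileType: "JSON",
                    prefixConfig: { prefixType: "PATH" },
                },
            },
        },
    };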

    FlowDestinationFlowConfigDestinationConnectorProperties, FlowDestinationFlowConfigDestinationConnectorPropertiesArgs

    CustomConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    CustomerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    EventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    Honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    LookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    Marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    Redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    S3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    Salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    SapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    Snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    Upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    Zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    CustomConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    CustomerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    EventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    Honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    LookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    Marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    Redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    S3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    Salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    SapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    Snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    Upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    Zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    customerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    customerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    custom_connector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    customer_profiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    event_bridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookout_metrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapo_data FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector Property Map
Properties that are required to query the custom connector. See Custom Connector Destination Properties for more details.
    customerProfiles Property Map
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge Property Map
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode Property Map
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics Property Map
    marketo Property Map
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift Property Map
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 Property Map
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce Property Map
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData Property Map
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake Property Map
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver Property Map
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk Property Map
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
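
    Only the block that matches the parent config's connectorType is populated. A hedged sketch, assuming an SAPOData destination with an example OData object path:

    // Hypothetical: for connectorType "SAPOData", only the sapoData block is set;
    // every other connector key is left out.
    const destinationConnectorProperties = {
        sapoData: {
            objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/Items",  // assumed object path
        },
    };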

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
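
    A minimal sketch of such an error-handling block (bucket name and prefix assumed); records that cannot be delivered are written to the named S3 location, and failOnFirstDestinationError stops the flow run on the first failure:

    // Hypothetical error-handling settings; the same shape is shared by several
    // destination connectors below.
    const errorHandlingConfig = {
        bucketName: "my-appflow-errors",    // assumed bucket for undeliverable records
        bucketPrefix: "custom-connector",   // assumed prefix
        failOnFirstDestinationError: true,
    };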

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs

    DomainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    ObjectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    DomainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    ObjectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName String
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName String
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domain_name str
    Unique name of the Amazon Connect Customer Profiles domain.
    object_type_name str
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName String
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName String
    Object specified in the Amazon Connect Customer Profiles flow destination.
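
    A hedged sketch of a Customer Profiles destination block; the domain and object type names are assumed values:

    // Hypothetical Customer Profiles destination properties.
    const customerProfiles = {
        domainName: "example-domain",       // assumed Amazon Connect Customer Profiles domain
        objectTypeName: "CustomerProfile",  // assumed object type in that domain
    };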

    FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3, FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs

    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    PreserveSourceDataTyping bool
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    PreserveSourceDataTyping bool
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping Boolean
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping boolean
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregation_config FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    file_type str
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefix_config FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserve_source_data_typing bool
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig Property Map
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig Property Map
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping Boolean
Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
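
    A minimal sketch of an S3 output format block that writes Parquet files, preserves the source data types, and organizes output folders by day:

    // Hypothetical S3 output format configuration.
    const s3OutputFormatConfig = {
        fileType: "PARQUET",
        preserveSourceDataTyping: true,   // only meaningful for the PARQUET file type
        prefixConfig: {
            prefixType: "PATH",
            prefixFormat: "DAY",
        },
    };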

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs

    AggregationType string
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    TargetFileSize int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    AggregationType string
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    TargetFileSize int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType String
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize Integer
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType string
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize number
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregation_type str
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    target_file_size int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType String
Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize Number
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
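
    A short sketch, assuming output files of roughly 128 MB are wanted:

    // Hypothetical aggregation settings: aggregate the run's records into single files,
    // targeting about 128 MB per output file (targetFileSize is expressed in MB).
    const aggregationConfig = {
        aggregationType: "SingleFile",
        targetFileSize: 128,   // assumed size
    };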

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs

    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixHierarchies List<string>
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixHierarchies []string
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies List<String>
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies string[]
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefix_format str
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefix_hierarchies Sequence[str]
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefix_type str
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies List<String>
Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
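
    A minimal sketch combining the three settings; the values chosen below are assumptions:

    // Hypothetical prefix configuration: hourly folders, prefix applied to both the
    // path and the file name, with the execution ID included in the destination path.
    const prefixConfig = {
        prefixType: "PATH_AND_FILENAME",
        prefixFormat: "HOUR",
        prefixHierarchies: ["EXECUTION_ID"],
    };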

    FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs

    ObjectPath string
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    IdFieldNames List<string>
    SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    WriteOperationType string
    ObjectPath string
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    IdFieldNames []string
    SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    WriteOperationType string
    objectPath String
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    idFieldNames List<String>
    successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType String
    objectPath string
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    idFieldNames string[]
    successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType string
    object_path str
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    id_field_names Sequence[str]
    success_response_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    write_operation_type str
    objectPath String
    errorHandlingConfig Property Map
    idFieldNames List<String>
    successResponseHandlingConfig Property Map
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType String

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
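
    Putting these pieces together, a SAPOData destination block typically pairs an error-handling bucket with a success-response bucket. The sketch below uses the generated input types listed on this page; the object path, ID field, and bucket names are hypothetical placeholders, not values from the example above.

    import * as aws from "@pulumi/aws";

    // Sketch only: object path, ID field, and bucket names are placeholders.
    const sapoDataDestination: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData = {
        objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/Orders",
        idFieldNames: ["OrderId"],
        writeOperationType: "UPSERT",
        errorHandlingConfig: {
            bucketName: "example-flow-errors",        // S3 bucket that receives records that failed to write
            bucketPrefix: "sapodata",
            failOnFirstDestinationError: false,       // keep going after individual record failures
        },
        successResponseHandlingConfig: {
            bucketName: "example-flow-responses",     // S3 bucket that stores the connector's success responses
            bucketPrefix: "sapodata",
        },
    };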

    FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
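
    For Snowflake, the error handling block has the same S3-backed shape. A minimal sketch, with a placeholder bucket, that can be assigned to the errorHandlingConfig field of the Snowflake destination connector properties:

    import * as aws from "@pulumi/aws";

    // Sketch only: bucket name and prefix are placeholders.
    const snowflakeErrorHandling: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig = {
        bucketName: "example-flow-errors",    // S3 bucket that receives records Snowflake rejected
        bucketPrefix: "snowflake",
        failOnFirstDestinationError: true,    // stop the run on the first failed record
    };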

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs

    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefix_config FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregation_config FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    file_type str
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig Property Map
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig Property Map
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs

    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    aggregation_type str
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs

    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixHierarchies List<string>
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixHierarchies []string
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies List<String>
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies string[]
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefix_type str
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefix_format str
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefix_hierarchies Sequence[str]
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixHierarchies List<String>
    Determines whether the destination file path includes either or both of the selected elements. Valid values are EXECUTION_ID and SCHEMA_VERSION.
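
    The prefix and aggregation blocks only take effect inside the Upsolver destination's S3 output format configuration. A minimal sketch with illustrative (not default) values:

    import * as aws from "@pulumi/aws";

    // Sketch only: the choices below are illustrative, not defaults.
    const upsolverOutputFormat: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig = {
        fileType: "PARQUET",
        prefixConfig: {
            prefixType: "PATH_AND_FILENAME",
            prefixFormat: "DAY",                    // one folder per day
            prefixHierarchies: ["EXECUTION_ID"],    // include the execution ID in the destination path
        },
        aggregationConfig: {
            aggregationType: "SingleFile",          // aggregate each run's records into a single file
        },
    };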

    FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs

    FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowMetadataCatalogConfig, FlowMetadataCatalogConfigArgs

    FlowMetadataCatalogConfigGlueDataCatalog, FlowMetadataCatalogConfigGlueDataCatalogArgs

    DatabaseName string
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    RoleArn string
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    TablePrefix string
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
    DatabaseName string
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    RoleArn string
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    TablePrefix string
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
    databaseName String
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    roleArn String
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    tablePrefix String
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
    databaseName string
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    roleArn string
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    tablePrefix string
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
    database_name str
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    role_arn str
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    table_prefix str
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
    databaseName String
    The name of an existing Glue database to store the metadata tables that Amazon AppFlow creates.
    roleArn String
    The ARN of an IAM role that grants AppFlow the permissions it needs to create Data Catalog tables, databases, and partitions.
    tablePrefix String
    A naming prefix for each Data Catalog table that Amazon AppFlow creates.
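
    A minimal sketch of a metadata catalog configuration that registers flow output in an existing Glue database; the database name, role ARN, and table prefix are placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: database, role ARN, and prefix are placeholders.
    const metadataCatalog: aws.types.input.appflow.FlowMetadataCatalogConfig = {
        glueDataCatalog: {
            databaseName: "appflow_metadata",                          // existing Glue database
            roleArn: "arn:aws:iam::123456789012:role/appflow-glue",    // role AppFlow assumes to create Data Catalog tables
            tablePrefix: "flow_",                                      // prepended to each table AppFlow creates
        },
    };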

    FlowSourceFlowConfig, FlowSourceFlowConfigArgs

    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    ApiVersion string
    API version that the source connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    ApiVersion string
    API version that the source connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion String
    API version that the source connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion string
    API version that the source connector uses.
    connectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connector_type str
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    source_connector_properties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    api_version str
    API version that the source connector uses.
    connector_profile_name str
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incremental_pull_config FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties Property Map
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion String
    API version that the source connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig Property Map
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.

    FlowSourceFlowConfigIncrementalPullConfig, FlowSourceFlowConfigIncrementalPullConfigArgs

    DatetimeTypeFieldName string
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
    DatetimeTypeFieldName string
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
    datetimeTypeFieldName String
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
    datetimeTypeFieldName string
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
    datetime_type_field_name str
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
    datetimeTypeFieldName String
    Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
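
    A minimal sketch of a source flow configuration that pulls Salesforce records incrementally; the profile name, object, and timestamp field are placeholders. Exactly one block inside sourceConnectorProperties should be set, matching connectorType.

    import * as aws from "@pulumi/aws";

    // Sketch only: profile name, object, and timestamp field are placeholders.
    const sourceFlowConfig: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "Salesforce",
        connectorProfileName: "example-salesforce-profile",   // existing AppFlow connector profile
        incrementalPullConfig: {
            datetimeTypeFieldName: "LastModifiedDate",         // timestamp field that drives incremental pulls
        },
        sourceConnectorProperties: {
            salesforce: {
                object: "Account",                             // Salesforce object to query
            },
        },
    };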

    FlowSourceFlowConfigSourceConnectorProperties, FlowSourceFlowConfigSourceConnectorPropertiesArgs

    Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    custom_connector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    google_analytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    infor_nexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapo_data FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    service_now FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude Property Map
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector Property Map
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog Property Map
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace Property Map
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics Property Map
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus Property Map
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo Property Map
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 Property Map
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce Property Map
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData Property Map
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow Property Map
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular Property Map
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack Property Map
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro Property Map
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva Property Map
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk Property Map
    Information that is required for querying Zendesk. See Generic Source Properties for more details.

    FlowSourceFlowConfigSourceConnectorPropertiesAmplitude, FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector, FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs

    EntityName string
    CustomProperties Dictionary<string, string>
    EntityName string
    CustomProperties map[string]string
    entityName String
    customProperties Map<String,String>
    entityName string
    customProperties {[key: string]: string}
    entity_name str
    custom_properties Mapping[str, str]
    entityName String
    customProperties Map<String>
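
    When a custom connector is the source, the entity name and any pass-through properties are connector-specific. A minimal sketch with hypothetical values:

    import * as aws from "@pulumi/aws";

    // Sketch only: the entity name and custom properties depend entirely on the registered connector.
    const customConnectorSource: aws.types.input.appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector = {
        entityName: "orders",
        customProperties: {
            region: "eu-west-1",    // arbitrary key/value pairs forwarded to the connector
        },
    };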

    FlowSourceFlowConfigSourceConnectorPropertiesDatadog, FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesDynatrace, FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics, FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesInforNexus, FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesMarketo, FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesS3, FlowSourceFlowConfigSourceConnectorPropertiesS3Args

    BucketName string
    BucketPrefix string
    S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
    BucketName string
    BucketPrefix string
    S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
    bucketName String
    bucketPrefix String
    s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
    bucketName string
    bucketPrefix string
    s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
    bucket_name str
    bucket_prefix str
    s3_input_format_config FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.
    bucketName String
    bucketPrefix String
    s3InputFormatConfig Property Map
    When you use Amazon S3 as the source, the configuration format in which you provide the flow input data. See S3 Input Format Config for details.

    FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig, FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs

    S3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    S3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType String
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3_input_file_type str
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType String
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
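
    A minimal sketch of an S3 source whose objects are parsed as JSON; the bucket name and prefix are placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: bucket and prefix are placeholders.
    const s3Source: aws.types.input.appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3 = {
        bucketName: "example-source",
        bucketPrefix: "incoming",
        s3InputFormatConfig: {
            s3InputFileType: "JSON",    // parse each object as JSON rather than CSV
        },
    };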

    FlowSourceFlowConfigSourceConnectorPropertiesSalesforce, FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs

    Object string
    EnableDynamicFieldUpdate bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    IncludeDeletedRecords bool
    Whether Amazon AppFlow includes deleted files in the flow run.
    Object string
    EnableDynamicFieldUpdate bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    IncludeDeletedRecords bool
    Whether Amazon AppFlow includes deleted files in the flow run.
    object String
    enableDynamicFieldUpdate Boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords Boolean
    Whether Amazon AppFlow includes deleted files in the flow run.
    object string
    enableDynamicFieldUpdate boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords boolean
    Whether Amazon AppFlow includes deleted files in the flow run.
    object str
    enable_dynamic_field_update bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    include_deleted_records bool
    Whether Amazon AppFlow includes deleted files in the flow run.
    object String
    enableDynamicFieldUpdate Boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords Boolean
    Whether Amazon AppFlow includes deleted files in the flow run.
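
    A minimal sketch of Salesforce source properties; "Account" is a placeholder object.

    import * as aws from "@pulumi/aws";

    // Sketch only: the object name is a placeholder.
    const salesforceSource: aws.types.input.appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforce = {
        object: "Account",
        enableDynamicFieldUpdate: true,    // pick up newly added Salesforce fields on each run
        includeDeletedRecords: false,      // exclude records deleted in Salesforce
    };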

    FlowSourceFlowConfigSourceConnectorPropertiesSapoData, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs

    ObjectPath string
    ObjectPath string
    objectPath String
    objectPath string
    objectPath String

    FlowSourceFlowConfigSourceConnectorPropertiesServiceNow, FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesSingular, FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesSlack, FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro, FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowSourceFlowConfigSourceConnectorPropertiesVeeva, FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs

    Object string
    DocumentType string
    Document type specified in the Veeva document extract flow.
    IncludeAllVersions bool
    Boolean value to include All Versions of files in Veeva document extract flow.
    IncludeRenditions bool
    Boolean value to include file renditions in Veeva document extract flow.
    IncludeSourceFiles bool
    Boolean value to include source files in Veeva document extract flow.
    Object string
    DocumentType string
    Document type specified in the Veeva document extract flow.
    IncludeAllVersions bool
    Boolean value to include All Versions of files in Veeva document extract flow.
    IncludeRenditions bool
    Boolean value to include file renditions in Veeva document extract flow.
    IncludeSourceFiles bool
    Boolean value to include source files in Veeva document extract flow.
    object String
    documentType String
    Document type specified in the Veeva document extract flow.
    includeAllVersions Boolean
    Boolean value to include All Versions of files in Veeva document extract flow.
    includeRenditions Boolean
    Boolean value to include file renditions in Veeva document extract flow.
    includeSourceFiles Boolean
    Boolean value to include source files in Veeva document extract flow.
    object string
    documentType string
    Document type specified in the Veeva document extract flow.
    includeAllVersions boolean
    Boolean value to include All Versions of files in Veeva document extract flow.
    includeRenditions boolean
    Boolean value to include file renditions in Veeva document extract flow.
    includeSourceFiles boolean
    Boolean value to include source files in Veeva document extract flow.
    object str
    document_type str
    Document type specified in the Veeva document extract flow.
    include_all_versions bool
    Boolean value to include All Versions of files in Veeva document extract flow.
    include_renditions bool
    Boolean value to include file renditions in Veeva document extract flow.
    include_source_files bool
    Boolean value to include source files in Veeva document extract flow.
    object String
    documentType String
    Document type specified in the Veeva document extract flow.
    includeAllVersions Boolean
    Boolean value to include All Versions of files in Veeva document extract flow.
    includeRenditions Boolean
    Boolean value to include file renditions in Veeva document extract flow.
    includeSourceFiles Boolean
    Boolean value to include source files in Veeva document extract flow.
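
    A minimal sketch of Veeva document-extract source properties; the object and document type are placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: the object and document type are placeholders.
    const veevaSource: aws.types.input.appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeeva = {
        object: "documents__v",
        documentType: "Promotional Piece",    // limit the extract to one Veeva document type
        includeAllVersions: false,
        includeRenditions: true,              // also pull file renditions
        includeSourceFiles: true,             // also pull the original source files
    };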

    FlowSourceFlowConfigSourceConnectorPropertiesZendesk, FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs

    Object string
    Object string
    object String
    object string
    object str
    object String

    FlowTask, FlowTaskArgs

    TaskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    ConnectorOperators List<FlowTaskConnectorOperator>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    DestinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    SourceFields List<string>
    Source fields to which a particular task is applied.
    TaskProperties Dictionary<string, string>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    TaskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    ConnectorOperators []FlowTaskConnectorOperator
    Operation to be performed on the provided source fields. See Connector Operator for details.
    DestinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    SourceFields []string
    Source fields to which a particular task is applied.
    TaskProperties map[string]string
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType String
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators List<FlowTaskConnectorOperator>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField String
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields List<String>
    Source fields to which a particular task is applied.
    taskProperties Map<String,String>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators FlowTaskConnectorOperator[]
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields string[]
    Source fields to which a particular task is applied.
    taskProperties {[key: string]: string}
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    task_type str
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connector_operators Sequence[FlowTaskConnectorOperator]
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destination_field str
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    source_fields Sequence[str]
    Source fields to which a particular task is applied.
    task_properties Mapping[str, str]
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType String
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators List<Property Map>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField String
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields List<String>
    Source fields to which a particular task is applied.
    taskProperties Map<String>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
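
    For illustration, a tasks entry that relies on taskProperties might look like the sketch below; the Mask task, its key choices, and the field name are assumptions based on the key list above, not a verified configuration.

    // Sketch only: masks part of a source field. Check the exact taskProperties an
    // AppFlow Mask task expects before using this; all values here are illustrative.
    const maskTask = {
        taskType: "Mask",
        sourceFields: ["accountNumber"],              // hypothetical field name
        destinationField: "accountNumber",
        connectorOperators: [{ s3: "MASK_LAST_N" }],  // assumes an S3 source
        taskProperties: {
            MASK_VALUE: "*",   // masking character (assumption)
            MASK_LENGTH: "4",  // number of characters to mask (assumption)
        },
    };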

    FlowTaskConnectorOperator, FlowTaskConnectorOperatorArgs

    Amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    CustomConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    GoogleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    InforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    S3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    SapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    ServiceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    CustomConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    GoogleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    InforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    S3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    SapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    ServiceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude String
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector String
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog String
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace String
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics String
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus String
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo String
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 String
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce String
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData String
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow String
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular String
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack String
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro String
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva String
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk String
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude str
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    custom_connector str
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog str
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace str
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    google_analytics str
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    infor_nexus str
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo str
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 str
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce str
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapo_data str
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    service_now str
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular str
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack str
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro str
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva str
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk str
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude String
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector String
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog String
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace String
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics String
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus String
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo String
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 String
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce String
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData String
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow String
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular String
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack String
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro String
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva String
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk String
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
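
    A common use of these operators is a Filter task with the PROJECTION operator, which restricts which source fields continue through the flow. A minimal TypeScript sketch, assuming an S3 source and placeholder field names:

    // Sketch: keep only two fields from an S3 source before any mapping tasks run.
    const projectionTask = {
        taskType: "Filter",
        connectorOperators: [{ s3: "PROJECTION" }],
        sourceFields: ["fieldA", "fieldB"],  // hypothetical field names to keep
    };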

    FlowTriggerConfig, FlowTriggerConfigArgs

    TriggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    TriggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    TriggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    TriggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType String
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    trigger_type str
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    trigger_properties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType String
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties Property Map
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
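
    In TypeScript, a schedule-triggered configuration combines triggerType with the nested triggerProperties block; the sketch below uses placeholder values and mirrors the rate-style expression documented under Scheduled Trigger Properties.

    // Sketch of a schedule-triggered flow configuration; values are illustrative.
    const triggerConfig = {
        triggerType: "Scheduled",
        triggerProperties: {
            scheduled: {
                scheduleExpression: "rate(5minutes)",
                dataPullMode: "Incremental",
                scheduleStartTime: "2024-01-01T00:00:00Z",  // placeholder RFC3339 timestamp
            },
        },
    };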

    FlowTriggerConfigTriggerProperties, FlowTriggerConfigTriggerPropertiesArgs

    FlowTriggerConfigTriggerPropertiesScheduled, FlowTriggerConfigTriggerPropertiesScheduledArgs

    ScheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    DataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    FirstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    ScheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    ScheduleOffset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    ScheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    Timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Scheduled trigger: the flow runs on the given rate expression.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    ScheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    DataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    FirstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    ScheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    ScheduleOffset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    ScheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    Timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    scheduleExpression String
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode String
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom String
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime String
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset Integer
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime String
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone String
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    scheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset number
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    schedule_expression str
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    data_pull_mode str
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    first_execution_from str
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    schedule_end_time str
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    schedule_offset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    schedule_start_time str
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone str
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    scheduleExpression String
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode String
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom String
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime String
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset Number
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime String
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone String
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.


    Import

    Using pulumi import, import AppFlow flows using the arn. For example:

    $ pulumi import aws:appflow/flow:Flow example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.