aws.pipes.Pipe
Resource for managing an AWS EventBridge Pipes Pipe.
You can find out more about EventBridge Pipes in the Amazon EventBridge User Guide.
EventBridge Pipes are highly configurable and may require IAM permissions to work correctly; the User Guide covers the available configuration options and the required IAM permissions in detail.
Note: EventBridge was formerly known as CloudWatch Events. The functionality is identical.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const main = aws.getCallerIdentity({});
const example = new aws.iam.Role("example", {assumeRolePolicy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: {
Effect: "Allow",
Action: "sts:AssumeRole",
Principal: {
Service: "pipes.amazonaws.com",
},
Condition: {
StringEquals: {
"aws:SourceAccount": main.then(main => main.accountId),
},
},
},
})});
const sourceQueue = new aws.sqs.Queue("source", {});
const source = new aws.iam.RolePolicy("source", {
role: example.id,
policy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: [{
Effect: "Allow",
Action: [
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
],
Resource: [sourceQueue.arn],
}],
}),
});
const targetQueue = new aws.sqs.Queue("target", {});
const target = new aws.iam.RolePolicy("target", {
role: example.id,
policy: pulumi.jsonStringify({
Version: "2012-10-17",
Statement: [{
Effect: "Allow",
Action: ["sqs:SendMessage"],
Resource: [targetQueue.arn],
}],
}),
});
const examplePipe = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: example.arn,
source: sourceQueue.arn,
target: targetQueue.arn,
}, {
dependsOn: [
source,
target,
],
});
import pulumi
import json
import pulumi_aws as aws
main = aws.get_caller_identity()
example = aws.iam.Role("example", assume_role_policy=json.dumps({
"Version": "2012-10-17",
"Statement": {
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Principal": {
"Service": "pipes.amazonaws.com",
},
"Condition": {
"StringEquals": {
"aws:SourceAccount": main.account_id,
},
},
},
}))
source_queue = aws.sqs.Queue("source")
source = aws.iam.RolePolicy("source",
role=example.id,
policy=pulumi.Output.json_dumps({
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": [
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
],
"Resource": [source_queue.arn],
}],
}))
target_queue = aws.sqs.Queue("target")
target = aws.iam.RolePolicy("target",
role=example.id,
policy=pulumi.Output.json_dumps({
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": ["sqs:SendMessage"],
"Resource": [target_queue.arn],
}],
}))
example_pipe = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example.arn,
source=source_queue.arn,
target=target_queue.arn,
opts = pulumi.ResourceOptions(depends_on=[
source,
target,
]))
package main
import (
"encoding/json"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sqs"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
main, err := aws.GetCallerIdentity(ctx, nil, nil)
if err != nil {
return err
}
tmpJSON0, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": map[string]interface{}{
"Effect": "Allow",
"Action": "sts:AssumeRole",
"Principal": map[string]interface{}{
"Service": "pipes.amazonaws.com",
},
"Condition": map[string]interface{}{
"StringEquals": map[string]interface{}{
"aws:SourceAccount": main.AccountId,
},
},
},
})
if err != nil {
return err
}
json0 := string(tmpJSON0)
example, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
AssumeRolePolicy: pulumi.String(json0),
})
if err != nil {
return err
}
sourceQueue, err := sqs.NewQueue(ctx, "source", nil)
if err != nil {
return err
}
source, err := iam.NewRolePolicy(ctx, "source", &iam.RolePolicyArgs{
Role: example.ID(),
Policy: sourceQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
var _zero pulumi.String
tmpJSON1, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": []map[string]interface{}{
map[string]interface{}{
"Effect": "Allow",
"Action": []string{
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
},
"Resource": []string{
arn,
},
},
},
})
if err != nil {
return _zero, err
}
json1 := string(tmpJSON1)
return pulumi.String(json1), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
targetQueue, err := sqs.NewQueue(ctx, "target", nil)
if err != nil {
return err
}
target, err := iam.NewRolePolicy(ctx, "target", &iam.RolePolicyArgs{
Role: example.ID(),
Policy: targetQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
var _zero pulumi.String
tmpJSON2, err := json.Marshal(map[string]interface{}{
"Version": "2012-10-17",
"Statement": []map[string]interface{}{
map[string]interface{}{
"Effect": "Allow",
"Action": []string{
"sqs:SendMessage",
},
"Resource": []string{
arn,
},
},
},
})
if err != nil {
return _zero, err
}
json2 := string(tmpJSON2)
return pulumi.String(json2), nil
}).(pulumi.StringOutput),
})
if err != nil {
return err
}
_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: example.Arn,
Source: sourceQueue.Arn,
Target: targetQueue.Arn,
}, pulumi.DependsOn([]pulumi.Resource{
source,
target,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var main = Aws.GetCallerIdentity.Invoke();
var example = new Aws.Iam.Role("example", new()
{
AssumeRolePolicy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = "sts:AssumeRole",
["Principal"] = new Dictionary<string, object?>
{
["Service"] = "pipes.amazonaws.com",
},
["Condition"] = new Dictionary<string, object?>
{
["StringEquals"] = new Dictionary<string, object?>
{
["aws:SourceAccount"] = main.Apply(getCallerIdentityResult => getCallerIdentityResult.AccountId),
},
},
},
})),
});
var sourceQueue = new Aws.Sqs.Queue("source");
var source = new Aws.Iam.RolePolicy("source", new()
{
Role = example.Id,
Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new[]
{
new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = new[]
{
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage",
},
["Resource"] = new[]
{
sourceQueue.Arn,
},
},
},
})),
});
var targetQueue = new Aws.Sqs.Queue("target");
var target = new Aws.Iam.RolePolicy("target", new()
{
Role = example.Id,
Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
{
["Version"] = "2012-10-17",
["Statement"] = new[]
{
new Dictionary<string, object?>
{
["Effect"] = "Allow",
["Action"] = new[]
{
"sqs:SendMessage",
},
["Resource"] = new[]
{
targetQueue.Arn,
},
},
},
})),
});
var examplePipe = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = example.Arn,
Source = sourceQueue.Arn,
Target = targetQueue.Arn,
}, new CustomResourceOptions
{
DependsOn =
{
source,
target,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.AwsFunctions;
import com.pulumi.aws.inputs.GetCallerIdentityArgs;
import com.pulumi.aws.iam.Role;
import com.pulumi.aws.iam.RoleArgs;
import com.pulumi.aws.sqs.Queue;
import com.pulumi.aws.iam.RolePolicy;
import com.pulumi.aws.iam.RolePolicyArgs;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var main = AwsFunctions.getCallerIdentity();
var example = new Role("example", RoleArgs.builder()
.assumeRolePolicy(serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", "sts:AssumeRole"),
jsonProperty("Principal", jsonObject(
jsonProperty("Service", "pipes.amazonaws.com")
)),
jsonProperty("Condition", jsonObject(
jsonProperty("StringEquals", jsonObject(
jsonProperty("aws:SourceAccount", main.applyValue(getCallerIdentityResult -> getCallerIdentityResult.accountId()))
))
))
))
)))
.build());
var sourceQueue = new Queue("sourceQueue");
var source = new RolePolicy("source", RolePolicyArgs.builder()
.role(example.id())
.policy(sourceQueue.arn().applyValue(arn -> serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonArray(jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", jsonArray(
"sqs:DeleteMessage",
"sqs:GetQueueAttributes",
"sqs:ReceiveMessage"
)),
jsonProperty("Resource", jsonArray(arn))
)))
))))
.build());
var targetQueue = new Queue("targetQueue");
var target = new RolePolicy("target", RolePolicyArgs.builder()
.role(example.id())
.policy(targetQueue.arn().applyValue(arn -> serializeJson(
jsonObject(
jsonProperty("Version", "2012-10-17"),
jsonProperty("Statement", jsonArray(jsonObject(
jsonProperty("Effect", "Allow"),
jsonProperty("Action", jsonArray("sqs:SendMessage")),
jsonProperty("Resource", jsonArray(arn))
)))
))))
.build());
var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
.name("example-pipe")
.roleArn(example.arn())
.source(sourceQueue.arn())
.target(targetQueue.arn())
.build(), CustomResourceOptions.builder()
.dependsOn(
source,
target)
.build());
}
}
resources:
example:
type: aws:iam:Role
properties:
assumeRolePolicy:
fn::toJSON:
Version: 2012-10-17
Statement:
Effect: Allow
Action: sts:AssumeRole
Principal:
Service: pipes.amazonaws.com
Condition:
StringEquals:
aws:SourceAccount: ${main.accountId}
source:
type: aws:iam:RolePolicy
properties:
role: ${example.id}
policy:
fn::toJSON:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- sqs:DeleteMessage
- sqs:GetQueueAttributes
- sqs:ReceiveMessage
Resource:
- ${sourceQueue.arn}
sourceQueue:
type: aws:sqs:Queue
name: source
target:
type: aws:iam:RolePolicy
properties:
role: ${example.id}
policy:
fn::toJSON:
Version: 2012-10-17
Statement:
- Effect: Allow
Action:
- sqs:SendMessage
Resource:
- ${targetQueue.arn}
targetQueue:
type: aws:sqs:Queue
name: target
examplePipe:
type: aws:pipes:Pipe
name: example
properties:
name: example-pipe
roleArn: ${example.arn}
source: ${sourceQueue.arn}
target: ${targetQueue.arn}
options:
dependsOn:
- ${source}
- ${target}
variables:
main:
fn::invoke:
Function: aws:getCallerIdentity
Arguments: {}
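A pipe starts in the RUNNING state by default. The desiredState argument (see the argument reference below) also accepts STOPPED, which lets you provision a pipe without it immediately polling the source. A minimal sketch, reusing the role, queues, and policies from the example above; the resource names here are illustrative:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Sketch: create the pipe paused; changing desiredState to "RUNNING" in a
// later update starts consuming from the source queue.
const pausedPipe = new aws.pipes.Pipe("paused", {
    name: "example-pipe-paused",
    roleArn: example.arn,
    source: sourceQueue.arn,
    target: targetQueue.arn,
    desiredState: "STOPPED",
}, {
    dependsOn: [
        source,
        target,
    ],
});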
Enrichment Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: source.arn,
target: target.arn,
enrichment: exampleAwsCloudwatchEventApiDestination.arn,
enrichmentParameters: {
httpParameters: {
pathParameterValues: "example-path-param",
headerParameters: {
"example-header": "example-value",
"second-example-header": "second-example-value",
},
queryStringParameters: {
"example-query-string": "example-value",
"second-example-query-string": "second-example-value",
},
},
},
});
import pulumi
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source["arn"],
target=target["arn"],
enrichment=example_aws_cloudwatch_event_api_destination["arn"],
enrichment_parameters={
"http_parameters": {
"path_parameter_values": "example-path-param",
"header_parameters": {
"example_header": "example-value",
"second_example_header": "second-example-value",
},
"query_string_parameters": {
"example_query_string": "example-value",
"second_example_query_string": "second-example-value",
},
},
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(source.Arn),
Target: pulumi.Any(target.Arn),
Enrichment: pulumi.Any(exampleAwsCloudwatchEventApiDestination.Arn),
EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
PathParameterValues: pulumi.String("example-path-param"),
HeaderParameters: pulumi.StringMap{
"example-header": pulumi.String("example-value"),
"second-example-header": pulumi.String("second-example-value"),
},
QueryStringParameters: pulumi.StringMap{
"example-query-string": pulumi.String("example-value"),
"second-example-query-string": pulumi.String("second-example-value"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = source.Arn,
Target = target.Arn,
Enrichment = exampleAwsCloudwatchEventApiDestination.Arn,
EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
{
HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
{
PathParameterValues = "example-path-param",
HeaderParameters =
{
{ "example-header", "example-value" },
{ "second-example-header", "second-example-value" },
},
QueryStringParameters =
{
{ "example-query-string", "example-value" },
{ "second-example-query-string", "second-example-value" },
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersHttpParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.enrichment(exampleAwsCloudwatchEventApiDestination.arn())
.enrichmentParameters(PipeEnrichmentParametersArgs.builder()
.httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
.pathParameterValues("example-path-param")
.headerParameters(Map.ofEntries(
Map.entry("example-header", "example-value"),
Map.entry("second-example-header", "second-example-value")
))
.queryStringParameters(Map.ofEntries(
Map.entry("example-query-string", "example-value"),
Map.entry("second-example-query-string", "second-example-value")
))
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
enrichment: ${exampleAwsCloudwatchEventApiDestination.arn}
enrichmentParameters:
httpParameters:
pathParameterValues: example-path-param
headerParameters:
example-header: example-value
second-example-header: second-example-value
queryStringParameters:
example-query-string: example-value
second-example-query-string: second-example-value
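Besides httpParameters, enrichmentParameters accepts an inputTemplate (see the argument reference below) that reshapes each event before it reaches the enrichment. A minimal sketch under the same placeholder assumptions as above; the <$.body> placeholder is the EventBridge Pipes dynamic-path syntax for the body of an SQS message:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const enriched = new aws.pipes.Pipe("enriched", {
    name: "example-pipe-enriched",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    enrichment: exampleAwsCloudwatchEventApiDestination.arn,
    enrichmentParameters: {
        // Forward only the message body in a small JSON envelope. This assumes
        // the body is itself JSON; quote the placeholder for plain-text bodies.
        inputTemplate: "{\"payload\": <$.body>}",
    },
});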
Filter Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: source.arn,
target: target.arn,
sourceParameters: {
filterCriteria: {
filters: [{
pattern: JSON.stringify({
source: ["event-source"],
}),
}],
},
},
});
import pulumi
import json
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source["arn"],
target=target["arn"],
source_parameters={
"filter_criteria": {
"filters": [{
"pattern": json.dumps({
"source": ["event-source"],
}),
}],
},
})
package main
import (
"encoding/json"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
tmpJSON0, err := json.Marshal(map[string]interface{}{
"source": []string{
"event-source",
},
})
if err != nil {
return err
}
json0 := string(tmpJSON0)
_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(source.Arn),
Target: pulumi.Any(target.Arn),
SourceParameters: &pipes.PipeSourceParametersArgs{
FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
Pattern: pulumi.String(json0),
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = source.Arn,
Target = target.Arn,
SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
{
FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
{
Filters = new[]
{
new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
{
Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
{
["source"] = new[]
{
"event-source",
},
}),
},
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersFilterCriteriaArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.sourceParameters(PipeSourceParametersArgs.builder()
.filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
.filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
.pattern(serializeJson(
jsonObject(
jsonProperty("source", jsonArray("event-source"))
)))
.build())
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
sourceParameters:
filterCriteria:
filters:
- pattern:
fn::toJSON:
source:
- event-source
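Filter patterns use the standard EventBridge event pattern syntax, so content matchers such as prefix work here as well, and filterCriteria.filters accepts multiple patterns, which are evaluated as a logical OR. A sketch under the same placeholder assumptions:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const filtered = new aws.pipes.Pipe("filtered", {
    name: "example-pipe-filtered",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    sourceParameters: {
        filterCriteria: {
            filters: [
                // Match any event whose source begins with "com.example.".
                {
                    pattern: JSON.stringify({
                        source: [{
                            prefix: "com.example.",
                        }],
                    }),
                },
                // ...or an exact match on a second source.
                {
                    pattern: JSON.stringify({
                        source: ["event-source"],
                    }),
                },
            ],
        },
    },
});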
CloudWatch Logs Logging Configuration Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.cloudwatch.LogGroup("example", {name: "example-pipe-target"});
const examplePipe = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: sourceAwsSqsQueue.arn,
target: targetAwsSqsQueue.arn,
logConfiguration: {
includeExecutionDatas: ["ALL"],
level: "INFO",
cloudwatchLogsLogDestination: {
logGroupArn: example.arn,
},
},
}, {
dependsOn: [
source,
target,
],
});
import pulumi
import pulumi_aws as aws
example = aws.cloudwatch.LogGroup("example", name="example-pipe-target")
example_pipe = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source_aws_sqs_queue["arn"],
target=target_aws_sqs_queue["arn"],
log_configuration={
"include_execution_datas": ["ALL"],
"level": "INFO",
"cloudwatch_logs_log_destination": {
"log_group_arn": target_aws_cloudwatch_log_group["arn"],
},
},
opts = pulumi.ResourceOptions(depends_on=[
source,
target,
]))
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/cloudwatch"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := cloudwatch.NewLogGroup(ctx, "example", &cloudwatch.LogGroupArgs{
Name: pulumi.String("example-pipe-target"),
})
if err != nil {
return err
}
_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(sourceAwsSqsQueue.Arn),
Target: pulumi.Any(targetAwsSqsQueue.Arn),
LogConfiguration: &pipes.PipeLogConfigurationArgs{
IncludeExecutionDatas: pulumi.StringArray{
pulumi.String("ALL"),
},
Level: pulumi.String("INFO"),
CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
LogGroupArn: example.Arn,
},
},
}, pulumi.DependsOn([]pulumi.Resource{
source,
target,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.CloudWatch.LogGroup("example", new()
{
Name = "example-pipe-target",
});
var examplePipe = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = sourceAwsSqsQueue.Arn,
Target = targetAwsSqsQueue.Arn,
LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
{
IncludeExecutionDatas = new[]
{
"ALL",
},
Level = "INFO",
CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
{
LogGroupArn = example.Arn,
},
},
}, new CustomResourceOptions
{
DependsOn =
{
source,
target,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.cloudwatch.LogGroup;
import com.pulumi.aws.cloudwatch.LogGroupArgs;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeLogConfigurationArgs;
import com.pulumi.aws.pipes.inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new LogGroup("example", LogGroupArgs.builder()
.name("example-pipe-target")
.build());
var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(sourceAwsSqsQueue.arn())
.target(targetAwsSqsQueue.arn())
.logConfiguration(PipeLogConfigurationArgs.builder()
.includeExecutionDatas("ALL")
.level("INFO")
.cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
.logGroupArn(example.arn())
.build())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(
source,
target)
.build());
}
}
resources:
example:
type: aws:cloudwatch:LogGroup
properties:
name: example-pipe-target
examplePipe:
type: aws:pipes:Pipe
name: example
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${sourceAwsSqsQueue.arn}
target: ${targetAwsSqsQueue.arn}
logConfiguration:
includeExecutionDatas:
- ALL
level: INFO
cloudwatchLogsLogDestination:
logGroupArn: ${example.arn}
options:
dependsOn:
- ${source}
- ${target}
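CloudWatch Logs is only one of the supported destinations: logConfiguration can instead point at a Kinesis Data Firehose delivery stream (firehoseLogDestination) or an S3 bucket (s3LogDestination); both are listed in the argument reference below. A sketch of the S3 variant, assuming a hypothetical exampleBucket defined elsewhere in the program:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const s3LoggedPipe = new aws.pipes.Pipe("s3-logged", {
    name: "example-pipe-s3-logs",
    roleArn: exampleAwsIamRole.arn,
    source: sourceAwsSqsQueue.arn,
    target: targetAwsSqsQueue.arn,
    logConfiguration: {
        level: "ERROR",
        s3LogDestination: {
            bucketName: exampleBucket.bucket, // hypothetical aws.s3.Bucket
            bucketOwner: "123456789012",      // account ID that owns the bucket
            outputFormat: "json",
        },
    },
});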
SQS Source and Target Configuration Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.pipes.Pipe("example", {
name: "example-pipe",
roleArn: exampleAwsIamRole.arn,
source: source.arn,
target: target.arn,
sourceParameters: {
sqsQueueParameters: {
batchSize: 1,
maximumBatchingWindowInSeconds: 2,
},
},
targetParameters: {
sqsQueueParameters: {
messageDeduplicationId: "example-dedupe",
messageGroupId: "example-group",
},
},
});
import pulumi
import pulumi_aws as aws
example = aws.pipes.Pipe("example",
name="example-pipe",
role_arn=example_aws_iam_role["arn"],
source=source["arn"],
target=target["arn"],
source_parameters={
"sqs_queue_parameters": {
"batch_size": 1,
"maximum_batching_window_in_seconds": 2,
},
},
target_parameters={
"sqs_queue_parameters": {
"message_deduplication_id": "example-dedupe",
"message_group_id": "example-group",
},
})
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
Name: pulumi.String("example-pipe"),
RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
Source: pulumi.Any(source.Arn),
Target: pulumi.Any(target.Arn),
SourceParameters: &pipes.PipeSourceParametersArgs{
SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
BatchSize: pulumi.Int(1),
MaximumBatchingWindowInSeconds: pulumi.Int(2),
},
},
TargetParameters: &pipes.PipeTargetParametersArgs{
SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
MessageDeduplicationId: pulumi.String("example-dedupe"),
MessageGroupId: pulumi.String("example-group"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var example = new Aws.Pipes.Pipe("example", new()
{
Name = "example-pipe",
RoleArn = exampleAwsIamRole.Arn,
Source = source.Arn,
Target = target.Arn,
SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
{
SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
{
BatchSize = 1,
MaximumBatchingWindowInSeconds = 2,
},
},
TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
{
SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
{
MessageDeduplicationId = "example-dedupe",
MessageGroupId = "example-group",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.pipes.Pipe;
import com.pulumi.aws.pipes.PipeArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeSourceParametersSqsQueueParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersArgs;
import com.pulumi.aws.pipes.inputs.PipeTargetParametersSqsQueueParametersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new Pipe("example", PipeArgs.builder()
.name("example-pipe")
.roleArn(exampleAwsIamRole.arn())
.source(source.arn())
.target(target.arn())
.sourceParameters(PipeSourceParametersArgs.builder()
.sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
.batchSize(1)
.maximumBatchingWindowInSeconds(2)
.build())
.build())
.targetParameters(PipeTargetParametersArgs.builder()
.sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
.messageDeduplicationId("example-dedupe")
.messageGroupId("example-group")
.build())
.build())
.build());
}
}
resources:
example:
type: aws:pipes:Pipe
properties:
name: example-pipe
roleArn: ${exampleAwsIamRole.arn}
source: ${source.arn}
target: ${target.arn}
sourceParameters:
sqsQueueParameters:
batchSize: 1
maximumBatchingWindowInSeconds: 2
targetParameters:
sqsQueueParameters:
messageDeduplicationId: example-dedupe
messageGroupId: example-group
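targetParameters likewise accepts an inputTemplate (see the argument reference below) for transforming each event before delivery to the target. A minimal sketch under the same placeholder assumptions; <$.body> expands to the body of the source SQS message:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const templated = new aws.pipes.Pipe("templated", {
    name: "example-pipe-templated",
    roleArn: exampleAwsIamRole.arn,
    source: source.arn,
    target: target.arn,
    targetParameters: {
        // Wrap the original body in a JSON envelope. As with enrichment
        // templates, this assumes the body is itself JSON.
        inputTemplate: "{\"original\": <$.body>}",
    },
});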
Create Pipe Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
@overload
def Pipe(resource_name: str,
args: PipeArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Pipe(resource_name: str,
opts: Optional[ResourceOptions] = None,
role_arn: Optional[str] = None,
target: Optional[str] = None,
source: Optional[str] = None,
name_prefix: Optional[str] = None,
log_configuration: Optional[PipeLogConfigurationArgs] = None,
name: Optional[str] = None,
description: Optional[str] = None,
enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
enrichment: Optional[str] = None,
source_parameters: Optional[PipeSourceParametersArgs] = None,
tags: Optional[Mapping[str, str]] = None,
desired_state: Optional[str] = None,
target_parameters: Optional[PipeTargetParametersArgs] = None)
func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
type: aws:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var pipeResource = new Aws.Pipes.Pipe("pipeResource", new()
{
RoleArn = "string",
Target = "string",
Source = "string",
NamePrefix = "string",
LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
{
Level = "string",
CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
{
LogGroupArn = "string",
},
FirehoseLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationFirehoseLogDestinationArgs
{
DeliveryStreamArn = "string",
},
IncludeExecutionDatas = new[]
{
"string",
},
S3LogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationS3LogDestinationArgs
{
BucketName = "string",
BucketOwner = "string",
OutputFormat = "string",
Prefix = "string",
},
},
Name = "string",
Description = "string",
EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
{
HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
{
HeaderParameters =
{
{ "string", "string" },
},
PathParameterValues = "string",
QueryStringParameters =
{
{ "string", "string" },
},
},
InputTemplate = "string",
},
Enrichment = "string",
SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
{
ActivemqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersArgs
{
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersCredentialsArgs
{
BasicAuth = "string",
},
QueueName = "string",
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
},
DynamodbStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersArgs
{
StartingPosition = "string",
BatchSize = 0,
DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
{
Arn = "string",
},
MaximumBatchingWindowInSeconds = 0,
MaximumRecordAgeInSeconds = 0,
MaximumRetryAttempts = 0,
OnPartialBatchItemFailure = "string",
ParallelizationFactor = 0,
},
FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
{
Filters = new[]
{
new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
{
Pattern = "string",
},
},
},
KinesisStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersArgs
{
StartingPosition = "string",
BatchSize = 0,
DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
{
Arn = "string",
},
MaximumBatchingWindowInSeconds = 0,
MaximumRecordAgeInSeconds = 0,
MaximumRetryAttempts = 0,
OnPartialBatchItemFailure = "string",
ParallelizationFactor = 0,
StartingPositionTimestamp = "string",
},
ManagedStreamingKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersArgs
{
TopicName = "string",
BatchSize = 0,
ConsumerGroupId = "string",
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
{
ClientCertificateTlsAuth = "string",
SaslScram512Auth = "string",
},
MaximumBatchingWindowInSeconds = 0,
StartingPosition = "string",
},
RabbitmqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersArgs
{
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
{
BasicAuth = "string",
},
QueueName = "string",
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
VirtualHost = "string",
},
SelfManagedKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersArgs
{
TopicName = "string",
AdditionalBootstrapServers = new[]
{
"string",
},
BatchSize = 0,
ConsumerGroupId = "string",
Credentials = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
{
BasicAuth = "string",
ClientCertificateTlsAuth = "string",
SaslScram256Auth = "string",
SaslScram512Auth = "string",
},
MaximumBatchingWindowInSeconds = 0,
ServerRootCaCertificate = "string",
StartingPosition = "string",
Vpc = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersVpcArgs
{
SecurityGroups = new[]
{
"string",
},
Subnets = new[]
{
"string",
},
},
},
SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
{
BatchSize = 0,
MaximumBatchingWindowInSeconds = 0,
},
},
Tags =
{
{ "string", "string" },
},
DesiredState = "string",
TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
{
BatchJobParameters = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArgs
{
JobDefinition = "string",
JobName = "string",
ArrayProperties = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArrayPropertiesArgs
{
Size = 0,
},
ContainerOverrides = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesArgs
{
Commands = new[]
{
"string",
},
Environments = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
{
Name = "string",
Value = "string",
},
},
InstanceType = "string",
ResourceRequirements = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
{
Type = "string",
Value = "string",
},
},
},
DependsOns = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersDependsOnArgs
{
JobId = "string",
Type = "string",
},
},
Parameters =
{
{ "string", "string" },
},
RetryStrategy = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersRetryStrategyArgs
{
Attempts = 0,
},
},
CloudwatchLogsParameters = new Aws.Pipes.Inputs.PipeTargetParametersCloudwatchLogsParametersArgs
{
LogStreamName = "string",
Timestamp = "string",
},
EcsTaskParameters = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersArgs
{
TaskDefinitionArn = "string",
Overrides = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesArgs
{
ContainerOverrides = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
{
Commands = new[]
{
"string",
},
Cpu = 0,
EnvironmentFiles = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
{
Type = "string",
Value = "string",
},
},
Environments = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
{
Name = "string",
Value = "string",
},
},
Memory = 0,
MemoryReservation = 0,
Name = "string",
ResourceRequirements = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
{
Type = "string",
Value = "string",
},
},
},
},
Cpu = "string",
EphemeralStorage = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
{
SizeInGib = 0,
},
ExecutionRoleArn = "string",
InferenceAcceleratorOverrides = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
{
DeviceName = "string",
DeviceType = "string",
},
},
Memory = "string",
TaskRoleArn = "string",
},
PlacementStrategies = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
{
Field = "string",
Type = "string",
},
},
Group = "string",
LaunchType = "string",
NetworkConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
{
AwsVpcConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
{
AssignPublicIp = "string",
SecurityGroups = new[]
{
"string",
},
Subnets = new[]
{
"string",
},
},
},
CapacityProviderStrategies = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
{
CapacityProvider = "string",
Base = 0,
Weight = 0,
},
},
PlacementConstraints = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
{
Expression = "string",
Type = "string",
},
},
EnableExecuteCommand = false,
PlatformVersion = "string",
PropagateTags = "string",
ReferenceId = "string",
Tags =
{
{ "string", "string" },
},
TaskCount = 0,
EnableEcsManagedTags = false,
},
EventbridgeEventBusParameters = new Aws.Pipes.Inputs.PipeTargetParametersEventbridgeEventBusParametersArgs
{
DetailType = "string",
EndpointId = "string",
Resources = new[]
{
"string",
},
Source = "string",
Time = "string",
},
HttpParameters = new Aws.Pipes.Inputs.PipeTargetParametersHttpParametersArgs
{
HeaderParameters =
{
{ "string", "string" },
},
PathParameterValues = "string",
QueryStringParameters =
{
{ "string", "string" },
},
},
InputTemplate = "string",
KinesisStreamParameters = new Aws.Pipes.Inputs.PipeTargetParametersKinesisStreamParametersArgs
{
PartitionKey = "string",
},
LambdaFunctionParameters = new Aws.Pipes.Inputs.PipeTargetParametersLambdaFunctionParametersArgs
{
InvocationType = "string",
},
RedshiftDataParameters = new Aws.Pipes.Inputs.PipeTargetParametersRedshiftDataParametersArgs
{
Database = "string",
Sqls = new[]
{
"string",
},
DbUser = "string",
SecretManagerArn = "string",
StatementName = "string",
WithEvent = false,
},
SagemakerPipelineParameters = new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersArgs
{
PipelineParameters = new[]
{
new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
{
Name = "string",
Value = "string",
},
},
},
SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
{
MessageDeduplicationId = "string",
MessageGroupId = "string",
},
StepFunctionStateMachineParameters = new Aws.Pipes.Inputs.PipeTargetParametersStepFunctionStateMachineParametersArgs
{
InvocationType = "string",
},
},
});
example, err := pipes.NewPipe(ctx, "pipeResource", &pipes.PipeArgs{
RoleArn: pulumi.String("string"),
Target: pulumi.String("string"),
Source: pulumi.String("string"),
NamePrefix: pulumi.String("string"),
LogConfiguration: &pipes.PipeLogConfigurationArgs{
Level: pulumi.String("string"),
CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
LogGroupArn: pulumi.String("string"),
},
FirehoseLogDestination: &pipes.PipeLogConfigurationFirehoseLogDestinationArgs{
DeliveryStreamArn: pulumi.String("string"),
},
IncludeExecutionDatas: pulumi.StringArray{
pulumi.String("string"),
},
S3LogDestination: &pipes.PipeLogConfigurationS3LogDestinationArgs{
BucketName: pulumi.String("string"),
BucketOwner: pulumi.String("string"),
OutputFormat: pulumi.String("string"),
Prefix: pulumi.String("string"),
},
},
Name: pulumi.String("string"),
Description: pulumi.String("string"),
EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
HeaderParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
PathParameterValues: pulumi.String("string"),
QueryStringParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
InputTemplate: pulumi.String("string"),
},
Enrichment: pulumi.String("string"),
SourceParameters: &pipes.PipeSourceParametersArgs{
ActivemqBrokerParameters: &pipes.PipeSourceParametersActivemqBrokerParametersArgs{
Credentials: &pipes.PipeSourceParametersActivemqBrokerParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
},
QueueName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
},
DynamodbStreamParameters: &pipes.PipeSourceParametersDynamodbStreamParametersArgs{
StartingPosition: pulumi.String("string"),
BatchSize: pulumi.Int(0),
DeadLetterConfig: &pipes.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs{
Arn: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
MaximumRecordAgeInSeconds: pulumi.Int(0),
MaximumRetryAttempts: pulumi.Int(0),
OnPartialBatchItemFailure: pulumi.String("string"),
ParallelizationFactor: pulumi.Int(0),
},
FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
Pattern: pulumi.String("string"),
},
},
},
KinesisStreamParameters: &pipes.PipeSourceParametersKinesisStreamParametersArgs{
StartingPosition: pulumi.String("string"),
BatchSize: pulumi.Int(0),
DeadLetterConfig: &pipes.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs{
Arn: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
MaximumRecordAgeInSeconds: pulumi.Int(0),
MaximumRetryAttempts: pulumi.Int(0),
OnPartialBatchItemFailure: pulumi.String("string"),
ParallelizationFactor: pulumi.Int(0),
StartingPositionTimestamp: pulumi.String("string"),
},
ManagedStreamingKafkaParameters: &pipes.PipeSourceParametersManagedStreamingKafkaParametersArgs{
TopicName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
ConsumerGroupId: pulumi.String("string"),
Credentials: &pipes.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs{
ClientCertificateTlsAuth: pulumi.String("string"),
SaslScram512Auth: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
StartingPosition: pulumi.String("string"),
},
RabbitmqBrokerParameters: &pipes.PipeSourceParametersRabbitmqBrokerParametersArgs{
Credentials: &pipes.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
},
QueueName: pulumi.String("string"),
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
VirtualHost: pulumi.String("string"),
},
SelfManagedKafkaParameters: &pipes.PipeSourceParametersSelfManagedKafkaParametersArgs{
TopicName: pulumi.String("string"),
AdditionalBootstrapServers: pulumi.StringArray{
pulumi.String("string"),
},
BatchSize: pulumi.Int(0),
ConsumerGroupId: pulumi.String("string"),
Credentials: &pipes.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs{
BasicAuth: pulumi.String("string"),
ClientCertificateTlsAuth: pulumi.String("string"),
SaslScram256Auth: pulumi.String("string"),
SaslScram512Auth: pulumi.String("string"),
},
MaximumBatchingWindowInSeconds: pulumi.Int(0),
ServerRootCaCertificate: pulumi.String("string"),
StartingPosition: pulumi.String("string"),
Vpc: &pipes.PipeSourceParametersSelfManagedKafkaParametersVpcArgs{
SecurityGroups: pulumi.StringArray{
pulumi.String("string"),
},
Subnets: pulumi.StringArray{
pulumi.String("string"),
},
},
},
SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
BatchSize: pulumi.Int(0),
MaximumBatchingWindowInSeconds: pulumi.Int(0),
},
},
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
DesiredState: pulumi.String("string"),
TargetParameters: &pipes.PipeTargetParametersArgs{
BatchJobParameters: &pipes.PipeTargetParametersBatchJobParametersArgs{
JobDefinition: pulumi.String("string"),
JobName: pulumi.String("string"),
ArrayProperties: &pipes.PipeTargetParametersBatchJobParametersArrayPropertiesArgs{
Size: pulumi.Int(0),
},
ContainerOverrides: &pipes.PipeTargetParametersBatchJobParametersContainerOverridesArgs{
Commands: pulumi.StringArray{
pulumi.String("string"),
},
Environments: pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArray{
&pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
InstanceType: pulumi.String("string"),
ResourceRequirements: pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArray{
&pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
DependsOns: pipes.PipeTargetParametersBatchJobParametersDependsOnArray{
&pipes.PipeTargetParametersBatchJobParametersDependsOnArgs{
JobId: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
RetryStrategy: &pipes.PipeTargetParametersBatchJobParametersRetryStrategyArgs{
Attempts: pulumi.Int(0),
},
},
CloudwatchLogsParameters: &pipes.PipeTargetParametersCloudwatchLogsParametersArgs{
LogStreamName: pulumi.String("string"),
Timestamp: pulumi.String("string"),
},
EcsTaskParameters: &pipes.PipeTargetParametersEcsTaskParametersArgs{
TaskDefinitionArn: pulumi.String("string"),
Overrides: &pipes.PipeTargetParametersEcsTaskParametersOverridesArgs{
ContainerOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs{
Commands: pulumi.StringArray{
pulumi.String("string"),
},
Cpu: pulumi.Int(0),
EnvironmentFiles: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
Environments: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
Memory: pulumi.Int(0),
MemoryReservation: pulumi.Int(0),
Name: pulumi.String("string"),
ResourceRequirements: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs{
Type: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
},
Cpu: pulumi.String("string"),
EphemeralStorage: &pipes.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs{
SizeInGib: pulumi.Int(0),
},
ExecutionRoleArn: pulumi.String("string"),
InferenceAcceleratorOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArray{
&pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs{
DeviceName: pulumi.String("string"),
DeviceType: pulumi.String("string"),
},
},
Memory: pulumi.String("string"),
TaskRoleArn: pulumi.String("string"),
},
PlacementStrategies: pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArray{
&pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs{
Field: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
Group: pulumi.String("string"),
LaunchType: pulumi.String("string"),
NetworkConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs{
AwsVpcConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs{
AssignPublicIp: pulumi.String("string"),
SecurityGroups: pulumi.StringArray{
pulumi.String("string"),
},
Subnets: pulumi.StringArray{
pulumi.String("string"),
},
},
},
CapacityProviderStrategies: pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArray{
&pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs{
CapacityProvider: pulumi.String("string"),
Base: pulumi.Int(0),
Weight: pulumi.Int(0),
},
},
PlacementConstraints: pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArray{
&pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs{
Expression: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
EnableExecuteCommand: pulumi.Bool(false),
PlatformVersion: pulumi.String("string"),
PropagateTags: pulumi.String("string"),
ReferenceId: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
TaskCount: pulumi.Int(0),
EnableEcsManagedTags: pulumi.Bool(false),
},
EventbridgeEventBusParameters: &pipes.PipeTargetParametersEventbridgeEventBusParametersArgs{
DetailType: pulumi.String("string"),
EndpointId: pulumi.String("string"),
Resources: pulumi.StringArray{
pulumi.String("string"),
},
Source: pulumi.String("string"),
Time: pulumi.String("string"),
},
HttpParameters: &pipes.PipeTargetParametersHttpParametersArgs{
HeaderParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
PathParameterValues: pulumi.String("string"),
QueryStringParameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
InputTemplate: pulumi.String("string"),
KinesisStreamParameters: &pipes.PipeTargetParametersKinesisStreamParametersArgs{
PartitionKey: pulumi.String("string"),
},
LambdaFunctionParameters: &pipes.PipeTargetParametersLambdaFunctionParametersArgs{
InvocationType: pulumi.String("string"),
},
RedshiftDataParameters: &pipes.PipeTargetParametersRedshiftDataParametersArgs{
Database: pulumi.String("string"),
Sqls: pulumi.StringArray{
pulumi.String("string"),
},
DbUser: pulumi.String("string"),
SecretManagerArn: pulumi.String("string"),
StatementName: pulumi.String("string"),
WithEvent: pulumi.Bool(false),
},
SagemakerPipelineParameters: &pipes.PipeTargetParametersSagemakerPipelineParametersArgs{
PipelineParameters: pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArray{
&pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs{
Name: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
},
SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
MessageDeduplicationId: pulumi.String("string"),
MessageGroupId: pulumi.String("string"),
},
StepFunctionStateMachineParameters: &pipes.PipeTargetParametersStepFunctionStateMachineParametersArgs{
InvocationType: pulumi.String("string"),
},
},
})
var pipeResource = new Pipe("pipeResource", PipeArgs.builder()
.roleArn("string")
.target("string")
.source("string")
.namePrefix("string")
.logConfiguration(PipeLogConfigurationArgs.builder()
.level("string")
.cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
.logGroupArn("string")
.build())
.firehoseLogDestination(PipeLogConfigurationFirehoseLogDestinationArgs.builder()
.deliveryStreamArn("string")
.build())
.includeExecutionDatas("string")
.s3LogDestination(PipeLogConfigurationS3LogDestinationArgs.builder()
.bucketName("string")
.bucketOwner("string")
.outputFormat("string")
.prefix("string")
.build())
.build())
.name("string")
.description("string")
.enrichmentParameters(PipeEnrichmentParametersArgs.builder()
.httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
.headerParameters(Map.of("string", "string"))
.pathParameterValues("string")
.queryStringParameters(Map.of("string", "string"))
.build())
.inputTemplate("string")
.build())
.enrichment("string")
.sourceParameters(PipeSourceParametersArgs.builder()
.activemqBrokerParameters(PipeSourceParametersActivemqBrokerParametersArgs.builder()
.credentials(PipeSourceParametersActivemqBrokerParametersCredentialsArgs.builder()
.basicAuth("string")
.build())
.queueName("string")
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.build())
.dynamodbStreamParameters(PipeSourceParametersDynamodbStreamParametersArgs.builder()
.startingPosition("string")
.batchSize(0)
.deadLetterConfig(PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs.builder()
.arn("string")
.build())
.maximumBatchingWindowInSeconds(0)
.maximumRecordAgeInSeconds(0)
.maximumRetryAttempts(0)
.onPartialBatchItemFailure("string")
.parallelizationFactor(0)
.build())
.filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
.filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
.pattern("string")
.build())
.build())
.kinesisStreamParameters(PipeSourceParametersKinesisStreamParametersArgs.builder()
.startingPosition("string")
.batchSize(0)
.deadLetterConfig(PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs.builder()
.arn("string")
.build())
.maximumBatchingWindowInSeconds(0)
.maximumRecordAgeInSeconds(0)
.maximumRetryAttempts(0)
.onPartialBatchItemFailure("string")
.parallelizationFactor(0)
.startingPositionTimestamp("string")
.build())
.managedStreamingKafkaParameters(PipeSourceParametersManagedStreamingKafkaParametersArgs.builder()
.topicName("string")
.batchSize(0)
.consumerGroupId("string")
.credentials(PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs.builder()
.clientCertificateTlsAuth("string")
.saslScram512Auth("string")
.build())
.maximumBatchingWindowInSeconds(0)
.startingPosition("string")
.build())
.rabbitmqBrokerParameters(PipeSourceParametersRabbitmqBrokerParametersArgs.builder()
.credentials(PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs.builder()
.basicAuth("string")
.build())
.queueName("string")
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.virtualHost("string")
.build())
.selfManagedKafkaParameters(PipeSourceParametersSelfManagedKafkaParametersArgs.builder()
.topicName("string")
.additionalBootstrapServers("string")
.batchSize(0)
.consumerGroupId("string")
.credentials(PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs.builder()
.basicAuth("string")
.clientCertificateTlsAuth("string")
.saslScram256Auth("string")
.saslScram512Auth("string")
.build())
.maximumBatchingWindowInSeconds(0)
.serverRootCaCertificate("string")
.startingPosition("string")
.vpc(PipeSourceParametersSelfManagedKafkaParametersVpcArgs.builder()
.securityGroups("string")
.subnets("string")
.build())
.build())
.sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
.batchSize(0)
.maximumBatchingWindowInSeconds(0)
.build())
.build())
.tags(Map.of("string", "string"))
.desiredState("string")
.targetParameters(PipeTargetParametersArgs.builder()
.batchJobParameters(PipeTargetParametersBatchJobParametersArgs.builder()
.jobDefinition("string")
.jobName("string")
.arrayProperties(PipeTargetParametersBatchJobParametersArrayPropertiesArgs.builder()
.size(0)
.build())
.containerOverrides(PipeTargetParametersBatchJobParametersContainerOverridesArgs.builder()
.commands("string")
.environments(PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs.builder()
.name("string")
.value("string")
.build())
.instanceType("string")
.resourceRequirements(PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs.builder()
.type("string")
.value("string")
.build())
.build())
.dependsOns(PipeTargetParametersBatchJobParametersDependsOnArgs.builder()
.jobId("string")
.type("string")
.build())
.parameters(Map.of("string", "string"))
.retryStrategy(PipeTargetParametersBatchJobParametersRetryStrategyArgs.builder()
.attempts(0)
.build())
.build())
.cloudwatchLogsParameters(PipeTargetParametersCloudwatchLogsParametersArgs.builder()
.logStreamName("string")
.timestamp("string")
.build())
.ecsTaskParameters(PipeTargetParametersEcsTaskParametersArgs.builder()
.taskDefinitionArn("string")
.overrides(PipeTargetParametersEcsTaskParametersOverridesArgs.builder()
.containerOverrides(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs.builder()
.commands("string")
.cpu(0)
.environmentFiles(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs.builder()
.type("string")
.value("string")
.build())
.environments(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs.builder()
.name("string")
.value("string")
.build())
.memory(0)
.memoryReservation(0)
.name("string")
.resourceRequirements(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs.builder()
.type("string")
.value("string")
.build())
.build())
.cpu("string")
.ephemeralStorage(PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs.builder()
.sizeInGib(0)
.build())
.executionRoleArn("string")
.inferenceAcceleratorOverrides(PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs.builder()
.deviceName("string")
.deviceType("string")
.build())
.memory("string")
.taskRoleArn("string")
.build())
.placementStrategies(PipeTargetParametersEcsTaskParametersPlacementStrategyArgs.builder()
.field("string")
.type("string")
.build())
.group("string")
.launchType("string")
.networkConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs.builder()
.awsVpcConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs.builder()
.assignPublicIp("string")
.securityGroups("string")
.subnets("string")
.build())
.build())
.capacityProviderStrategies(PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs.builder()
.capacityProvider("string")
.base(0)
.weight(0)
.build())
.placementConstraints(PipeTargetParametersEcsTaskParametersPlacementConstraintArgs.builder()
.expression("string")
.type("string")
.build())
.enableExecuteCommand(false)
.platformVersion("string")
.propagateTags("string")
.referenceId("string")
.tags(Map.of("string", "string"))
.taskCount(0)
.enableEcsManagedTags(false)
.build())
.eventbridgeEventBusParameters(PipeTargetParametersEventbridgeEventBusParametersArgs.builder()
.detailType("string")
.endpointId("string")
.resources("string")
.source("string")
.time("string")
.build())
.httpParameters(PipeTargetParametersHttpParametersArgs.builder()
.headerParameters(Map.of("string", "string"))
.pathParameterValues("string")
.queryStringParameters(Map.of("string", "string"))
.build())
.inputTemplate("string")
.kinesisStreamParameters(PipeTargetParametersKinesisStreamParametersArgs.builder()
.partitionKey("string")
.build())
.lambdaFunctionParameters(PipeTargetParametersLambdaFunctionParametersArgs.builder()
.invocationType("string")
.build())
.redshiftDataParameters(PipeTargetParametersRedshiftDataParametersArgs.builder()
.database("string")
.sqls("string")
.dbUser("string")
.secretManagerArn("string")
.statementName("string")
.withEvent(false)
.build())
.sagemakerPipelineParameters(PipeTargetParametersSagemakerPipelineParametersArgs.builder()
.pipelineParameters(PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs.builder()
.name("string")
.value("string")
.build())
.build())
.sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
.messageDeduplicationId("string")
.messageGroupId("string")
.build())
.stepFunctionStateMachineParameters(PipeTargetParametersStepFunctionStateMachineParametersArgs.builder()
.invocationType("string")
.build())
.build())
.build());
pipe_resource = aws.pipes.Pipe("pipeResource",
role_arn="string",
target="string",
source="string",
name_prefix="string",
log_configuration={
"level": "string",
"cloudwatchLogsLogDestination": {
"logGroupArn": "string",
},
"firehoseLogDestination": {
"deliveryStreamArn": "string",
},
"includeExecutionDatas": ["string"],
"s3LogDestination": {
"bucketName": "string",
"bucketOwner": "string",
"outputFormat": "string",
"prefix": "string",
},
},
name="string",
description="string",
enrichment_parameters={
"httpParameters": {
"headerParameters": {
"string": "string",
},
"pathParameterValues": "string",
"queryStringParameters": {
"string": "string",
},
},
"inputTemplate": "string",
},
enrichment="string",
source_parameters={
"activemqBrokerParameters": {
"credentials": {
"basicAuth": "string",
},
"queueName": "string",
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
},
"dynamodbStreamParameters": {
"startingPosition": "string",
"batchSize": 0,
"deadLetterConfig": {
"arn": "string",
},
"maximumBatchingWindowInSeconds": 0,
"maximumRecordAgeInSeconds": 0,
"maximumRetryAttempts": 0,
"onPartialBatchItemFailure": "string",
"parallelizationFactor": 0,
},
"filterCriteria": {
"filters": [{
"pattern": "string",
}],
},
"kinesisStreamParameters": {
"startingPosition": "string",
"batchSize": 0,
"deadLetterConfig": {
"arn": "string",
},
"maximumBatchingWindowInSeconds": 0,
"maximumRecordAgeInSeconds": 0,
"maximumRetryAttempts": 0,
"onPartialBatchItemFailure": "string",
"parallelizationFactor": 0,
"startingPositionTimestamp": "string",
},
"managedStreamingKafkaParameters": {
"topicName": "string",
"batchSize": 0,
"consumerGroupId": "string",
"credentials": {
"clientCertificateTlsAuth": "string",
"saslScram512Auth": "string",
},
"maximumBatchingWindowInSeconds": 0,
"startingPosition": "string",
},
"rabbitmqBrokerParameters": {
"credentials": {
"basicAuth": "string",
},
"queueName": "string",
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
"virtualHost": "string",
},
"selfManagedKafkaParameters": {
"topicName": "string",
"additionalBootstrapServers": ["string"],
"batchSize": 0,
"consumerGroupId": "string",
"credentials": {
"basicAuth": "string",
"clientCertificateTlsAuth": "string",
"saslScram256Auth": "string",
"saslScram512Auth": "string",
},
"maximumBatchingWindowInSeconds": 0,
"serverRootCaCertificate": "string",
"startingPosition": "string",
"vpc": {
"securityGroups": ["string"],
"subnets": ["string"],
},
},
"sqsQueueParameters": {
"batchSize": 0,
"maximumBatchingWindowInSeconds": 0,
},
},
tags={
"string": "string",
},
desired_state="string",
target_parameters={
"batchJobParameters": {
"jobDefinition": "string",
"jobName": "string",
"arrayProperties": {
"size": 0,
},
"containerOverrides": {
"commands": ["string"],
"environments": [{
"name": "string",
"value": "string",
}],
"instanceType": "string",
"resourceRequirements": [{
"type": "string",
"value": "string",
}],
},
"dependsOns": [{
"jobId": "string",
"type": "string",
}],
"parameters": {
"string": "string",
},
"retryStrategy": {
"attempts": 0,
},
},
"cloudwatchLogsParameters": {
"logStreamName": "string",
"timestamp": "string",
},
"ecsTaskParameters": {
"taskDefinitionArn": "string",
"overrides": {
"containerOverrides": [{
"commands": ["string"],
"cpu": 0,
"environmentFiles": [{
"type": "string",
"value": "string",
}],
"environments": [{
"name": "string",
"value": "string",
}],
"memory": 0,
"memoryReservation": 0,
"name": "string",
"resourceRequirements": [{
"type": "string",
"value": "string",
}],
}],
"cpu": "string",
"ephemeralStorage": {
"sizeInGib": 0,
},
"executionRoleArn": "string",
"inferenceAcceleratorOverrides": [{
"deviceName": "string",
"deviceType": "string",
}],
"memory": "string",
"taskRoleArn": "string",
},
"placementStrategies": [{
"field": "string",
"type": "string",
}],
"group": "string",
"launchType": "string",
"networkConfiguration": {
"awsVpcConfiguration": {
"assignPublicIp": "string",
"securityGroups": ["string"],
"subnets": ["string"],
},
},
"capacityProviderStrategies": [{
"capacityProvider": "string",
"base": 0,
"weight": 0,
}],
"placementConstraints": [{
"expression": "string",
"type": "string",
}],
"enableExecuteCommand": False,
"platformVersion": "string",
"propagateTags": "string",
"referenceId": "string",
"tags": {
"string": "string",
},
"taskCount": 0,
"enableEcsManagedTags": False,
},
"eventbridgeEventBusParameters": {
"detailType": "string",
"endpointId": "string",
"resources": ["string"],
"source": "string",
"time": "string",
},
"httpParameters": {
"headerParameters": {
"string": "string",
},
"pathParameterValues": "string",
"queryStringParameters": {
"string": "string",
},
},
"inputTemplate": "string",
"kinesisStreamParameters": {
"partitionKey": "string",
},
"lambdaFunctionParameters": {
"invocationType": "string",
},
"redshiftDataParameters": {
"database": "string",
"sqls": ["string"],
"dbUser": "string",
"secretManagerArn": "string",
"statementName": "string",
"withEvent": False,
},
"sagemakerPipelineParameters": {
"pipelineParameters": [{
"name": "string",
"value": "string",
}],
},
"sqsQueueParameters": {
"messageDeduplicationId": "string",
"messageGroupId": "string",
},
"stepFunctionStateMachineParameters": {
"invocationType": "string",
},
})
const pipeResource = new aws.pipes.Pipe("pipeResource", {
roleArn: "string",
target: "string",
source: "string",
namePrefix: "string",
logConfiguration: {
level: "string",
cloudwatchLogsLogDestination: {
logGroupArn: "string",
},
firehoseLogDestination: {
deliveryStreamArn: "string",
},
includeExecutionDatas: ["string"],
s3LogDestination: {
bucketName: "string",
bucketOwner: "string",
outputFormat: "string",
prefix: "string",
},
},
name: "string",
description: "string",
enrichmentParameters: {
httpParameters: {
headerParameters: {
string: "string",
},
pathParameterValues: "string",
queryStringParameters: {
string: "string",
},
},
inputTemplate: "string",
},
enrichment: "string",
sourceParameters: {
activemqBrokerParameters: {
credentials: {
basicAuth: "string",
},
queueName: "string",
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
},
dynamodbStreamParameters: {
startingPosition: "string",
batchSize: 0,
deadLetterConfig: {
arn: "string",
},
maximumBatchingWindowInSeconds: 0,
maximumRecordAgeInSeconds: 0,
maximumRetryAttempts: 0,
onPartialBatchItemFailure: "string",
parallelizationFactor: 0,
},
filterCriteria: {
filters: [{
pattern: "string",
}],
},
kinesisStreamParameters: {
startingPosition: "string",
batchSize: 0,
deadLetterConfig: {
arn: "string",
},
maximumBatchingWindowInSeconds: 0,
maximumRecordAgeInSeconds: 0,
maximumRetryAttempts: 0,
onPartialBatchItemFailure: "string",
parallelizationFactor: 0,
startingPositionTimestamp: "string",
},
managedStreamingKafkaParameters: {
topicName: "string",
batchSize: 0,
consumerGroupId: "string",
credentials: {
clientCertificateTlsAuth: "string",
saslScram512Auth: "string",
},
maximumBatchingWindowInSeconds: 0,
startingPosition: "string",
},
rabbitmqBrokerParameters: {
credentials: {
basicAuth: "string",
},
queueName: "string",
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
virtualHost: "string",
},
selfManagedKafkaParameters: {
topicName: "string",
additionalBootstrapServers: ["string"],
batchSize: 0,
consumerGroupId: "string",
credentials: {
basicAuth: "string",
clientCertificateTlsAuth: "string",
saslScram256Auth: "string",
saslScram512Auth: "string",
},
maximumBatchingWindowInSeconds: 0,
serverRootCaCertificate: "string",
startingPosition: "string",
vpc: {
securityGroups: ["string"],
subnets: ["string"],
},
},
sqsQueueParameters: {
batchSize: 0,
maximumBatchingWindowInSeconds: 0,
},
},
tags: {
string: "string",
},
desiredState: "string",
targetParameters: {
batchJobParameters: {
jobDefinition: "string",
jobName: "string",
arrayProperties: {
size: 0,
},
containerOverrides: {
commands: ["string"],
environments: [{
name: "string",
value: "string",
}],
instanceType: "string",
resourceRequirements: [{
type: "string",
value: "string",
}],
},
dependsOns: [{
jobId: "string",
type: "string",
}],
parameters: {
string: "string",
},
retryStrategy: {
attempts: 0,
},
},
cloudwatchLogsParameters: {
logStreamName: "string",
timestamp: "string",
},
ecsTaskParameters: {
taskDefinitionArn: "string",
overrides: {
containerOverrides: [{
commands: ["string"],
cpu: 0,
environmentFiles: [{
type: "string",
value: "string",
}],
environments: [{
name: "string",
value: "string",
}],
memory: 0,
memoryReservation: 0,
name: "string",
resourceRequirements: [{
type: "string",
value: "string",
}],
}],
cpu: "string",
ephemeralStorage: {
sizeInGib: 0,
},
executionRoleArn: "string",
inferenceAcceleratorOverrides: [{
deviceName: "string",
deviceType: "string",
}],
memory: "string",
taskRoleArn: "string",
},
placementStrategies: [{
field: "string",
type: "string",
}],
group: "string",
launchType: "string",
networkConfiguration: {
awsVpcConfiguration: {
assignPublicIp: "string",
securityGroups: ["string"],
subnets: ["string"],
},
},
capacityProviderStrategies: [{
capacityProvider: "string",
base: 0,
weight: 0,
}],
placementConstraints: [{
expression: "string",
type: "string",
}],
enableExecuteCommand: false,
platformVersion: "string",
propagateTags: "string",
referenceId: "string",
tags: {
string: "string",
},
taskCount: 0,
enableEcsManagedTags: false,
},
eventbridgeEventBusParameters: {
detailType: "string",
endpointId: "string",
resources: ["string"],
source: "string",
time: "string",
},
httpParameters: {
headerParameters: {
string: "string",
},
pathParameterValues: "string",
queryStringParameters: {
string: "string",
},
},
inputTemplate: "string",
kinesisStreamParameters: {
partitionKey: "string",
},
lambdaFunctionParameters: {
invocationType: "string",
},
redshiftDataParameters: {
database: "string",
sqls: ["string"],
dbUser: "string",
secretManagerArn: "string",
statementName: "string",
withEvent: false,
},
sagemakerPipelineParameters: {
pipelineParameters: [{
name: "string",
value: "string",
}],
},
sqsQueueParameters: {
messageDeduplicationId: "string",
messageGroupId: "string",
},
stepFunctionStateMachineParameters: {
invocationType: "string",
},
},
});
type: aws:pipes:Pipe
properties:
description: string
desiredState: string
enrichment: string
enrichmentParameters:
httpParameters:
headerParameters:
string: string
pathParameterValues: string
queryStringParameters:
string: string
inputTemplate: string
logConfiguration:
cloudwatchLogsLogDestination:
logGroupArn: string
firehoseLogDestination:
deliveryStreamArn: string
includeExecutionDatas:
- string
level: string
s3LogDestination:
bucketName: string
bucketOwner: string
outputFormat: string
prefix: string
name: string
namePrefix: string
roleArn: string
source: string
sourceParameters:
activemqBrokerParameters:
batchSize: 0
credentials:
basicAuth: string
maximumBatchingWindowInSeconds: 0
queueName: string
dynamodbStreamParameters:
batchSize: 0
deadLetterConfig:
arn: string
maximumBatchingWindowInSeconds: 0
maximumRecordAgeInSeconds: 0
maximumRetryAttempts: 0
onPartialBatchItemFailure: string
parallelizationFactor: 0
startingPosition: string
filterCriteria:
filters:
- pattern: string
kinesisStreamParameters:
batchSize: 0
deadLetterConfig:
arn: string
maximumBatchingWindowInSeconds: 0
maximumRecordAgeInSeconds: 0
maximumRetryAttempts: 0
onPartialBatchItemFailure: string
parallelizationFactor: 0
startingPosition: string
startingPositionTimestamp: string
managedStreamingKafkaParameters:
batchSize: 0
consumerGroupId: string
credentials:
clientCertificateTlsAuth: string
saslScram512Auth: string
maximumBatchingWindowInSeconds: 0
startingPosition: string
topicName: string
rabbitmqBrokerParameters:
batchSize: 0
credentials:
basicAuth: string
maximumBatchingWindowInSeconds: 0
queueName: string
virtualHost: string
selfManagedKafkaParameters:
additionalBootstrapServers:
- string
batchSize: 0
consumerGroupId: string
credentials:
basicAuth: string
clientCertificateTlsAuth: string
saslScram256Auth: string
saslScram512Auth: string
maximumBatchingWindowInSeconds: 0
serverRootCaCertificate: string
startingPosition: string
topicName: string
vpc:
securityGroups:
- string
subnets:
- string
sqsQueueParameters:
batchSize: 0
maximumBatchingWindowInSeconds: 0
tags:
string: string
target: string
targetParameters:
batchJobParameters:
arrayProperties:
size: 0
containerOverrides:
commands:
- string
environments:
- name: string
value: string
instanceType: string
resourceRequirements:
- type: string
value: string
dependsOns:
- jobId: string
type: string
jobDefinition: string
jobName: string
parameters:
string: string
retryStrategy:
attempts: 0
cloudwatchLogsParameters:
logStreamName: string
timestamp: string
ecsTaskParameters:
capacityProviderStrategies:
- base: 0
capacityProvider: string
weight: 0
enableEcsManagedTags: false
enableExecuteCommand: false
group: string
launchType: string
networkConfiguration:
awsVpcConfiguration:
assignPublicIp: string
securityGroups:
- string
subnets:
- string
overrides:
containerOverrides:
- commands:
- string
cpu: 0
environmentFiles:
- type: string
value: string
environments:
- name: string
value: string
memory: 0
memoryReservation: 0
name: string
resourceRequirements:
- type: string
value: string
cpu: string
ephemeralStorage:
sizeInGib: 0
executionRoleArn: string
inferenceAcceleratorOverrides:
- deviceName: string
deviceType: string
memory: string
taskRoleArn: string
placementConstraints:
- expression: string
type: string
placementStrategies:
- field: string
type: string
platformVersion: string
propagateTags: string
referenceId: string
tags:
string: string
taskCount: 0
taskDefinitionArn: string
eventbridgeEventBusParameters:
detailType: string
endpointId: string
resources:
- string
source: string
time: string
httpParameters:
headerParameters:
string: string
pathParameterValues: string
queryStringParameters:
string: string
inputTemplate: string
kinesisStreamParameters:
partitionKey: string
lambdaFunctionParameters:
invocationType: string
redshiftDataParameters:
database: string
dbUser: string
secretManagerArn: string
sqls:
- string
statementName: string
withEvent: false
sagemakerPipelineParameters:
pipelineParameters:
- name: string
value: string
sqsQueueParameters:
messageDeduplicationId: string
messageGroupId: string
stepFunctionStateMachineParameters:
invocationType: string
Pipe Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Pipe resource accepts the following input properties:
- roleArn string - ARN of the role that allows the pipe to send data to the target.
- source string - Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead of an ARN (see the sketch following this list).
- target string - Target resource of the pipe (typically an ARN).

The following arguments are optional:

- description string - A description of the pipe. At most 512 characters.
- desiredState string - The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment string - Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters - Parameters to configure enrichment for your pipe. Detailed below.
- logConfiguration PipeLogConfiguration - Logging configuration settings for the pipe. Detailed below.
- name string - Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix string - Creates a unique name beginning with the specified prefix. Conflicts with name.
- sourceParameters PipeSourceParameters - Parameters to configure a source for the pipe. Detailed below.
- tags Map<string, string> - Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- targetParameters PipeTargetParameters - Parameters to configure a target for your pipe. Detailed below.
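A pipe needs only the three required inputs above; everything else is optional. The sketch below shows the non-ARN smk:// source format together with a matching sourceParameters block. It is illustrative only: the role ARN, bootstrap address, and topic name are assumed placeholders, not values from this page.
import * as aws from "@pulumi/aws";

// Minimal sketch: a pipe reading from a self-managed Kafka cluster.
// The role ARN, bootstrap address, and topic name are assumptions.
const targetQueue = new aws.sqs.Queue("target", {});
const kafkaPipe = new aws.pipes.Pipe("kafka-pipe", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder role
    source: "smk://kafka-bootstrap.example.internal:9092",       // smk:// form, not an ARN
    target: targetQueue.arn,
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",
            startingPosition: "LATEST",
        },
    },
});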
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:
- arn string - ARN of this pipe.
- id string - The provider-assigned unique ID for this managed resource.
- tagsAll Map<string, string> - Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
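For example, once a pipe such as the kafkaPipe sketch above has been declared, its computed outputs can be exported from the stack:
// Export the pipe's computed outputs (kafkaPipe is the sketch from the Inputs section).
export const pipeArn = kafkaPipe.arn;
export const pipeName = kafkaPipe.name;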
Look up Existing Pipe Resource
Get an existing Pipe resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PipeState, opts?: CustomResourceOptions): Pipe
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arn: Optional[str] = None,
description: Optional[str] = None,
desired_state: Optional[str] = None,
enrichment: Optional[str] = None,
enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
log_configuration: Optional[PipeLogConfigurationArgs] = None,
name: Optional[str] = None,
name_prefix: Optional[str] = None,
role_arn: Optional[str] = None,
source: Optional[str] = None,
source_parameters: Optional[PipeSourceParametersArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
target: Optional[str] = None,
target_parameters: Optional[PipeTargetParametersArgs] = None) -> Pipe
func GetPipe(ctx *Context, name string, id IDInput, state *PipeState, opts ...ResourceOption) (*Pipe, error)
public static Pipe Get(string name, Input<string> id, PipeState? state, CustomResourceOptions? opts = null)
public static Pipe get(String name, Output<String> id, PipeState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to look up.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
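As a sketch of the TypeScript form: adopt the state of a pipe that already exists, where "example-pipe" is an assumed ID (for this resource the pipe's name serves as its ID).
import * as aws from "@pulumi/aws";

// Hypothetical lookup: "example-pipe" is an assumed pipe name from an earlier deployment.
const adopted = aws.pipes.Pipe.get("adopted-pipe", "example-pipe");
export const adoptedRoleArn = adopted.roleArn;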
- arn string - ARN of this pipe.
- description string - A description of the pipe. At most 512 characters.
- desiredState string - The state the pipe should be in. One of: RUNNING, STOPPED.
- enrichment string - Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
- enrichmentParameters PipeEnrichmentParameters - Parameters to configure enrichment for your pipe. Detailed below.
- logConfiguration PipeLogConfiguration - Logging configuration settings for the pipe. Detailed below.
- name string - Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
- namePrefix string - Creates a unique name beginning with the specified prefix. Conflicts with name.
- roleArn string - ARN of the role that allows the pipe to send data to the target.
- source string - Source resource of the pipe. This field typically requires an ARN (Amazon Resource Name). However, when using a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead of an ARN.
- sourceParameters PipeSourceParameters - Parameters to configure a source for the pipe. Detailed below.
- tags Map<string, string> - Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll Map<string, string> - Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- target string - Target resource of the pipe (typically an ARN).
- targetParameters PipeTargetParameters - Parameters to configure a target for your pipe. Detailed below.
Supporting Types
PipeEnrichmentParameters, PipeEnrichmentParametersArgs
- httpParameters PipeEnrichmentParametersHttpParameters - Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination. If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target-invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence. Detailed below.
- inputTemplate string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
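To see how these fields fit together, here is a sketch that sets static headers and query strings on the enrichment invocation. Both ARN literals are invented placeholders, and the HTTP parameter values are assumptions for illustration.
import * as aws from "@pulumi/aws";

// Hypothetical enrichment wiring: the role and API destination ARNs are placeholders.
const sourceQueue = new aws.sqs.Queue("enrich-source", {});
const targetQueue = new aws.sqs.Queue("enrich-target", {});
const enrichedPipe = new aws.pipes.Pipe("enriched-pipe", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: sourceQueue.arn,
    target: targetQueue.arn,
    enrichment: "arn:aws:events:us-east-1:123456789012:api-destination/example/abcd1234",
    enrichmentParameters: {
        httpParameters: {
            headerParameters: { "X-Pipeline-Stage": "enrichment" }, // static header value
            queryStringParameters: { mode: "summary" },             // static query value
        },
    },
});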
PipeEnrichmentParametersHttpParameters, PipeEnrichmentParametersHttpParametersArgs
- headerParameters Map<string, string>
- pathParameterValues string
- queryStringParameters Map<string, string>
PipeLogConfiguration, PipeLogConfigurationArgs
- level string - The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
- cloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination - Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
- firehoseLogDestination PipeLogConfigurationFirehoseLogDestination - Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
- includeExecutionDatas List<string> - String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
- s3LogDestination PipeLogConfigurationS3LogDestination - Amazon S3 logging configuration settings for the pipe. Detailed below.
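A minimal sketch of a logging configuration follows: it sends INFO-level records, including execution data, to a CloudWatch log group. The queues and role ARN are assumptions for illustration.
import * as aws from "@pulumi/aws";

// Sketch: INFO-level pipe logs, including execution data, to CloudWatch Logs.
// The queues and role ARN are assumptions.
const logGroup = new aws.cloudwatch.LogGroup("pipe-logs", {});
const sourceQueue = new aws.sqs.Queue("log-source", {});
const targetQueue = new aws.sqs.Queue("log-target", {});
const loggedPipe = new aws.pipes.Pipe("logged-pipe", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: sourceQueue.arn,
    target: targetQueue.arn,
    logConfiguration: {
        level: "INFO",
        cloudwatchLogsLogDestination: {
            logGroupArn: logGroup.arn,
        },
        includeExecutionDatas: ["ALL"],
    },
});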
PipeLogConfigurationCloudwatchLogsLogDestination, PipeLogConfigurationCloudwatchLogsLogDestinationArgs
- logGroupArn string - Amazon Web Services Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
PipeLogConfigurationFirehoseLogDestination, PipeLogConfigurationFirehoseLogDestinationArgs
- deliveryStreamArn string - Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
PipeLogConfigurationS3LogDestination, PipeLogConfigurationS3LogDestinationArgs
- bucketName string - Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner string - Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat string - EventBridge format for the log records. Valid values: json, plain, w3c.
- prefix string - Prefix text with which to begin Amazon S3 log object names.
PipeSourceParameters, PipeSourceParametersArgs
- activemqBrokerParameters PipeSourceParametersActivemqBrokerParameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filterCriteria PipeSourceParametersFilterCriteria - The collection of event patterns used to filter events (see the sketch following this list). Detailed below.
- kinesisStreamParameters PipeSourceParametersKinesisStreamParameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- selfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqsQueueParameters PipeSourceParametersSqsQueueParameters - The parameters for using an Amazon SQS queue as a source. Detailed below.
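For instance, filterCriteria and sqsQueueParameters combine as in the sketch below, which forwards only messages whose body matches an event pattern. The pattern, batch settings, and role ARN are illustrative assumptions.
import * as aws from "@pulumi/aws";

// Sketch: forward only SQS messages whose body matches an event pattern.
// The pattern, batch settings, and role ARN are assumptions.
const sourceQueue = new aws.sqs.Queue("filter-source", {});
const targetQueue = new aws.sqs.Queue("filter-target", {});
const filteredPipe = new aws.pipes.Pipe("filtered-pipe", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: sourceQueue.arn,
    target: targetQueue.arn,
    sourceParameters: {
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({ body: { status: ["ACTIVE"] } }),
            }],
        },
        sqsQueueParameters: {
            batchSize: 10,
            maximumBatchingWindowInSeconds: 30,
        },
    },
});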
- activemq_
broker_ Pipeparameters Source Parameters Activemq Broker Parameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb_
stream_ Pipeparameters Source Parameters Dynamodb Stream Parameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter_
criteria PipeSource Parameters Filter Criteria - The collection of event patterns used to filter events. Detailed below.
- kinesis_
stream_ Pipeparameters Source Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed_
streaming_ Pipekafka_ parameters Source Parameters Managed Streaming Kafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq_
broker_ Pipeparameters Source Parameters Rabbitmq Broker Parameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self_
managed_ Pipekafka_ parameters Source Parameters Self Managed Kafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs_
queue_ Pipeparameters Source Parameters Sqs Queue Parameters - The parameters for using a Amazon SQS stream as a source. Detailed below.
- activemq
Broker Property MapParameters - The parameters for using an Active MQ broker as a source. Detailed below.
- dynamodb
Stream Property MapParameters - The parameters for using a DynamoDB stream as a source. Detailed below.
- filter
Criteria Property Map - The collection of event patterns used to filter events. Detailed below.
- kinesis
Stream Property MapParameters - The parameters for using a Kinesis stream as a source. Detailed below.
- managed
Streaming Property MapKafka Parameters - The parameters for using an MSK stream as a source. Detailed below.
- rabbitmq
Broker Property MapParameters - The parameters for using a Rabbit MQ broker as a source. Detailed below.
- self
Managed Property MapKafka Parameters - The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
- sqs
Queue Property MapParameters - The parameters for using a Amazon SQS stream as a source. Detailed below.
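Only the block matching the configured source type is used; filterCriteria can be combined with it. A minimal TypeScript sketch for an SQS source, with placeholder ARNs:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",   // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",   // placeholder
    sourceParameters: {
        // The block must match the source type: an SQS queue here.
        sqsQueueParameters: {
            batchSize: 10,
        },
        filterCriteria: {
            filters: [{
                pattern: JSON.stringify({ body: { status: ["created"] } }),
            }],
        },
    },
});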
PipeSourceParametersActivemqBrokerParameters, PipeSourceParametersActivemqBrokerParametersArgs
- credentials PipeSourceParametersActivemqBrokerParametersCredentials - The credentials needed to access the resource. Detailed below.
- queueName string - The name of the destination queue to consume. Maximum length of 1000.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
PipeSourceParametersActivemqBrokerParametersCredentials, PipeSourceParametersActivemqBrokerParametersCredentialsArgs
- basicAuth string - The ARN of the Secrets Manager secret containing the credentials.
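For an Amazon MQ (ActiveMQ) source, the broker ARN goes in source and the queue and Secrets Manager credentials go here. A TypeScript sketch; the broker ARN, queue name, and secret ARN are placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",       // placeholder
    source: "arn:aws:mq:us-east-1:123456789012:broker:example:b-0000", // placeholder broker ARN
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",         // placeholder
    sourceParameters: {
        activemqBrokerParameters: {
            queueName: "orders", // queue on the broker (placeholder)
            batchSize: 10,
            maximumBatchingWindowInSeconds: 30,
            credentials: {
                // Secret holding the broker's basic-auth credentials (placeholder ARN).
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:mq-credentials",
            },
        },
    },
});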
PipeSourceParametersDynamodbStreamParameters, PipeSourceParametersDynamodbStreamParametersArgs
- startingPosition string - The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig - Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite; when the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite; when maximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure string - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
PipeSourceParametersDynamodbStreamParametersDeadLetterConfig, PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
- arn string - The ARN of the specified target for the dead-letter queue.
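A TypeScript sketch of a DynamoDB stream source with retry limits and a dead-letter queue; the stream, queue, and role ARNs are placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
    source: "arn:aws:dynamodb:us-east-1:123456789012:table/example/stream/2024-01-01T00:00:00.000", // placeholder stream ARN
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",   // placeholder
    sourceParameters: {
        dynamodbStreamParameters: {
            startingPosition: "LATEST",
            batchSize: 100,
            maximumRetryAttempts: 3, // give up after 3 retries
            onPartialBatchItemFailure: "AUTOMATIC_BISECT",
            deadLetterConfig: {
                arn: "arn:aws:sqs:us-east-1:123456789012:pipe-dlq", // placeholder DLQ
            },
        },
    },
});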
PipeSourceParametersFilterCriteria, PipeSourceParametersFilterCriteriaArgs
- filters List&lt;PipeSourceParametersFilterCriteriaFilter&gt; - An array of up to 5 event patterns. Detailed below.
PipeSourceParametersFilterCriteriaFilter, PipeSourceParametersFilterCriteriaFilterArgs
- pattern string - The event pattern. At most 4096 characters.
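Patterns are EventBridge event-pattern JSON passed as a string, so building them with JSON.stringify keeps them valid. A sketch that, assuming an SQS source, forwards only messages whose body carries status "created" (all ARNs are placeholders):
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",   // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",   // placeholder
    sourceParameters: {
        filterCriteria: {
            filters: [{
                // Event-pattern JSON; at most 4096 characters.
                pattern: JSON.stringify({ body: { status: ["created"] } }),
            }],
        },
    },
});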
PipeSourceParametersKinesisStreamParameters, PipeSourceParametersKinesisStreamParametersArgs
- startingPosition string - The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- deadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig - Define the target queue to send dead-letter queue events to. Detailed below.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
- maximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite; when the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
- maximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite; when maximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
- onPartialBatchItemFailure string - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
- parallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
- startingPositionTimestamp string - With startingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
PipeSourceParametersKinesisStreamParametersDeadLetterConfig, PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
- arn string - The ARN of the specified target for the dead-letter queue.
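A TypeScript sketch of a Kinesis stream source reading from the start of the stream, with a dead-letter queue; ARNs are placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",     // placeholder
    source: "arn:aws:kinesis:us-east-1:123456789012:stream/example", // placeholder stream ARN
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",       // placeholder
    sourceParameters: {
        kinesisStreamParameters: {
            startingPosition: "TRIM_HORIZON",
            batchSize: 100,
            parallelizationFactor: 2,        // process 2 batches per shard concurrently
            maximumRecordAgeInSeconds: 3600, // drop records older than an hour
            deadLetterConfig: {
                arn: "arn:aws:sqs:us-east-1:123456789012:pipe-dlq", // placeholder DLQ
            },
        },
    },
});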
PipeSourceParametersManagedStreamingKafkaParameters, PipeSourceParametersManagedStreamingKafkaParametersArgs
- topicName string - The name of the topic that the pipe will read from. Maximum length of 249.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId string - The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials - The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
- startingPosition string - The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
PipeSourceParametersManagedStreamingKafkaParametersCredentials, PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
- clientCertificateTlsAuth string - The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth string - The ARN of the Secrets Manager secret containing the credentials.
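A TypeScript sketch of an MSK source authenticating with SASL/SCRAM-512; the cluster ARN, topic, and secret ARN are placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",         // placeholder
    source: "arn:aws:kafka:us-east-1:123456789012:cluster/example/0000", // placeholder cluster ARN
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",           // placeholder
    sourceParameters: {
        managedStreamingKafkaParameters: {
            topicName: "orders",             // placeholder topic
            consumerGroupId: "example-pipe", // placeholder consumer group
            startingPosition: "TRIM_HORIZON",
            credentials: {
                saslScram512Auth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:msk-credentials", // placeholder
            },
        },
    },
});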
PipeSourceParametersRabbitmqBrokerParameters, PipeSourceParametersRabbitmqBrokerParametersArgs
- credentials PipeSourceParametersRabbitmqBrokerParametersCredentials - The credentials needed to access the resource. Detailed below.
- queueName string - The name of the destination queue to consume. Maximum length of 1000.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
- virtualHost string - The name of the virtual host associated with the source broker. Maximum length of 200.
PipeSourceParametersRabbitmqBrokerParametersCredentials, PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
- basicAuth string - The ARN of the Secrets Manager secret containing the credentials.
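A TypeScript sketch of a RabbitMQ broker source on a non-default virtual host; the broker ARN, queue, virtual host, and secret ARN are placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",       // placeholder
    source: "arn:aws:mq:us-east-1:123456789012:broker:example:b-0000", // placeholder broker ARN
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",         // placeholder
    sourceParameters: {
        rabbitmqBrokerParameters: {
            queueName: "orders",  // placeholder queue
            virtualHost: "/prod", // placeholder virtual host
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:rabbitmq-credentials", // placeholder
            },
        },
    },
});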
PipeSourceParametersSelfManagedKafkaParameters, PipeSourceParametersSelfManagedKafkaParametersArgs
- topicName string - The name of the topic that the pipe will read from. Maximum length of 249.
- additionalBootstrapServers List&lt;string&gt; - An array of server URLs. Maximum of 2 items, each of maximum length 300.
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- consumerGroupId string - The name of the consumer group to use. Maximum length of 200.
- credentials PipeSourceParametersSelfManagedKafkaParametersCredentials - The credentials needed to access the resource. Detailed below.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
- serverRootCaCertificate string - The ARN of the Secrets Manager secret used for certification.
- startingPosition string - The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
- vpc PipeSourceParametersSelfManagedKafkaParametersVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
PipeSourceParametersSelfManagedKafkaParametersCredentials, PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
- basicAuth string - The ARN of the Secrets Manager secret containing the credentials.
- clientCertificateTlsAuth string - The ARN of the Secrets Manager secret containing the credentials.
- saslScram256Auth string - The ARN of the Secrets Manager secret containing the credentials.
- saslScram512Auth string - The ARN of the Secrets Manager secret containing the credentials.
PipeSourceParametersSelfManagedKafkaParametersVpc, PipeSourceParametersSelfManagedKafkaParametersVpcArgs
- securityGroups List&lt;string&gt;
- subnets List&lt;string&gt;
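A TypeScript sketch of a self-managed Kafka source reached over VPC networking with SASL/SCRAM-256; the bootstrap endpoint, topic, secret ARN, and network IDs are all placeholders:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
    source: "smk://broker1.internal:9092",                       // placeholder bootstrap endpoint
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",   // placeholder
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",                                    // placeholder topic
            additionalBootstrapServers: ["broker2.internal:9092"], // placeholder
            consumerGroupId: "example-pipe",
            startingPosition: "TRIM_HORIZON",
            credentials: {
                saslScram256Auth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:kafka-credentials", // placeholder
            },
            vpc: {
                securityGroups: ["sg-0123456789abcdef0"], // placeholder
                subnets: ["subnet-0123456789abcdef0"],    // placeholder
            },
        },
    },
});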
PipeSourceParametersSqsQueueParameters, PipeSourceParametersSqsQueueParametersArgs
- batchSize int - The maximum number of records to include in each batch. Maximum value of 10000.
- maximumBatchingWindowInSeconds int - The maximum length of time, in seconds, to wait for events. Maximum value of 300.
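Batching trades latency for efficiency: a larger maximumBatchingWindowInSeconds lets EventBridge wait longer to fill a batch before invoking the target. A TypeScript sketch with placeholder ARNs:
import * as aws from "@pulumi/aws";

const example = new aws.pipes.Pipe("example", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",   // placeholder
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue",   // placeholder
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,                      // up to 10000 records per batch
            maximumBatchingWindowInSeconds: 60, // wait up to 60s for a full batch (max 300)
        },
    },
});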
PipeTargetParameters, PipeTargetParametersArgs
- Batch
Job PipeParameters Target Parameters Batch Job Parameters - The parameters for using an AWS Batch job as a target. Detailed below.
- Cloudwatch
Logs PipeParameters Target Parameters Cloudwatch Logs Parameters - The parameters for using an CloudWatch Logs log stream as a target. Detailed below.
- Ecs
Task PipeParameters Target Parameters Ecs Task Parameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- Eventbridge
Event PipeBus Parameters Target Parameters Eventbridge Event Bus Parameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- Http
Parameters PipeTarget Parameters Http Parameters - These are custom parameter to be used when the target is an API Gateway REST APIs or EventBridge ApiDestinations. Detailed below.
- Input
Template string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- Kinesis
Stream PipeParameters Target Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- Lambda
Function PipeParameters Target Parameters Lambda Function Parameters - The parameters for using a Lambda function as a target. Detailed below.
- Redshift
Data PipeParameters Target Parameters Redshift Data Parameters - These are custom parameters to be used when the target is a Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- Sagemaker
Pipeline PipeParameters Target Parameters Sagemaker Pipeline Parameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- Sqs
Queue PipeParameters Target Parameters Sqs Queue Parameters - The parameters for using a Amazon SQS stream as a target. Detailed below.
- Step
Function PipeState Machine Parameters Target Parameters Step Function State Machine Parameters - The parameters for using a Step Functions state machine as a target. Detailed below.
- Batch
Job PipeParameters Target Parameters Batch Job Parameters - The parameters for using an AWS Batch job as a target. Detailed below.
- Cloudwatch
Logs PipeParameters Target Parameters Cloudwatch Logs Parameters - The parameters for using an CloudWatch Logs log stream as a target. Detailed below.
- Ecs
Task PipeParameters Target Parameters Ecs Task Parameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- Eventbridge
Event PipeBus Parameters Target Parameters Eventbridge Event Bus Parameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- Http
Parameters PipeTarget Parameters Http Parameters - These are custom parameter to be used when the target is an API Gateway REST APIs or EventBridge ApiDestinations. Detailed below.
- Input
Template string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- Kinesis
Stream PipeParameters Target Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- Lambda
Function PipeParameters Target Parameters Lambda Function Parameters - The parameters for using a Lambda function as a target. Detailed below.
- Redshift
Data PipeParameters Target Parameters Redshift Data Parameters - These are custom parameters to be used when the target is a Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- Sagemaker
Pipeline PipeParameters Target Parameters Sagemaker Pipeline Parameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- Sqs
Queue PipeParameters Target Parameters Sqs Queue Parameters - The parameters for using a Amazon SQS stream as a target. Detailed below.
- Step
Function PipeState Machine Parameters Target Parameters Step Function State Machine Parameters - The parameters for using a Step Functions state machine as a target. Detailed below.
- batch
Job PipeParameters Target Parameters Batch Job Parameters - The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatch
Logs PipeParameters Target Parameters Cloudwatch Logs Parameters - The parameters for using an CloudWatch Logs log stream as a target. Detailed below.
- ecs
Task PipeParameters Target Parameters Ecs Task Parameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridge
Event PipeBus Parameters Target Parameters Eventbridge Event Bus Parameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- http
Parameters PipeTarget Parameters Http Parameters - These are custom parameter to be used when the target is an API Gateway REST APIs or EventBridge ApiDestinations. Detailed below.
- input
Template String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesis
Stream PipeParameters Target Parameters Kinesis Stream Parameters - The parameters for using a Kinesis stream as a source. Detailed below.
- lambda
Function PipeParameters Target Parameters Lambda Function Parameters - The parameters for using a Lambda function as a target. Detailed below.
- redshift
Data PipeParameters Target Parameters Redshift Data Parameters - These are custom parameters to be used when the target is a Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemaker
Pipeline PipeParameters Target Parameters Sagemaker Pipeline Parameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- sqs
Queue PipeParameters Target Parameters Sqs Queue Parameters - The parameters for using a Amazon SQS stream as a target. Detailed below.
- step
Function PipeState Machine Parameters Target Parameters Step Function State Machine Parameters - The parameters for using a Step Functions state machine as a target. Detailed below.
- batchJobParameters PipeTargetParametersBatchJobParameters - The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters PipeTargetParametersEcsTaskParameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters PipeTargetParametersHttpParameters - The custom parameters to use when the target is an API Gateway REST API or EventBridge ApiDestination. Detailed below.
- inputTemplate string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters PipeTargetParametersKinesisStreamParameters - The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters - The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters PipeTargetParametersRedshiftDataParameters - The custom parameters to use when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- sqsQueueParameters PipeTargetParametersSqsQueueParameters - The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters - The parameters for using a Step Functions state machine as a target. Detailed below.
- batch_job_parameters PipeTargetParametersBatchJobParameters - The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatch_logs_parameters PipeTargetParametersCloudwatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecs_task_parameters PipeTargetParametersEcsTaskParameters - The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridge_event_bus_parameters PipeTargetParametersEventbridgeEventBusParameters - The parameters for using an EventBridge event bus as a target. Detailed below.
- http_parameters PipeTargetParametersHttpParameters - The custom parameters to use when the target is an API Gateway REST API or EventBridge ApiDestination. Detailed below.
- input_template str - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesis_stream_parameters PipeTargetParametersKinesisStreamParameters - The parameters for using a Kinesis stream as a target. Detailed below.
- lambda_function_parameters PipeTargetParametersLambdaFunctionParameters - The parameters for using a Lambda function as a target. Detailed below.
- redshift_data_parameters PipeTargetParametersRedshiftDataParameters - The custom parameters to use when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemaker_pipeline_parameters PipeTargetParametersSagemakerPipelineParameters - The parameters for using a SageMaker pipeline as a target. Detailed below.
- sqs_queue_parameters PipeTargetParametersSqsQueueParameters - The parameters for using an Amazon SQS queue as a target. Detailed below.
- step_function_state_machine_parameters PipeTargetParametersStepFunctionStateMachineParameters - The parameters for using a Step Functions state machine as a target. Detailed below.
- batchJobParameters Property Map - The parameters for using an AWS Batch job as a target. Detailed below.
- cloudwatchLogsParameters Property Map - The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
- ecsTaskParameters Property Map - The parameters for using an Amazon ECS task as a target. Detailed below.
- eventbridgeEventBusParameters Property Map - The parameters for using an EventBridge event bus as a target. Detailed below.
- httpParameters Property Map - The custom parameters to use when the target is an API Gateway REST API or EventBridge ApiDestination. Detailed below.
- inputTemplate String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
- kinesisStreamParameters Property Map - The parameters for using a Kinesis stream as a target. Detailed below.
- lambdaFunctionParameters Property Map - The parameters for using a Lambda function as a target. Detailed below.
- redshiftDataParameters Property Map - The custom parameters to use when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
- sagemakerPipelineParameters Property Map - The parameters for using a SageMaker pipeline as a target. Detailed below.
- sqsQueueParameters Property Map - The parameters for using an Amazon SQS queue as a target. Detailed below.
- stepFunctionStateMachineParameters Property Map - The parameters for using a Step Functions state machine as a target. Detailed below.
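In practice you set the one parameter block that matches the pipe's target. As a minimal illustrative sketch (all ARNs below are hypothetical placeholders, not resources from this page), passing a static JSON document to the target via inputTemplate looks like this in TypeScript:
import * as aws from "@pulumi/aws";

// All ARNs are hypothetical; substitute resources from your own stack.
const staticInputPipe = new aws.pipes.Pipe("example-input-template", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:target-queue",
    targetParameters: {
        // Static JSON sent to the target; the event payload itself is not forwarded.
        inputTemplate: JSON.stringify({ origin: "example-pipe", static: true }),
    },
});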
PipeTargetParametersBatchJobParameters, PipeTargetParametersBatchJobParametersArgs
- JobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string - The name of the job. It can be up to 128 letters long.
- ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides - The overrides that are sent to a container. Detailed below.
- DependsOns List<PipeTargetParametersBatchJobParametersDependsOn> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- Parameters Dictionary<string, string> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- JobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string - The name of the job. It can be up to 128 letters long.
- ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides - The overrides that are sent to a container. Detailed below.
- DependsOns []PipeTargetParametersBatchJobParametersDependsOn - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- Parameters map[string]string - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition String - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String - The name of the job. It can be up to 128 letters long.
- arrayProperties PipeTargetParametersBatchJobParametersArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides - The overrides that are sent to a container. Detailed below.
- dependsOns List<PipeTargetParametersBatchJobParametersDependsOn> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Map<String,String> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName string - The name of the job. It can be up to 128 letters long.
- arrayProperties PipeTargetParametersBatchJobParametersArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides - The overrides that are sent to a container. Detailed below.
- dependsOns PipeTargetParametersBatchJobParametersDependsOn[] - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters {[key: string]: string} - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- job_definition str - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- job_name str - The name of the job. It can be up to 128 letters long.
- array_properties PipeTargetParametersBatchJobParametersArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- container_overrides PipeTargetParametersBatchJobParametersContainerOverrides - The overrides that are sent to a container. Detailed below.
- depends_ons Sequence[PipeTargetParametersBatchJobParametersDependsOn] - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Mapping[str, str] - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retry_strategy PipeTargetParametersBatchJobParametersRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
- jobDefinition String - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String - The name of the job. It can be up to 128 letters long.
- arrayProperties Property Map - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
- containerOverrides Property Map - The overrides that are sent to a container. Detailed below.
- dependsOns List<Property Map> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
- parameters Map<String> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy Property Map - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
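As an illustrative sketch (the role, queue, and Batch job-queue ARNs and the job definition name are hypothetical placeholders), these parameters might be wired together like this:
import * as aws from "@pulumi/aws";

const batchPipe = new aws.pipes.Pipe("example-batch-target", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",     // hypothetical
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",        // hypothetical
    target: "arn:aws:batch:us-east-1:111111111111:job-queue/example", // hypothetical
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "example-job-def", // name, name:revision, or full ARN
            jobName: "example-job",
            arrayProperties: { size: 2 },     // turns the submission into an array job
            retryStrategy: { attempts: 3 },   // overrides the job definition's retry strategy
            parameters: { inputKey: "inputValue" }, // fills substitution placeholders in the job definition
        },
    },
});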
PipeTargetParametersBatchJobParametersArrayProperties, PipeTargetParametersBatchJobParametersArrayPropertiesArgs
- Size int - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- Size int - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size Integer - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size number - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size int - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
- size Number - The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
PipeTargetParametersBatchJobParametersContainerOverrides, PipeTargetParametersBatchJobParametersContainerOverridesArgs
- Commands List<string> - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment> - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- InstanceType string - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- Commands []string - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Environments []PipeTargetParametersBatchJobParametersContainerOverridesEnvironment - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- InstanceType string - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements []PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String> - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment> - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType String - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands string[] - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments PipeTargetParametersBatchJobParametersContainerOverridesEnvironment[] - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType string - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement[] - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands Sequence[str] - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments Sequence[PipeTargetParametersBatchJobParametersContainerOverridesEnvironment] - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instance_type str - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resource_requirements Sequence[PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement] - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String> - List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- environments List<Property Map> - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- instanceType String - The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<Property Map> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
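A sketch of container overrides under batchJobParameters (all values hypothetical; the name/value shape of environment entries is assumed from the AWS Batch API, since the Environment type is not detailed on this page):
import * as aws from "@pulumi/aws";

const overridesPipe = new aws.pipes.Pipe("example-batch-overrides", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",     // hypothetical
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",        // hypothetical
    target: "arn:aws:batch:us-east-1:111111111111:job-queue/example", // hypothetical
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "example-job-def", // hypothetical
            containerOverrides: {
                commands: ["python", "handler.py"],              // replaces the image's default command
                environments: [{ name: "STAGE", value: "dev" }], // assumed name/value shape
                resourceRequirements: [{ type: "GPU", value: "1" }], // reserve one GPU
            },
        },
    },
});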
PipeTargetParametersBatchJobParametersContainerOverridesEnvironment, PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement, PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
- Type string - The type of resource to assign to a container. The only supported resource is a GPU.
- Value string - The quantity of the specified resource to reserve for the container.
- Type string - The type of resource to assign to a container. The only supported resource is a GPU.
- Value string - The quantity of the specified resource to reserve for the container.
- type String - The type of resource to assign to a container. The only supported resource is a GPU.
- value String - The quantity of the specified resource to reserve for the container.
- type string - The type of resource to assign to a container. The only supported resource is a GPU.
- value string - The quantity of the specified resource to reserve for the container.
- type str - The type of resource to assign to a container. The only supported resource is a GPU.
- value str - The quantity of the specified resource to reserve for the container.
- type String - The type of resource to assign to a container. The only supported resource is a GPU.
- value String - The quantity of the specified resource to reserve for the container.
PipeTargetParametersBatchJobParametersDependsOn, PipeTargetParametersBatchJobParametersDependsOnArgs
- JobId string - The job ID of the AWS Batch job that's associated with this dependency.
- Type string - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- JobId string - The job ID of the AWS Batch job that's associated with this dependency.
- Type string - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId String - The job ID of the AWS Batch job that's associated with this dependency.
- type String - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId string - The job ID of the AWS Batch job that's associated with this dependency.
- type string - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- job_id str - The job ID of the AWS Batch job that's associated with this dependency.
- type str - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
- jobId String - The job ID of the AWS Batch job that's associated with this dependency.
- type String - The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
PipeTargetParametersBatchJobParametersRetryStrategy, PipeTargetParametersBatchJobParametersRetryStrategyArgs
- Attempts int - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
- Attempts int - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
- attempts Integer - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
- attempts number - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
- attempts int - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
- attempts Number - The number of times to move a job to the RUNNABLE status. If attempts is greater than one, the job is retried on failure that many times. Maximum value of 10.
PipeTargetParametersCloudwatchLogsParameters, PipeTargetParametersCloudwatchLogsParametersArgs
- LogStreamName string - The name of the log stream.
- Timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
- LogStreamName string - The name of the log stream.
- Timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
- logStreamName String - The name of the log stream.
- timestamp String - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
- logStreamName string - The name of the log stream.
- timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
- log_stream_name str - The name of the log stream.
- timestamp str - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
- logStreamName String - The name of the log stream.
- timestamp String - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
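A sketch of a CloudWatch Logs target (the log group ARN and stream name are hypothetical), taking the record timestamp from a JSON path in the event:
import * as aws from "@pulumi/aws";

const logsPipe = new aws.pipes.Pipe("example-logs-target", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",          // hypothetical
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",             // hypothetical
    target: "arn:aws:logs:us-east-1:111111111111:log-group:example-group", // hypothetical
    targetParameters: {
        cloudwatchLogsParameters: {
            logStreamName: "example-stream",
            timestamp: "$.detail.timestamp", // JSON path to an epoch-milliseconds field in the event
        },
    },
});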
PipeTargetParametersEcsTaskParameters, PipeTargetParametersEcsTaskParametersArgs
- TaskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy> - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- EnableEcsManagedTags bool - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- EnableExecuteCommand bool - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- Group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType string - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- Overrides PipeTargetParametersEcsTaskParametersOverrides - The overrides that are associated with a task. Detailed below.
- PlacementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- PlacementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- PlatformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- PropagateTags string - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- ReferenceId string - The reference ID to use for the task. Maximum length of 1,024.
- Tags Dictionary<string, string> - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- TaskCount int - The number of tasks to create based on TaskDefinition. The default is 1.
- TaskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategies []PipeTargetParametersEcsTaskParametersCapacityProviderStrategy - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- EnableEcsManagedTags bool - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- EnableExecuteCommand bool - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- Group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType string - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- Overrides PipeTargetParametersEcsTaskParametersOverrides - The overrides that are associated with a task. Detailed below.
- PlacementConstraints []PipeTargetParametersEcsTaskParametersPlacementConstraint - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- PlacementStrategies []PipeTargetParametersEcsTaskParametersPlacementStrategy - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- PlatformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- PropagateTags string - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- ReferenceId string - The reference ID to use for the task. Maximum length of 1,024.
- Tags map[string]string - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- TaskCount int - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy> - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags Boolean - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand Boolean - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group String - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType String - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides - The overrides that are associated with a task. Detailed below.
- placementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion String - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags String - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- referenceId String - The reference ID to use for the task. Maximum length of 1,024.
- tags Map<String,String> - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- taskCount Integer - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies PipeTargetParametersEcsTaskParametersCapacityProviderStrategy[] - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags boolean - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand boolean - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType string - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides - The overrides that are associated with a task. Detailed below.
- placementConstraints PipeTargetParametersEcsTaskParametersPlacementConstraint[] - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies PipeTargetParametersEcsTaskParametersPlacementStrategy[] - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags string - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- referenceId string - The reference ID to use for the task. Maximum length of 1,024.
- tags {[key: string]: string} - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- taskCount number - The number of tasks to create based on TaskDefinition. The default is 1.
- task_definition_arn str - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacity_provider_strategies Sequence[PipeTargetParametersEcsTaskParametersCapacityProviderStrategy] - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enable_ecs_managed_tags bool - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enable_execute_command bool - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group str - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launch_type str - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- network_configuration PipeTargetParametersEcsTaskParametersNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides PipeTargetParametersEcsTaskParametersOverrides - The overrides that are associated with a task. Detailed below.
- placement_constraints Sequence[PipeTargetParametersEcsTaskParametersPlacementConstraint] - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placement_strategies Sequence[PipeTargetParametersEcsTaskParametersPlacementStrategy] - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platform_version str - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagate_tags str - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- reference_id str - The reference ID to use for the task. Maximum length of 1,024.
- tags Mapping[str, str] - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- task_count int - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategies List<Property Map> - List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
- enableEcsManagedTags Boolean - Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
- enableExecuteCommand Boolean - Whether to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
- group String - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType String - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL.
- networkConfiguration Property Map - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
- overrides Property Map - The overrides that are associated with a task. Detailed below.
- placementConstraints List<Property Map> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
- placementStrategies List<Property Map> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
- platformVersion String - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
- propagateTags String - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION.
- referenceId String - The reference ID to use for the task. Maximum length of 1,024.
- tags Map<String> - Key-value mapping of resource tags. If the provider is configured with a default_tags configuration block, tags with matching keys will overwrite those defined at the provider level.
- taskCount Number - The number of tasks to create based on TaskDefinition. The default is 1.
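A sketch of launching a Fargate task per event (the cluster, task definition, and subnet IDs are hypothetical placeholders); networkConfiguration is required here because launchType FARGATE implies the awsvpc network mode:
import * as aws from "@pulumi/aws";

const ecsPipe = new aws.pipes.Pipe("example-ecs-target", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role", // hypothetical
    source: "arn:aws:sqs:us-east-1:111111111111:source-queue",    // hypothetical
    target: "arn:aws:ecs:us-east-1:111111111111:cluster/example", // hypothetical
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: "arn:aws:ecs:us-east-1:111111111111:task-definition/example:1", // hypothetical
            launchType: "FARGATE",
            taskCount: 1,
            networkConfiguration: {
                awsVpcConfiguration: {
                    subnets: ["subnet-0123456789abcdef0"], // hypothetical subnet ID
                    assignPublicIp: "DISABLED",
                },
            },
        },
    },
});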
PipeTargetParametersEcsTaskParametersCapacityProviderStrategy, PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
- Capacity
Provider string - The short name of the capacity provider. Maximum value of 255.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- Capacity
Provider string - The short name of the capacity provider. Maximum value of 255.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacity
Provider String - The short name of the capacity provider. Maximum value of 255.
- base Integer
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight Integer
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacity
Provider string - The short name of the capacity provider. Maximum value of 255.
- base number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacity_
provider str - The short name of the capacity provider. Maximum value of 255.
- base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
- capacity
Provider String - The short name of the capacity provider. Maximum value of 255.
- base Number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
- weight Number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
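To make the base/weight interaction concrete, here is a minimal sketch of a strategy list, assuming the list field is named capacityProviderStrategies as in the argument reference above; the providers shown are illustrative.
// Run one task on FARGATE first (the base), then split the remaining
// tasks 3:1 between FARGATE_SPOT and FARGATE. Only one entry may define a base.
const capacityProviderStrategies = [
    { capacityProvider: "FARGATE", base: 1, weight: 1 },
    { capacityProvider: "FARGATE_SPOT", weight: 3 }, // base defaults to 0
];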
PipeTargetParametersEcsTaskParametersNetworkConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
- Aws Vpc Configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- Aws Vpc Configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- aws Vpc Configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- aws Vpc Configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- aws_vpc_configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
- aws Vpc Configuration Property Map - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
- Assign Public Ip string - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- Security Groups List<string>
- Subnets List<string>
- Assign Public Ip string - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- Security Groups []string
- Subnets []string
- assign Public Ip String - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- security Groups List<String>
- subnets List<String>
- assign Public Ip string - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- security Groups string[]
- subnets string[]
- assign_public_ip str - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- security_groups Sequence[str]
- subnets Sequence[str]
- assign Public Ip String - Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
- security Groups List<String>
- subnets List<String>
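A minimal sketch of the awsvpc settings for a Fargate task; the subnet and security-group IDs are hypothetical placeholders.
// Slots into targetParameters.ecsTaskParameters.networkConfiguration.
const networkConfiguration = {
    awsVpcConfiguration: {
        subnets: ["subnet-0abc1234def56789a", "subnet-0bcd2345efa6789ab"],
        securityGroups: ["sg-0123456789abcdef0"],
        assignPublicIp: "ENABLED", // only allowed when LaunchType is FARGATE
    },
};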
PipeTargetParametersEcsTaskParametersOverrides, PipeTargetParametersEcsTaskParametersOverridesArgs
- Container Overrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride> - One or more container overrides that are sent to a task. Detailed below.
- Cpu string - The CPU override for the task.
- Ephemeral Storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage - The ephemeral storage setting override for the task. Detailed below.
- Execution Role Arn string - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- Inference Accelerator Overrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride> - List of Elastic Inference accelerator overrides for the task. Detailed below.
- Memory string - The memory override for the task, in MiB.
- Task Role Arn string - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- Container Overrides []PipeTargetParametersEcsTaskParametersOverridesContainerOverride - One or more container overrides that are sent to a task. Detailed below.
- Cpu string - The CPU override for the task.
- Ephemeral Storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage - The ephemeral storage setting override for the task. Detailed below.
- Execution Role Arn string - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- Inference Accelerator Overrides []PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride - List of Elastic Inference accelerator overrides for the task. Detailed below.
- Memory string - The memory override for the task, in MiB.
- Task Role Arn string - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- container Overrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride> - One or more container overrides that are sent to a task. Detailed below.
- cpu String - The CPU override for the task.
- ephemeral Storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage - The ephemeral storage setting override for the task. Detailed below.
- execution Role Arn String - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inference Accelerator Overrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride> - List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory String - The memory override for the task, in MiB.
- task Role Arn String - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- container Overrides PipeTargetParametersEcsTaskParametersOverridesContainerOverride[] - One or more container overrides that are sent to a task. Detailed below.
- cpu string - The CPU override for the task.
- ephemeral Storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage - The ephemeral storage setting override for the task. Detailed below.
- execution Role Arn string - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inference Accelerator Overrides PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride[] - List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory string - The memory override for the task, in MiB.
- task Role Arn string - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- container_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverride] - One or more container overrides that are sent to a task. Detailed below.
- cpu str - The CPU override for the task.
- ephemeral_storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage - The ephemeral storage setting override for the task. Detailed below.
- execution_role_arn str - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inference_accelerator_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride] - List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory str - The memory override for the task, in MiB.
- task_role_arn str - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
- container Overrides List<Property Map> - One or more container overrides that are sent to a task. Detailed below.
- cpu String - The CPU override for the task.
- ephemeral Storage Property Map - The ephemeral storage setting override for the task. Detailed below.
- execution Role Arn String - The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
- inference Accelerator Overrides List<Property Map> - List of Elastic Inference accelerator overrides for the task. Detailed below.
- memory String - The memory override for the task, in MiB.
- task Role Arn String - The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
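The sketch below combines a task-level override with a single container override; the role ARN, container name, and values are hypothetical placeholders.
// Slots into targetParameters.ecsTaskParameters.overrides.
const overrides = {
    cpu: "512",     // task-level CPU override, as a string
    memory: "1024", // task-level memory override in MiB, as a string
    executionRoleArn: "arn:aws:iam::123456789012:role/example-exec-role",
    containerOverrides: [{
        name: "app", // must match a container in the task definition
        environments: [{ name: "PIPE_MODE", value: "batch" }],
    }],
};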
PipeTargetParametersEcsTaskParametersOverridesContainerOverride, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
- Commands List<string>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment
Files List<PipeTarget Parameters Ecs Task Parameters Overrides Container Override Environment File> - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- Environments
List<Pipe
Target Parameters Ecs Task Parameters Overrides Container Override Environment> - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- Memory
Reservation int - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified. - Resource
Requirements List<PipeTarget Parameters Ecs Task Parameters Overrides Container Override Resource Requirement> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- Commands []string
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment
Files []PipeTarget Parameters Ecs Task Parameters Overrides Container Override Environment File - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- Environments
[]Pipe
Target Parameters Ecs Task Parameters Overrides Container Override Environment - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- Memory
Reservation int - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified. - Resource
Requirements []PipeTarget Parameters Ecs Task Parameters Overrides Container Override Resource Requirement - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Integer
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment
Files List<PipeTarget Parameters Ecs Task Parameters Overrides Container Override Environment File> - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments
List<Pipe
Target Parameters Ecs Task Parameters Overrides Container Override Environment> - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory Integer
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory
Reservation Integer - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified. - resource
Requirements List<PipeTarget Parameters Ecs Task Parameters Overrides Container Override Resource Requirement> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands string[]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment
Files PipeTarget Parameters Ecs Task Parameters Overrides Container Override Environment File[] - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments
Pipe
Target Parameters Ecs Task Parameters Overrides Container Override Environment[] - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory
Reservation number - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name string
- The name of the container that receives the override. This parameter is required if any override is specified. - resource
Requirements PipeTarget Parameters Ecs Task Parameters Overrides Container Override Resource Requirement[] - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands Sequence[str]
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment_
files Sequence[PipeTarget Parameters Ecs Task Parameters Overrides Container Override Environment File] - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments
Sequence[Pipe
Target Parameters Ecs Task Parameters Overrides Container Override Environment] - The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory_
reservation int - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name str
- The name of the container that receives the override. This parameter is required if any override is specified. - resource_
requirements Sequence[PipeTarget Parameters Ecs Task Parameters Overrides Container Override Resource Requirement] - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
- commands List<String>
- List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment
Files List<Property Map> - A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
- environments List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
- memory Number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory
Reservation Number - The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified. - resource
Requirements List<Property Map> - The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
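As one concrete reading of these fields, a container override might look like the following sketch; the container name and values are illustrative.
const containerOverride = {
    name: "app",                     // container to override; must match the task definition
    commands: ["node", "worker.js"], // replaces the image/task-definition command
    cpu: 256,                        // container-level CPU units
    memory: 512,                     // hard limit, MiB
    memoryReservation: 256,          // soft limit, MiB
    environments: [{ name: "LOG_LEVEL", value: "debug" }],
};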
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
- Name string
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- Value string
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
- Name string
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- Value string
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
- name String
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- value String
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
- name string
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- value string
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
- name str
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- value str
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
- name String
- The name of the key-value pair. For environment variables, this is the name of the environment variable.
- value String
- The value of the key-value pair. For environment variables, this is the value of the environment variable.
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
- Type string
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- Type string
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type String
- The file type to use. The only supported value is s3.
- value String
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type string
- The file type to use. The only supported value is s3.
- value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type str
- The file type to use. The only supported value is s3.
- value str
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type String
- The file type to use. The only supported value is s3.
- value String
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
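A sketch of a single environment file entry, assuming the s3 file type; the object ARN is a hypothetical placeholder.
const environmentFile = {
    type: "s3", // the only supported file type
    value: "arn:aws:s3:::example-bucket/app.env",
};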
PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
- Type string
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- Value string
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
- Type string
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- Value string
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type String
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- value String
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type string
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- value string
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type str
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- value str
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
- type String
- The type of resource to assign to a container. Valid Values: GPU, InferenceAccelerator.
- value String
- The value for the specified resource type. If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container.
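For example, reserving one GPU for the container might look like this sketch:
const resourceRequirement = {
    type: "GPU",
    value: "1", // number of physical GPUs reserved for the container
};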
PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage, PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
- Size In Gib int - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- Size In Gib int - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size In Gib Integer - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size In Gib number - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size_in_gib int - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size In Gib Number - The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
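A minimal sketch, staying within the documented 21 to 200 GiB bounds:
const ephemeralStorage = {
    sizeInGib: 50, // must be between 21 and 200
};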
PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride, PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
- Device
Name string - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- Device
Type string - The Elastic Inference accelerator type to use.
- Device
Name string - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- Device
Type string - The Elastic Inference accelerator type to use.
- device
Name String - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device
Type String - The Elastic Inference accelerator type to use.
- device
Name string - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device
Type string - The Elastic Inference accelerator type to use.
- device_
name str - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device_
type str - The Elastic Inference accelerator type to use.
- device
Name String - The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device
Type String - The Elastic Inference accelerator type to use.
PipeTargetParametersEcsTaskParametersPlacementConstraint, PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- Type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- Expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- Type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type String
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression string
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type string
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression str
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type str
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
- expression String
- A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
- type String
- The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
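For instance, a memberOf constraint with a cluster-query-language expression might look like this sketch; the expression is illustrative.
const placementConstraint = {
    type: "memberOf",
    expression: "attribute:ecs.instance-type =~ t2.*",
};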
PipeTargetParametersEcsTaskParametersPlacementStrategy, PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- Type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- Field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- Type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field string
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type string
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field str
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type str
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
- field String
- The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
- type String
- The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
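As an illustration, binpacking on memory:
const placementStrategy = {
    type: "binpack",
    field: "memory", // pack tasks onto instances with the least remaining memory
};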
PipeTargetParametersEventbridgeEventBusParameters, PipeTargetParametersEventbridgeEventBusParametersArgs
- Detail
Type string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- Endpoint
Id string - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources List<string>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- The source of the event.
- Time string
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
- Detail
Type string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- Endpoint
Id string - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources []string
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string
- The source of the event.
- Time string
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
- detail
Type String - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint
Id String - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- The source of the event.
- time String
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
- detail
Type string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint
Id string - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources string[]
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source string
- The source of the event.
- time string
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
- detail_
type str - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint_
id str - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources Sequence[str]
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source str
- The source of the event.
- time str
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
- detail
Type String - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint
Id String - The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String>
- List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String
- The source of the event.
- time String
- The time stamp of the event, per RFC3339. If no time stamp is provided, the time stamp of the PutEvents call is used. This is the JSON path to the field in the event e.g. $.detail.timestamp
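A sketch of an event-bus target's parameters; the endpoint ID, source, and resource ARN are hypothetical placeholders.
// Slots into targetParameters.eventbridgeEventBusParameters.
const eventbridgeEventBusParameters = {
    detailType: "OrderShipped",
    endpointId: "abcde.veo", // subdomain of a global endpoint URL
    source: "com.example.orders",
    resources: ["arn:aws:sqs:us-east-1:123456789012:orders"],
    time: "$.detail.timestamp", // JSON path into the event
};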
PipeTargetParametersHttpParameters, PipeTargetParametersHttpParametersArgs
- Header Parameters Dictionary<string, string>
- Path Parameter Values string
- Query String Parameters Dictionary<string, string>
- Header Parameters map[string]string
- Path Parameter Values string
- Query String Parameters map[string]string
- header Parameters Map<String,String>
- path Parameter Values String
- query String Parameters Map<String,String>
- header Parameters {[key: string]: string}
- path Parameter Values string
- query String Parameters {[key: string]: string}
- header_parameters Mapping[str, str]
- path_parameter_values str
- query_string_parameters Mapping[str, str]
- header Parameters Map<String>
- path Parameter Values String
- query String Parameters Map<String>
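These three maps shape the HTTP request sent to an API destination or API Gateway target. A sketch with illustrative values follows; the header name, wildcard path value, and query parameter are assumptions, not values taken from this page.
// Slots into targetParameters.httpParameters.
const httpParameters = {
    headerParameters: { "X-Api-Key": "example-key" },
    pathParameterValues: "*", // illustrative; fills a path wildcard in the target URL
    queryStringParameters: { mode: "async" },
};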
PipeTargetParametersKinesisStreamParameters, PipeTargetParametersKinesisStreamParametersArgs
- Partition
Key string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- Partition
Key string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition
Key String - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition
Key string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition_
key str - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition
Key String - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
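For example, keying shard assignment off a field in the event; the JSON path is illustrative, and a static string also works.
// Slots into targetParameters.kinesisStreamParameters.
const kinesisStreamParameters = {
    partitionKey: "$.body.orderId",
};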
PipeTargetParametersLambdaFunctionParameters, PipeTargetParametersLambdaFunctionParametersArgs
- InvocationType string - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- InvocationType string - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType string - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocation_type str - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String - Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
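For instance, a minimal sketch of a pipe that invokes a Lambda function asynchronously; the ARNs are placeholders for resources defined elsewhere in your program.
import * as aws from "@pulumi/aws";
// Minimal sketch: ARNs are placeholders.
const lambdaPipe = new aws.pipes.Pipe("lambda-target", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
    target: "arn:aws:lambda:us-east-1:123456789012:function:example-fn",
    targetParameters: {
        lambdaFunctionParameters: {
            // FIRE_AND_FORGET invokes the function asynchronously;
            // REQUEST_RESPONSE waits for the invocation to complete.
            invocationType: "FIRE_AND_FORGET",
        },
    },
});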
PipeTargetParametersRedshiftDataParameters, PipeTargetParametersRedshiftDataParametersArgs
- Database string - The name of the database. Required when authenticating using temporary credentials.
- Sqls List<string> - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- DbUser string - The database user name. Required when authenticating using temporary credentials.
- SecretManagerArn string - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- StatementName string - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- WithEvent bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- Database string - The name of the database. Required when authenticating using temporary credentials.
- Sqls []string - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- DbUser string - The database user name. Required when authenticating using temporary credentials.
- SecretManagerArn string - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- StatementName string - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- WithEvent bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String - The name of the database. Required when authenticating using temporary credentials.
- sqls List<String> - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- dbUser String - The database user name. Required when authenticating using temporary credentials.
- secretManagerArn String - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName String - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent Boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database string - The name of the database. Required when authenticating using temporary credentials.
- sqls string[] - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- dbUser string - The database user name. Required when authenticating using temporary credentials.
- secretManagerArn string - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName string - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database str - The name of the database. Required when authenticating using temporary credentials.
- sqls Sequence[str] - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- db_user str - The database user name. Required when authenticating using temporary credentials.
- secret_manager_arn str - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statement_name str - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- with_event bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String - The name of the database. Required when authenticating using temporary credentials.
- sqls List<String> - List of SQL statement texts to run, each with a maximum length of 100,000 characters.
- dbUser String - The database user name. Required when authenticating using temporary credentials.
- secretManagerArn String - The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
- statementName String - The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
- withEvent Boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
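A minimal sketch of a pipe that runs SQL against a Redshift cluster using temporary credentials (hence database and dbUser rather than secretManagerArn); the ARNs, database name, user, and SQL text are all placeholders.
import * as aws from "@pulumi/aws";
// Minimal sketch: ARNs and Redshift identifiers are placeholders.
const redshiftPipe = new aws.pipes.Pipe("redshift-target", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
    target: "arn:aws:redshift:us-east-1:123456789012:cluster:example-cluster",
    targetParameters: {
        redshiftDataParameters: {
            database: "dev",
            dbUser: "admin",                        // temporary-credentials auth
            sqls: ["INSERT INTO events SELECT 1;"], // placeholder statement
            statementName: "pipe-insert",
            withEvent: true,                        // emit an event back to EventBridge
        },
    },
});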
PipeTargetParametersSagemakerPipelineParameters, PipeTargetParametersSagemakerPipelineParametersArgs
- PipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter> - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
- PipelineParameters []PipeTargetParametersSagemakerPipelineParametersPipelineParameter - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
- pipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter> - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
- pipelineParameters PipeTargetParametersSagemakerPipelineParametersPipelineParameter[] - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
- pipeline_parameters Sequence[PipeTargetParametersSagemakerPipelineParametersPipelineParameter] - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
- pipelineParameters List<Property Map> - List of parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
PipeTargetParametersSagemakerPipelineParametersPipelineParameter, PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
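The nested PipelineParameter objects are name/value pairs. A minimal sketch follows; the ARNs are placeholders, and the name and value fields on each entry are assumptions, since their documentation is not reproduced in this excerpt.
import * as aws from "@pulumi/aws";
// Minimal sketch: ARNs are placeholders; name/value fields are assumed.
const sagemakerPipe = new aws.pipes.Pipe("sagemaker-target", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
    target: "arn:aws:sagemaker:us-east-1:123456789012:pipeline/example-pipeline",
    targetParameters: {
        sagemakerPipelineParameters: {
            pipelineParameters: [{
                name: "InputPrefix",                 // assumed parameter name
                value: "s3://example-bucket/input/", // assumed parameter value
            }],
        },
    },
});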
PipeTargetParametersSqsQueueParameters, PipeTargetParametersSqsQueueParametersArgs
- MessageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string - The FIFO message group ID to use as the target.
- MessageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string - The FIFO message group ID to use as the target.
- messageDeduplicationId String - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String - The FIFO message group ID to use as the target.
- messageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId string - The FIFO message group ID to use as the target.
- message_deduplication_id str - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- message_group_id str - The FIFO message group ID to use as the target.
- messageDeduplicationId String - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String - The FIFO message group ID to use as the target.
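A minimal sketch of a pipe delivering to a FIFO queue; the ARNs are placeholders, and the JSONPath used for the deduplication ID is an assumption illustrating a dynamic value drawn from the source event.
import * as aws from "@pulumi/aws";
// Minimal sketch: ARNs are placeholders; the target must be a FIFO queue
// for the group and deduplication IDs to apply.
const fifoPipe = new aws.pipes.Pipe("sqs-fifo-target", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
    target: "arn:aws:sqs:us-east-1:123456789012:target-queue.fifo",
    targetParameters: {
        sqsQueueParameters: {
            messageGroupId: "orders",               // serialize delivery per group
            messageDeduplicationId: "$.messageId",  // assumed JSONPath into the event
        },
    },
});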
PipeTargetParametersStepFunctionStateMachineParameters, PipeTargetParametersStepFunctionStateMachineParametersArgs
- InvocationType string - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- InvocationType string - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType string - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocation_type str - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
- invocationType String - Specify whether to invoke the state machine synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
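A minimal sketch of a pipe that starts a Step Functions state machine; the ARNs are placeholders. Note that synchronous (REQUEST_RESPONSE) invocation applies to Express workflows, so Standard workflows should use FIRE_AND_FORGET.
import * as aws from "@pulumi/aws";
// Minimal sketch: ARNs are placeholders.
const sfnPipe = new aws.pipes.Pipe("sfn-target", {
    roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
    target: "arn:aws:states:us-east-1:123456789012:stateMachine:example",
    targetParameters: {
        stepFunctionStateMachineParameters: {
            // Start the execution and continue without waiting for it to finish.
            invocationType: "FIRE_AND_FORGET",
        },
    },
});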
Import
Using pulumi import, import pipes using the name. For example:
$ pulumi import aws:pipes/pipe:Pipe example my-pipe
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.