aws.pipes.Pipe

AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Resource for managing an AWS EventBridge Pipes Pipe.

    You can find out more about EventBridge Pipes in the User Guide.

    EventBridge Pipes are highly configurable and may require IAM permissions to work correctly. More information on the configuration options and IAM permissions is available in the User Guide.

    Note: EventBridge was formerly known as CloudWatch Events. The functionality is identical.

    Example Usage

    Basic Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const main = aws.getCallerIdentity({});
    const example = new aws.iam.Role("example", {assumeRolePolicy: pulumi.jsonStringify({
        Version: "2012-10-17",
        Statement: {
            Effect: "Allow",
            Action: "sts:AssumeRole",
            Principal: {
                Service: "pipes.amazonaws.com",
            },
            Condition: {
                StringEquals: {
                    "aws:SourceAccount": main.then(main => main.accountId),
                },
            },
        },
    })});
    const sourceQueue = new aws.sqs.Queue("source", {});
    const source = new aws.iam.RolePolicy("source", {
        role: example.id,
        policy: pulumi.jsonStringify({
            Version: "2012-10-17",
            Statement: [{
                Effect: "Allow",
                Action: [
                    "sqs:DeleteMessage",
                    "sqs:GetQueueAttributes",
                    "sqs:ReceiveMessage",
                ],
                Resource: [sourceQueue.arn],
            }],
        }),
    });
    const targetQueue = new aws.sqs.Queue("target", {});
    const target = new aws.iam.RolePolicy("target", {
        role: example.id,
        policy: pulumi.jsonStringify({
            Version: "2012-10-17",
            Statement: [{
                Effect: "Allow",
                Action: ["sqs:SendMessage"],
                Resource: [targetQueue.arn],
            }],
        }),
    });
    const examplePipe = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: example.arn,
        source: sourceQueue.arn,
        target: targetQueue.arn,
    }, {
        dependsOn: [
            source,
            target,
        ],
    });
    
    import pulumi
    import json
    import pulumi_aws as aws
    
    main = aws.get_caller_identity()
    example = aws.iam.Role("example", assume_role_policy=json.dumps({
        "Version": "2012-10-17",
        "Statement": {
            "Effect": "Allow",
            "Action": "sts:AssumeRole",
            "Principal": {
                "Service": "pipes.amazonaws.com",
            },
            "Condition": {
                "StringEquals": {
                    "aws:SourceAccount": main.account_id,
                },
            },
        },
    }))
    source_queue = aws.sqs.Queue("source")
    source = aws.iam.RolePolicy("source",
        role=example.id,
        policy=pulumi.Output.json_dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Action": [
                    "sqs:DeleteMessage",
                    "sqs:GetQueueAttributes",
                    "sqs:ReceiveMessage",
                ],
                "Resource": [source_queue.arn],
            }],
        }))
    target_queue = aws.sqs.Queue("target")
    target = aws.iam.RolePolicy("target",
        role=example.id,
        policy=pulumi.Output.json_dumps({
            "Version": "2012-10-17",
            "Statement": [{
                "Effect": "Allow",
                "Action": ["sqs:SendMessage"],
                "Resource": [target_queue.arn],
            }],
        }))
    example_pipe = aws.pipes.Pipe("example",
        name="example-pipe",
        role_arn=example.arn,
        source=source_queue.arn,
        target=target_queue.arn,
        opts=pulumi.ResourceOptions(depends_on=[
                source,
                target,
            ]))
    
    package main
    
    import (
    	"encoding/json"
    
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sqs"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		main, err := aws.GetCallerIdentity(ctx, &aws.GetCallerIdentityArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		tmpJSON0, err := json.Marshal(map[string]interface{}{
    			"Version": "2012-10-17",
    			"Statement": map[string]interface{}{
    				"Effect": "Allow",
    				"Action": "sts:AssumeRole",
    				"Principal": map[string]interface{}{
    					"Service": "pipes.amazonaws.com",
    				},
    				"Condition": map[string]interface{}{
    					"StringEquals": map[string]interface{}{
    						"aws:SourceAccount": main.AccountId,
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		json0 := string(tmpJSON0)
    		example, err := iam.NewRole(ctx, "example", &iam.RoleArgs{
    			AssumeRolePolicy: pulumi.String(json0),
    		})
    		if err != nil {
    			return err
    		}
    		sourceQueue, err := sqs.NewQueue(ctx, "source", nil)
    		if err != nil {
    			return err
    		}
    		source, err := iam.NewRolePolicy(ctx, "source", &iam.RolePolicyArgs{
    			Role: example.ID(),
    			Policy: sourceQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
    				var _zero pulumi.String
    				tmpJSON1, err := json.Marshal(map[string]interface{}{
    					"Version": "2012-10-17",
    					"Statement": []map[string]interface{}{
    						map[string]interface{}{
    							"Effect": "Allow",
    							"Action": []string{
    								"sqs:DeleteMessage",
    								"sqs:GetQueueAttributes",
    								"sqs:ReceiveMessage",
    							},
    							"Resource": []string{
    								arn,
    							},
    						},
    					},
    				})
    				if err != nil {
    					return _zero, err
    				}
    				json1 := string(tmpJSON1)
    				return pulumi.String(json1), nil
    			}).(pulumi.StringOutput),
    		})
    		if err != nil {
    			return err
    		}
    		targetQueue, err := sqs.NewQueue(ctx, "target", nil)
    		if err != nil {
    			return err
    		}
    		target, err := iam.NewRolePolicy(ctx, "target", &iam.RolePolicyArgs{
    			Role: example.ID(),
    			Policy: targetQueue.Arn.ApplyT(func(arn string) (pulumi.String, error) {
    				var _zero pulumi.String
    				tmpJSON2, err := json.Marshal(map[string]interface{}{
    					"Version": "2012-10-17",
    					"Statement": []map[string]interface{}{
    						map[string]interface{}{
    							"Effect": "Allow",
    							"Action": []string{
    								"sqs:SendMessage",
    							},
    							"Resource": []string{
    								arn,
    							},
    						},
    					},
    				})
    				if err != nil {
    					return _zero, err
    				}
    				json2 := string(tmpJSON2)
    				return pulumi.String(json2), nil
    			}).(pulumi.StringOutput),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
    			Name:    pulumi.String("example-pipe"),
    			RoleArn: example.Arn,
    			Source:  sourceQueue.Arn,
    			Target:  targetQueue.Arn,
    		}, pulumi.DependsOn([]pulumi.Resource{
    			source,
    			target,
    		}))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var main = Aws.GetCallerIdentity.Invoke();
    
        var example = new Aws.Iam.Role("example", new()
        {
        AssumeRolePolicy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
            {
                ["Version"] = "2012-10-17",
                ["Statement"] = new Dictionary<string, object?>
                {
                    ["Effect"] = "Allow",
                    ["Action"] = "sts:AssumeRole",
                    ["Principal"] = new Dictionary<string, object?>
                    {
                        ["Service"] = "pipes.amazonaws.com",
                    },
                    ["Condition"] = new Dictionary<string, object?>
                    {
                        ["StringEquals"] = new Dictionary<string, object?>
                        {
                            ["aws:SourceAccount"] = main.Apply(getCallerIdentityResult => getCallerIdentityResult.AccountId),
                        },
                    },
                },
        })),
        });
    
        var sourceQueue = new Aws.Sqs.Queue("source");
    
        var source = new Aws.Iam.RolePolicy("source", new()
        {
            Role = example.Id,
            Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
            {
                ["Version"] = "2012-10-17",
                ["Statement"] = new[]
                {
                    new Dictionary<string, object?>
                    {
                        ["Effect"] = "Allow",
                        ["Action"] = new[]
                        {
                            "sqs:DeleteMessage",
                            "sqs:GetQueueAttributes",
                            "sqs:ReceiveMessage",
                        },
                        ["Resource"] = new[]
                        {
                            sourceQueue.Arn,
                        },
                    },
                },
            })),
        });
    
        var targetQueue = new Aws.Sqs.Queue("target");
    
        var target = new Aws.Iam.RolePolicy("target", new()
        {
            Role = example.Id,
            Policy = Output.JsonSerialize(Output.Create(new Dictionary<string, object?>
            {
                ["Version"] = "2012-10-17",
                ["Statement"] = new[]
                {
                    new Dictionary<string, object?>
                    {
                        ["Effect"] = "Allow",
                        ["Action"] = new[]
                        {
                            "sqs:SendMessage",
                        },
                        ["Resource"] = new[]
                        {
                            targetQueue.Arn,
                        },
                    },
                },
            })),
        });
    
        var examplePipe = new Aws.Pipes.Pipe("example", new()
        {
            Name = "example-pipe",
            RoleArn = example.Arn,
            Source = sourceQueue.Arn,
            Target = targetQueue.Arn,
        }, new CustomResourceOptions
        {
            DependsOn =
            {
                source,
                target,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.AwsFunctions;
    import com.pulumi.aws.inputs.GetCallerIdentityArgs;
    import com.pulumi.aws.iam.Role;
    import com.pulumi.aws.iam.RoleArgs;
    import com.pulumi.aws.sqs.Queue;
    import com.pulumi.aws.iam.RolePolicy;
    import com.pulumi.aws.iam.RolePolicyArgs;
    import com.pulumi.aws.pipes.Pipe;
    import com.pulumi.aws.pipes.PipeArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var main = AwsFunctions.getCallerIdentity();
    
            var example = new Role("example", RoleArgs.builder()
                .assumeRolePolicy(serializeJson(
                    jsonObject(
                        jsonProperty("Version", "2012-10-17"),
                        jsonProperty("Statement", jsonObject(
                            jsonProperty("Effect", "Allow"),
                            jsonProperty("Action", "sts:AssumeRole"),
                            jsonProperty("Principal", jsonObject(
                                jsonProperty("Service", "pipes.amazonaws.com")
                            )),
                            jsonProperty("Condition", jsonObject(
                                jsonProperty("StringEquals", jsonObject(
                                    jsonProperty("aws:SourceAccount", main.applyValue(getCallerIdentityResult -> getCallerIdentityResult.accountId()))
                                ))
                            ))
                        ))
                    )))
                .build());
    
            var sourceQueue = new Queue("sourceQueue");
    
            var source = new RolePolicy("source", RolePolicyArgs.builder()
                .role(example.id())
                .policy(sourceQueue.arn().applyValue(arn -> serializeJson(
                    jsonObject(
                        jsonProperty("Version", "2012-10-17"),
                        jsonProperty("Statement", jsonArray(jsonObject(
                            jsonProperty("Effect", "Allow"),
                            jsonProperty("Action", jsonArray(
                                "sqs:DeleteMessage", 
                                "sqs:GetQueueAttributes", 
                                "sqs:ReceiveMessage"
                            )),
                            jsonProperty("Resource", jsonArray(arn))
                        )))
                    ))))
                .build());
    
            var targetQueue = new Queue("targetQueue");
    
            var target = new RolePolicy("target", RolePolicyArgs.builder()
                .role(example.id())
                .policy(targetQueue.arn().applyValue(arn -> serializeJson(
                    jsonObject(
                        jsonProperty("Version", "2012-10-17"),
                        jsonProperty("Statement", jsonArray(jsonObject(
                            jsonProperty("Effect", "Allow"),
                            jsonProperty("Action", jsonArray("sqs:SendMessage")),
                            jsonProperty("Resource", jsonArray(arn))
                        )))
                    ))))
                .build());
    
            var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
                .name("example-pipe")
                .roleArn(example.arn())
                .source(sourceQueue.arn())
                .target(targetQueue.arn())
                .build(), CustomResourceOptions.builder()
                    .dependsOn(                
                        source,
                        target)
                    .build());
    
        }
    }
    
    resources:
      example:
        type: aws:iam:Role
        properties:
          assumeRolePolicy:
            fn::toJSON:
              Version: 2012-10-17
              Statement:
                Effect: Allow
                Action: sts:AssumeRole
                Principal:
                  Service: pipes.amazonaws.com
                Condition:
                  StringEquals:
                    aws:SourceAccount: ${main.accountId}
      source:
        type: aws:iam:RolePolicy
        properties:
          role: ${example.id}
          policy:
            fn::toJSON:
              Version: 2012-10-17
              Statement:
                - Effect: Allow
                  Action:
                    - sqs:DeleteMessage
                    - sqs:GetQueueAttributes
                    - sqs:ReceiveMessage
                  Resource:
                    - ${sourceQueue.arn}
      sourceQueue:
        type: aws:sqs:Queue
        name: source
      target:
        type: aws:iam:RolePolicy
        properties:
          role: ${example.id}
          policy:
            fn::toJSON:
              Version: 2012-10-17
              Statement:
                - Effect: Allow
                  Action:
                    - sqs:SendMessage
                  Resource:
                    - ${targetQueue.arn}
      targetQueue:
        type: aws:sqs:Queue
        name: target
      examplePipe:
        type: aws:pipes:Pipe
        name: example
        properties:
          name: example-pipe
          roleArn: ${example.arn}
          source: ${sourceQueue.arn}
          target: ${targetQueue.arn}
        options:
          dependsOn:
            - ${source}
            - ${target}
    variables:
      main:
        fn::invoke:
          Function: aws:getCallerIdentity
          Arguments: {}
    

    Enrichment Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: exampleAwsIamRole.arn,
        source: source.arn,
        target: target.arn,
        enrichment: exampleAwsCloudwatchEventApiDestination.arn,
        enrichmentParameters: {
            httpParameters: {
                pathParameterValues: "example-path-param",
                headerParameters: {
                    "example-header": "example-value",
                    "second-example-header": "second-example-value",
                },
                queryStringParameters: {
                    "example-query-string": "example-value",
                    "second-example-query-string": "second-example-value",
                },
            },
        },
    });
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.pipes.Pipe("example",
        name="example-pipe",
        role_arn=example_aws_iam_role["arn"],
        source=source["arn"],
        target=target["arn"],
        enrichment=example_aws_cloudwatch_event_api_destination["arn"],
        enrichment_parameters={
            "http_parameters": {
                "path_parameter_values": "example-path-param",
                "header_parameters": {
                    "example-header": "example-value",
                    "second-example-header": "second-example-value",
                },
                "query_string_parameters": {
                    "example-query-string": "example-value",
                    "second-example-query-string": "second-example-value",
                },
            },
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
    			Name:       pulumi.String("example-pipe"),
    			RoleArn:    pulumi.Any(exampleAwsIamRole.Arn),
    			Source:     pulumi.Any(source.Arn),
    			Target:     pulumi.Any(target.Arn),
    			Enrichment: pulumi.Any(exampleAwsCloudwatchEventApiDestination.Arn),
    			EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
    				HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
    					PathParameterValues: pulumi.String("example-path-param"),
    					HeaderParameters: pulumi.StringMap{
    						"example-header":        pulumi.String("example-value"),
    						"second-example-header": pulumi.String("second-example-value"),
    					},
    					QueryStringParameters: pulumi.StringMap{
    						"example-query-string":        pulumi.String("example-value"),
    						"second-example-query-string": pulumi.String("second-example-value"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Pipes.Pipe("example", new()
        {
            Name = "example-pipe",
            RoleArn = exampleAwsIamRole.Arn,
            Source = source.Arn,
            Target = target.Arn,
            Enrichment = exampleAwsCloudwatchEventApiDestination.Arn,
            EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
            {
                HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
                {
                    PathParameterValues = "example-path-param",
                    HeaderParameters = 
                    {
                        { "example-header", "example-value" },
                        { "second-example-header", "second-example-value" },
                    },
                    QueryStringParameters = 
                    {
                        { "example-query-string", "example-value" },
                        { "second-example-query-string", "second-example-value" },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.pipes.Pipe;
    import com.pulumi.aws.pipes.PipeArgs;
    import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersArgs;
    import com.pulumi.aws.pipes.inputs.PipeEnrichmentParametersHttpParametersArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new Pipe("example", PipeArgs.builder()
                .name("example-pipe")
                .roleArn(exampleAwsIamRole.arn())
                .source(source.arn())
                .target(target.arn())
                .enrichment(exampleAwsCloudwatchEventApiDestination.arn())
                .enrichmentParameters(PipeEnrichmentParametersArgs.builder()
                    .httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
                        .pathParameterValues("example-path-param")
                        .headerParameters(Map.ofEntries(
                            Map.entry("example-header", "example-value"),
                            Map.entry("second-example-header", "second-example-value")
                        ))
                        .queryStringParameters(Map.ofEntries(
                            Map.entry("example-query-string", "example-value"),
                            Map.entry("second-example-query-string", "second-example-value")
                        ))
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: aws:pipes:Pipe
        properties:
          name: example-pipe
          roleArn: ${exampleAwsIamRole.arn}
          source: ${source.arn}
          target: ${target.arn}
          enrichment: ${exampleAwsCloudwatchEventApiDestination.arn}
          enrichmentParameters:
            httpParameters:
              pathParameterValues: example-path-param
              headerParameters:
                example-header: example-value
                second-example-header: second-example-value
              queryStringParameters:
                example-query-string: example-value
                second-example-query-string: second-example-value
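
    The enrichment ARN in these examples points at an EventBridge API destination. As a rough sketch (not part of the generated example; the endpoint URL and API key value are placeholders), the destination and its connection could be declared as follows. The trailing * in the invocation endpoint is what pathParameterValues fills in at request time.

    import * as aws from "@pulumi/aws";
    
    // Hypothetical connection holding the credentials for the enrichment endpoint.
    const connection = new aws.cloudwatch.EventConnection("example", {
        name: "example-connection",
        authorizationType: "API_KEY",
        authParameters: {
            apiKey: {
                key: "x-api-key",
                value: "example-api-key-value", // placeholder secret
            },
        },
    });
    
    // Hypothetical API destination whose ARN would be passed as the enrichment argument.
    const exampleAwsCloudwatchEventApiDestination = new aws.cloudwatch.EventApiDestination("example", {
        name: "example-destination",
        connectionArn: connection.arn,
        httpMethod: "POST",
        invocationEndpoint: "https://example.com/enrich/*", // * is replaced by pathParameterValues
    });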
    

    Filter Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: exampleAwsIamRole.arn,
        source: source.arn,
        target: target.arn,
        sourceParameters: {
            filterCriteria: {
                filters: [{
                    pattern: JSON.stringify({
                        source: ["event-source"],
                    }),
                }],
            },
        },
    });
    
    import pulumi
    import json
    import pulumi_aws as aws
    
    example = aws.pipes.Pipe("example",
        name="example-pipe",
        role_arn=example_aws_iam_role["arn"],
        source=source["arn"],
        target=target["arn"],
        source_parameters={
            "filter_criteria": {
                "filters": [{
                    "pattern": json.dumps({
                        "source": ["event-source"],
                    }),
                }],
            },
        })
    
    package main
    
    import (
    	"encoding/json"
    
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		tmpJSON0, err := json.Marshal(map[string]interface{}{
    			"source": []string{
    				"event-source",
    			},
    		})
    		if err != nil {
    			return err
    		}
    		json0 := string(tmpJSON0)
    		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
    			Name:    pulumi.String("example-pipe"),
    			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
    			Source:  pulumi.Any(source.Arn),
    			Target:  pulumi.Any(target.Arn),
    			SourceParameters: &pipes.PipeSourceParametersArgs{
    				FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
    					Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
    						&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
    							Pattern: pulumi.String(json0),
    						},
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Pipes.Pipe("example", new()
        {
            Name = "example-pipe",
            RoleArn = exampleAwsIamRole.Arn,
            Source = source.Arn,
            Target = target.Arn,
            SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
            {
                FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
                {
                    Filters = new[]
                    {
                        new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
                        {
                            Pattern = JsonSerializer.Serialize(new Dictionary<string, object?>
                            {
                                ["source"] = new[]
                                {
                                    "event-source",
                                },
                            }),
                        },
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.pipes.Pipe;
    import com.pulumi.aws.pipes.PipeArgs;
    import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
    import com.pulumi.aws.pipes.inputs.PipeSourceParametersFilterCriteriaArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new Pipe("example", PipeArgs.builder()
                .name("example-pipe")
                .roleArn(exampleAwsIamRole.arn())
                .source(source.arn())
                .target(target.arn())
                .sourceParameters(PipeSourceParametersArgs.builder()
                    .filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
                        .filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
                            .pattern(serializeJson(
                                jsonObject(
                                    jsonProperty("source", jsonArray("event-source"))
                                )))
                            .build())
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: aws:pipes:Pipe
        properties:
          name: example-pipe
          roleArn: ${exampleAwsIamRole.arn}
          source: ${source.arn}
          target: ${target.arn}
          sourceParameters:
            filterCriteria:
              filters:
                - pattern:
                    fn::toJSON:
                      source:
                        - event-source
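
    Filter patterns use the standard EventBridge event pattern syntax, so matching is not limited to exact values. As an illustrative sketch (the field names below are assumptions, not taken from the generated example), a pattern can combine prefix and numeric matching:

    // Matches events whose source begins with "event-" and whose
    // detail.price is below 100, using EventBridge pattern operators.
    const richerPattern = JSON.stringify({
        source: [{
            prefix: "event-",
        }],
        detail: {
            price: [{
                numeric: ["<", 100],
            }],
        },
    });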
    

    CloudWatch Logs Logging Configuration Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.cloudwatch.LogGroup("example", {name: "example-pipe-target"});
    const examplePipe = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: exampleAwsIamRole.arn,
        source: sourceAwsSqsQueue.arn,
        target: targetAwsSqsQueue.arn,
        logConfiguration: {
            includeExecutionDatas: ["ALL"],
            level: "INFO",
            cloudwatchLogsLogDestination: {
                logGroupArn: example.arn,
            },
        },
    }, {
        dependsOn: [
            source,
            target,
        ],
    });
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.cloudwatch.LogGroup("example", name="example-pipe-target")
    example_pipe = aws.pipes.Pipe("example",
        name="example-pipe",
        role_arn=example_aws_iam_role["arn"],
        source=source_aws_sqs_queue["arn"],
        target=target_aws_sqs_queue["arn"],
        log_configuration={
            "include_execution_datas": ["ALL"],
            "level": "INFO",
            "cloudwatch_logs_log_destination": {
                "log_group_arn": target_aws_cloudwatch_log_group["arn"],
            },
        },
        opts=pulumi.ResourceOptions(depends_on=[
                source,
                target,
            ]))
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/cloudwatch"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		example, err := cloudwatch.NewLogGroup(ctx, "example", &cloudwatch.LogGroupArgs{
    			Name: pulumi.String("example-pipe-target"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
    			Name:    pulumi.String("example-pipe"),
    			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
    			Source:  pulumi.Any(sourceAwsSqsQueue.Arn),
    			Target:  pulumi.Any(targetAwsSqsQueue.Arn),
    			LogConfiguration: &pipes.PipeLogConfigurationArgs{
    				IncludeExecutionDatas: pulumi.StringArray{
    					pulumi.String("ALL"),
    				},
    				Level: pulumi.String("INFO"),
    				CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
    				LogGroupArn: example.Arn,
    				},
    			},
    		}, pulumi.DependsOn([]pulumi.Resource{
    			source,
    			target,
    		}))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.CloudWatch.LogGroup("example", new()
        {
            Name = "example-pipe-target",
        });
    
        var examplePipe = new Aws.Pipes.Pipe("example", new()
        {
            Name = "example-pipe",
            RoleArn = exampleAwsIamRole.Arn,
            Source = sourceAwsSqsQueue.Arn,
            Target = targetAwsSqsQueue.Arn,
            LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
            {
                IncludeExecutionDatas = new[]
                {
                    "ALL",
                },
                Level = "INFO",
                CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
                {
                LogGroupArn = example.Arn,
                },
            },
        }, new CustomResourceOptions
        {
            DependsOn =
            {
                source,
                target,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.cloudwatch.LogGroup;
    import com.pulumi.aws.cloudwatch.LogGroupArgs;
    import com.pulumi.aws.pipes.Pipe;
    import com.pulumi.aws.pipes.PipeArgs;
    import com.pulumi.aws.pipes.inputs.PipeLogConfigurationArgs;
    import com.pulumi.aws.pipes.inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new LogGroup("example", LogGroupArgs.builder()
                .name("example-pipe-target")
                .build());
    
            var examplePipe = new Pipe("examplePipe", PipeArgs.builder()
                .name("example-pipe")
                .roleArn(exampleAwsIamRole.arn())
                .source(sourceAwsSqsQueue.arn())
                .target(targetAwsSqsQueue.arn())
                .logConfiguration(PipeLogConfigurationArgs.builder()
                    .includeExecutionDatas("ALL")
                    .level("INFO")
                    .cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
                    .logGroupArn(example.arn())
                        .build())
                    .build())
                .build(), CustomResourceOptions.builder()
                    .dependsOn(                
                        source,
                        target)
                    .build());
    
        }
    }
    
    resources:
      example:
        type: aws:cloudwatch:LogGroup
        properties:
          name: example-pipe-target
      examplePipe:
        type: aws:pipes:Pipe
        name: example
        properties:
          name: example-pipe
          roleArn: ${exampleAwsIamRole.arn}
          source: ${sourceAwsSqsQueue.arn}
          target: ${targetAwsSqsQueue.arn}
          logConfiguration:
            includeExecutionDatas:
              - ALL
            level: INFO
            cloudwatchLogsLogDestination:
              logGroupArn: ${example.arn}
        options:
          dependsOn:
            - ${source}
            - ${target}
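
    Pipe logs can also be delivered to Amazon S3 or Kinesis Data Firehose instead of CloudWatch Logs. A minimal sketch of an S3 destination follows; the IAM role and queue references are placeholders mirroring the example above, and the bucket and caller-identity lookup are assumptions:

    import * as aws from "@pulumi/aws";
    
    const logs = new aws.s3.BucketV2("logs", {});
    const current = aws.getCallerIdentity({});
    
    const pipeWithS3Logs = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: exampleAwsIamRole.arn,
        source: sourceAwsSqsQueue.arn,
        target: targetAwsSqsQueue.arn,
        logConfiguration: {
            level: "TRACE",
            s3LogDestination: {
                bucketName: logs.bucket,
                bucketOwner: current.then(c => c.accountId),
                outputFormat: "json",
            },
        },
    });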
    

    SQS Source and Target Configuration Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const example = new aws.pipes.Pipe("example", {
        name: "example-pipe",
        roleArn: exampleAwsIamRole.arn,
        source: source.arn,
        target: target.arn,
        sourceParameters: {
            sqsQueueParameters: {
                batchSize: 1,
                maximumBatchingWindowInSeconds: 2,
            },
        },
        targetParameters: {
            sqsQueueParameters: {
                messageDeduplicationId: "example-dedupe",
                messageGroupId: "example-group",
            },
        },
    });
    
    import pulumi
    import pulumi_aws as aws
    
    example = aws.pipes.Pipe("example",
        name="example-pipe",
        role_arn=example_aws_iam_role["arn"],
        source=source["arn"],
        target=target["arn"],
        source_parameters={
            "sqs_queue_parameters": {
                "batch_size": 1,
                "maximum_batching_window_in_seconds": 2,
            },
        },
        target_parameters={
            "sqs_queue_parameters": {
                "message_deduplication_id": "example-dedupe",
                "message_group_id": "example-group",
            },
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/pipes"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := pipes.NewPipe(ctx, "example", &pipes.PipeArgs{
    			Name:    pulumi.String("example-pipe"),
    			RoleArn: pulumi.Any(exampleAwsIamRole.Arn),
    			Source:  pulumi.Any(source.Arn),
    			Target:  pulumi.Any(target.Arn),
    			SourceParameters: &pipes.PipeSourceParametersArgs{
    				SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
    					BatchSize:                      pulumi.Int(1),
    					MaximumBatchingWindowInSeconds: pulumi.Int(2),
    				},
    			},
    			TargetParameters: &pipes.PipeTargetParametersArgs{
    				SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
    					MessageDeduplicationId: pulumi.String("example-dedupe"),
    					MessageGroupId:         pulumi.String("example-group"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var example = new Aws.Pipes.Pipe("example", new()
        {
            Name = "example-pipe",
            RoleArn = exampleAwsIamRole.Arn,
            Source = source.Arn,
            Target = target.Arn,
            SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
            {
                SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
                {
                    BatchSize = 1,
                    MaximumBatchingWindowInSeconds = 2,
                },
            },
            TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
            {
                SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
                {
                    MessageDeduplicationId = "example-dedupe",
                    MessageGroupId = "example-group",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.pipes.Pipe;
    import com.pulumi.aws.pipes.PipeArgs;
    import com.pulumi.aws.pipes.inputs.PipeSourceParametersArgs;
    import com.pulumi.aws.pipes.inputs.PipeSourceParametersSqsQueueParametersArgs;
    import com.pulumi.aws.pipes.inputs.PipeTargetParametersArgs;
    import com.pulumi.aws.pipes.inputs.PipeTargetParametersSqsQueueParametersArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new Pipe("example", PipeArgs.builder()
                .name("example-pipe")
                .roleArn(exampleAwsIamRole.arn())
                .source(source.arn())
                .target(target.arn())
                .sourceParameters(PipeSourceParametersArgs.builder()
                    .sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
                        .batchSize(1)
                        .maximumBatchingWindowInSeconds(2)
                        .build())
                    .build())
                .targetParameters(PipeTargetParametersArgs.builder()
                    .sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
                        .messageDeduplicationId("example-dedupe")
                        .messageGroupId("example-group")
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      example:
        type: aws:pipes:Pipe
        properties:
          name: example-pipe
          roleArn: ${exampleAwsIamRole.arn}
          source: ${source.arn}
          target: ${target.arn}
          sourceParameters:
            sqsQueueParameters:
              batchSize: 1
              maximumBatchingWindowInSeconds: 2
          targetParameters:
            sqsQueueParameters:
              messageDeduplicationId: example-dedupe
              messageGroupId: example-group
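
    The event delivered to the target can also be reshaped with inputTemplate under targetParameters. A minimal sketch, assuming an SQS source: the <$.messageId> and <$.body> placeholders are JSONPath references into the source event, per the EventBridge Pipes input transformer syntax (exact quoting of resolved string values follows the Pipes template rules, so verify the rendered payload):

    import * as aws from "@pulumi/aws";
    
    const reshaped = new aws.pipes.Pipe("reshaped", {
        name: "example-pipe-reshaped",
        roleArn: exampleAwsIamRole.arn, // placeholder role, as in the examples above
        source: source.arn,
        target: target.arn,
        targetParameters: {
            inputTemplate: '{"id": <$.messageId>, "payload": <$.body>}',
        },
    });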
    

    Create Pipe Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
    @overload
    def Pipe(resource_name: str,
             args: PipeArgs,
             opts: Optional[ResourceOptions] = None)
    
    @overload
    def Pipe(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             role_arn: Optional[str] = None,
             target: Optional[str] = None,
             source: Optional[str] = None,
             name_prefix: Optional[str] = None,
             log_configuration: Optional[PipeLogConfigurationArgs] = None,
             name: Optional[str] = None,
             description: Optional[str] = None,
             enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
             enrichment: Optional[str] = None,
             source_parameters: Optional[PipeSourceParametersArgs] = None,
             tags: Optional[Mapping[str, str]] = None,
             desired_state: Optional[str] = None,
             target_parameters: Optional[PipeTargetParametersArgs] = None)
    func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
    public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
    public Pipe(String name, PipeArgs args)
    public Pipe(String name, PipeArgs args, CustomResourceOptions options)
    
    type: aws:pipes:Pipe
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args PipeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args PipeArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args PipeArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args PipeArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args PipeArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var pipeResource = new Aws.Pipes.Pipe("pipeResource", new()
    {
        RoleArn = "string",
        Target = "string",
        Source = "string",
        NamePrefix = "string",
        LogConfiguration = new Aws.Pipes.Inputs.PipeLogConfigurationArgs
        {
            Level = "string",
            CloudwatchLogsLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationCloudwatchLogsLogDestinationArgs
            {
                LogGroupArn = "string",
            },
            FirehoseLogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationFirehoseLogDestinationArgs
            {
                DeliveryStreamArn = "string",
            },
            IncludeExecutionDatas = new[]
            {
                "string",
            },
            S3LogDestination = new Aws.Pipes.Inputs.PipeLogConfigurationS3LogDestinationArgs
            {
                BucketName = "string",
                BucketOwner = "string",
                OutputFormat = "string",
                Prefix = "string",
            },
        },
        Name = "string",
        Description = "string",
        EnrichmentParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersArgs
        {
            HttpParameters = new Aws.Pipes.Inputs.PipeEnrichmentParametersHttpParametersArgs
            {
                HeaderParameters = 
                {
                    { "string", "string" },
                },
                PathParameterValues = "string",
                QueryStringParameters = 
                {
                    { "string", "string" },
                },
            },
            InputTemplate = "string",
        },
        Enrichment = "string",
        SourceParameters = new Aws.Pipes.Inputs.PipeSourceParametersArgs
        {
            ActivemqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersArgs
            {
                Credentials = new Aws.Pipes.Inputs.PipeSourceParametersActivemqBrokerParametersCredentialsArgs
                {
                    BasicAuth = "string",
                },
                QueueName = "string",
                BatchSize = 0,
                MaximumBatchingWindowInSeconds = 0,
            },
            DynamodbStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersArgs
            {
                StartingPosition = "string",
                BatchSize = 0,
                DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs
                {
                    Arn = "string",
                },
                MaximumBatchingWindowInSeconds = 0,
                MaximumRecordAgeInSeconds = 0,
                MaximumRetryAttempts = 0,
                OnPartialBatchItemFailure = "string",
                ParallelizationFactor = 0,
            },
            FilterCriteria = new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaArgs
            {
                Filters = new[]
                {
                    new Aws.Pipes.Inputs.PipeSourceParametersFilterCriteriaFilterArgs
                    {
                        Pattern = "string",
                    },
                },
            },
            KinesisStreamParameters = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersArgs
            {
                StartingPosition = "string",
                BatchSize = 0,
                DeadLetterConfig = new Aws.Pipes.Inputs.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs
                {
                    Arn = "string",
                },
                MaximumBatchingWindowInSeconds = 0,
                MaximumRecordAgeInSeconds = 0,
                MaximumRetryAttempts = 0,
                OnPartialBatchItemFailure = "string",
                ParallelizationFactor = 0,
                StartingPositionTimestamp = "string",
            },
            ManagedStreamingKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersArgs
            {
                TopicName = "string",
                BatchSize = 0,
                ConsumerGroupId = "string",
                Credentials = new Aws.Pipes.Inputs.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs
                {
                    ClientCertificateTlsAuth = "string",
                    SaslScram512Auth = "string",
                },
                MaximumBatchingWindowInSeconds = 0,
                StartingPosition = "string",
            },
            RabbitmqBrokerParameters = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersArgs
            {
                Credentials = new Aws.Pipes.Inputs.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs
                {
                    BasicAuth = "string",
                },
                QueueName = "string",
                BatchSize = 0,
                MaximumBatchingWindowInSeconds = 0,
                VirtualHost = "string",
            },
            SelfManagedKafkaParameters = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersArgs
            {
                TopicName = "string",
                AdditionalBootstrapServers = new[]
                {
                    "string",
                },
                BatchSize = 0,
                ConsumerGroupId = "string",
                Credentials = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs
                {
                    BasicAuth = "string",
                    ClientCertificateTlsAuth = "string",
                    SaslScram256Auth = "string",
                    SaslScram512Auth = "string",
                },
                MaximumBatchingWindowInSeconds = 0,
                ServerRootCaCertificate = "string",
                StartingPosition = "string",
                Vpc = new Aws.Pipes.Inputs.PipeSourceParametersSelfManagedKafkaParametersVpcArgs
                {
                    SecurityGroups = new[]
                    {
                        "string",
                    },
                    Subnets = new[]
                    {
                        "string",
                    },
                },
            },
            SqsQueueParameters = new Aws.Pipes.Inputs.PipeSourceParametersSqsQueueParametersArgs
            {
                BatchSize = 0,
                MaximumBatchingWindowInSeconds = 0,
            },
        },
        Tags = 
        {
            { "string", "string" },
        },
        DesiredState = "string",
        TargetParameters = new Aws.Pipes.Inputs.PipeTargetParametersArgs
        {
            BatchJobParameters = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArgs
            {
                JobDefinition = "string",
                JobName = "string",
                ArrayProperties = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersArrayPropertiesArgs
                {
                    Size = 0,
                },
                ContainerOverrides = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesArgs
                {
                    Commands = new[]
                    {
                        "string",
                    },
                    Environments = new[]
                    {
                        new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs
                        {
                            Name = "string",
                            Value = "string",
                        },
                    },
                    InstanceType = "string",
                    ResourceRequirements = new[]
                    {
                        new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs
                        {
                            Type = "string",
                            Value = "string",
                        },
                    },
                },
                DependsOns = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersDependsOnArgs
                    {
                        JobId = "string",
                        Type = "string",
                    },
                },
                Parameters = 
                {
                    { "string", "string" },
                },
                RetryStrategy = new Aws.Pipes.Inputs.PipeTargetParametersBatchJobParametersRetryStrategyArgs
                {
                    Attempts = 0,
                },
            },
            CloudwatchLogsParameters = new Aws.Pipes.Inputs.PipeTargetParametersCloudwatchLogsParametersArgs
            {
                LogStreamName = "string",
                Timestamp = "string",
            },
            EcsTaskParameters = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersArgs
            {
                TaskDefinitionArn = "string",
                Overrides = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesArgs
                {
                    ContainerOverrides = new[]
                    {
                        new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs
                        {
                            Commands = new[]
                            {
                                "string",
                            },
                            Cpu = 0,
                            EnvironmentFiles = new[]
                            {
                                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs
                                {
                                    Type = "string",
                                    Value = "string",
                                },
                            },
                            Environments = new[]
                            {
                                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs
                                {
                                    Name = "string",
                                    Value = "string",
                                },
                            },
                            Memory = 0,
                            MemoryReservation = 0,
                            Name = "string",
                            ResourceRequirements = new[]
                            {
                                new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs
                                {
                                    Type = "string",
                                    Value = "string",
                                },
                            },
                        },
                    },
                    Cpu = "string",
                    EphemeralStorage = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs
                    {
                        SizeInGib = 0,
                    },
                    ExecutionRoleArn = "string",
                    InferenceAcceleratorOverrides = new[]
                    {
                        new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs
                        {
                            DeviceName = "string",
                            DeviceType = "string",
                        },
                    },
                    Memory = "string",
                    TaskRoleArn = "string",
                },
                PlacementStrategies = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs
                    {
                        Field = "string",
                        Type = "string",
                    },
                },
                Group = "string",
                LaunchType = "string",
                NetworkConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs
                {
                    AwsVpcConfiguration = new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs
                    {
                        AssignPublicIp = "string",
                        SecurityGroups = new[]
                        {
                            "string",
                        },
                        Subnets = new[]
                        {
                            "string",
                        },
                    },
                },
                CapacityProviderStrategies = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs
                    {
                        CapacityProvider = "string",
                        Base = 0,
                        Weight = 0,
                    },
                },
                PlacementConstraints = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs
                    {
                        Expression = "string",
                        Type = "string",
                    },
                },
                EnableExecuteCommand = false,
                PlatformVersion = "string",
                PropagateTags = "string",
                ReferenceId = "string",
                Tags = 
                {
                    { "string", "string" },
                },
                TaskCount = 0,
                EnableEcsManagedTags = false,
            },
            EventbridgeEventBusParameters = new Aws.Pipes.Inputs.PipeTargetParametersEventbridgeEventBusParametersArgs
            {
                DetailType = "string",
                EndpointId = "string",
                Resources = new[]
                {
                    "string",
                },
                Source = "string",
                Time = "string",
            },
            HttpParameters = new Aws.Pipes.Inputs.PipeTargetParametersHttpParametersArgs
            {
                HeaderParameters = 
                {
                    { "string", "string" },
                },
                PathParameterValues = "string",
                QueryStringParameters = 
                {
                    { "string", "string" },
                },
            },
            InputTemplate = "string",
            KinesisStreamParameters = new Aws.Pipes.Inputs.PipeTargetParametersKinesisStreamParametersArgs
            {
                PartitionKey = "string",
            },
            LambdaFunctionParameters = new Aws.Pipes.Inputs.PipeTargetParametersLambdaFunctionParametersArgs
            {
                InvocationType = "string",
            },
            RedshiftDataParameters = new Aws.Pipes.Inputs.PipeTargetParametersRedshiftDataParametersArgs
            {
                Database = "string",
                Sqls = new[]
                {
                    "string",
                },
                DbUser = "string",
                SecretManagerArn = "string",
                StatementName = "string",
                WithEvent = false,
            },
            SagemakerPipelineParameters = new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersArgs
            {
                PipelineParameters = new[]
                {
                    new Aws.Pipes.Inputs.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs
                    {
                        Name = "string",
                        Value = "string",
                    },
                },
            },
            SqsQueueParameters = new Aws.Pipes.Inputs.PipeTargetParametersSqsQueueParametersArgs
            {
                MessageDeduplicationId = "string",
                MessageGroupId = "string",
            },
            StepFunctionStateMachineParameters = new Aws.Pipes.Inputs.PipeTargetParametersStepFunctionStateMachineParametersArgs
            {
                InvocationType = "string",
            },
        },
    });
    
    example, err := pipes.NewPipe(ctx, "pipeResource", &pipes.PipeArgs{
    	RoleArn:    pulumi.String("string"),
    	Target:     pulumi.String("string"),
    	Source:     pulumi.String("string"),
    	NamePrefix: pulumi.String("string"),
    	LogConfiguration: &pipes.PipeLogConfigurationArgs{
    		Level: pulumi.String("string"),
    		CloudwatchLogsLogDestination: &pipes.PipeLogConfigurationCloudwatchLogsLogDestinationArgs{
    			LogGroupArn: pulumi.String("string"),
    		},
    		FirehoseLogDestination: &pipes.PipeLogConfigurationFirehoseLogDestinationArgs{
    			DeliveryStreamArn: pulumi.String("string"),
    		},
    		IncludeExecutionDatas: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		S3LogDestination: &pipes.PipeLogConfigurationS3LogDestinationArgs{
    			BucketName:   pulumi.String("string"),
    			BucketOwner:  pulumi.String("string"),
    			OutputFormat: pulumi.String("string"),
    			Prefix:       pulumi.String("string"),
    		},
    	},
    	Name:        pulumi.String("string"),
    	Description: pulumi.String("string"),
    	EnrichmentParameters: &pipes.PipeEnrichmentParametersArgs{
    		HttpParameters: &pipes.PipeEnrichmentParametersHttpParametersArgs{
    			HeaderParameters: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			PathParameterValues: pulumi.String("string"),
    			QueryStringParameters: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		InputTemplate: pulumi.String("string"),
    	},
    	Enrichment: pulumi.String("string"),
    	SourceParameters: &pipes.PipeSourceParametersArgs{
    		ActivemqBrokerParameters: &pipes.PipeSourceParametersActivemqBrokerParametersArgs{
    			Credentials: &pipes.PipeSourceParametersActivemqBrokerParametersCredentialsArgs{
    				BasicAuth: pulumi.String("string"),
    			},
    			QueueName:                      pulumi.String("string"),
    			BatchSize:                      pulumi.Int(0),
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    		},
    		DynamodbStreamParameters: &pipes.PipeSourceParametersDynamodbStreamParametersArgs{
    			StartingPosition: pulumi.String("string"),
    			BatchSize:        pulumi.Int(0),
    			DeadLetterConfig: &pipes.PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs{
    				Arn: pulumi.String("string"),
    			},
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    			MaximumRecordAgeInSeconds:      pulumi.Int(0),
    			MaximumRetryAttempts:           pulumi.Int(0),
    			OnPartialBatchItemFailure:      pulumi.String("string"),
    			ParallelizationFactor:          pulumi.Int(0),
    		},
    		FilterCriteria: &pipes.PipeSourceParametersFilterCriteriaArgs{
    			Filters: pipes.PipeSourceParametersFilterCriteriaFilterArray{
    				&pipes.PipeSourceParametersFilterCriteriaFilterArgs{
    					Pattern: pulumi.String("string"),
    				},
    			},
    		},
    		KinesisStreamParameters: &pipes.PipeSourceParametersKinesisStreamParametersArgs{
    			StartingPosition: pulumi.String("string"),
    			BatchSize:        pulumi.Int(0),
    			DeadLetterConfig: &pipes.PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs{
    				Arn: pulumi.String("string"),
    			},
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    			MaximumRecordAgeInSeconds:      pulumi.Int(0),
    			MaximumRetryAttempts:           pulumi.Int(0),
    			OnPartialBatchItemFailure:      pulumi.String("string"),
    			ParallelizationFactor:          pulumi.Int(0),
    			StartingPositionTimestamp:      pulumi.String("string"),
    		},
    		ManagedStreamingKafkaParameters: &pipes.PipeSourceParametersManagedStreamingKafkaParametersArgs{
    			TopicName:       pulumi.String("string"),
    			BatchSize:       pulumi.Int(0),
    			ConsumerGroupId: pulumi.String("string"),
    			Credentials: &pipes.PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs{
    				ClientCertificateTlsAuth: pulumi.String("string"),
    				SaslScram512Auth:         pulumi.String("string"),
    			},
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    			StartingPosition:               pulumi.String("string"),
    		},
    		RabbitmqBrokerParameters: &pipes.PipeSourceParametersRabbitmqBrokerParametersArgs{
    			Credentials: &pipes.PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs{
    				BasicAuth: pulumi.String("string"),
    			},
    			QueueName:                      pulumi.String("string"),
    			BatchSize:                      pulumi.Int(0),
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    			VirtualHost:                    pulumi.String("string"),
    		},
    		SelfManagedKafkaParameters: &pipes.PipeSourceParametersSelfManagedKafkaParametersArgs{
    			TopicName: pulumi.String("string"),
    			AdditionalBootstrapServers: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			BatchSize:       pulumi.Int(0),
    			ConsumerGroupId: pulumi.String("string"),
    			Credentials: &pipes.PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs{
    				BasicAuth:                pulumi.String("string"),
    				ClientCertificateTlsAuth: pulumi.String("string"),
    				SaslScram256Auth:         pulumi.String("string"),
    				SaslScram512Auth:         pulumi.String("string"),
    			},
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    			ServerRootCaCertificate:        pulumi.String("string"),
    			StartingPosition:               pulumi.String("string"),
    			Vpc: &pipes.PipeSourceParametersSelfManagedKafkaParametersVpcArgs{
    				SecurityGroups: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Subnets: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    			},
    		},
    		SqsQueueParameters: &pipes.PipeSourceParametersSqsQueueParametersArgs{
    			BatchSize:                      pulumi.Int(0),
    			MaximumBatchingWindowInSeconds: pulumi.Int(0),
    		},
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	DesiredState: pulumi.String("string"),
    	TargetParameters: &pipes.PipeTargetParametersArgs{
    		BatchJobParameters: &pipes.PipeTargetParametersBatchJobParametersArgs{
    			JobDefinition: pulumi.String("string"),
    			JobName:       pulumi.String("string"),
    			ArrayProperties: &pipes.PipeTargetParametersBatchJobParametersArrayPropertiesArgs{
    				Size: pulumi.Int(0),
    			},
    			ContainerOverrides: &pipes.PipeTargetParametersBatchJobParametersContainerOverridesArgs{
    				Commands: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Environments: pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArray{
    					&pipes.PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs{
    						Name:  pulumi.String("string"),
    						Value: pulumi.String("string"),
    					},
    				},
    				InstanceType: pulumi.String("string"),
    				ResourceRequirements: pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArray{
    					&pipes.PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs{
    						Type:  pulumi.String("string"),
    						Value: pulumi.String("string"),
    					},
    				},
    			},
    			DependsOns: pipes.PipeTargetParametersBatchJobParametersDependsOnArray{
    				&pipes.PipeTargetParametersBatchJobParametersDependsOnArgs{
    					JobId: pulumi.String("string"),
    					Type:  pulumi.String("string"),
    				},
    			},
    			Parameters: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			RetryStrategy: &pipes.PipeTargetParametersBatchJobParametersRetryStrategyArgs{
    				Attempts: pulumi.Int(0),
    			},
    		},
    		CloudwatchLogsParameters: &pipes.PipeTargetParametersCloudwatchLogsParametersArgs{
    			LogStreamName: pulumi.String("string"),
    			Timestamp:     pulumi.String("string"),
    		},
    		EcsTaskParameters: &pipes.PipeTargetParametersEcsTaskParametersArgs{
    			TaskDefinitionArn: pulumi.String("string"),
    			Overrides: &pipes.PipeTargetParametersEcsTaskParametersOverridesArgs{
    				ContainerOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArray{
    					&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs{
    						Commands: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						Cpu: pulumi.Int(0),
    						EnvironmentFiles: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArray{
    							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs{
    								Type:  pulumi.String("string"),
    								Value: pulumi.String("string"),
    							},
    						},
    						Environments: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArray{
    							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs{
    								Name:  pulumi.String("string"),
    								Value: pulumi.String("string"),
    							},
    						},
    						Memory:            pulumi.Int(0),
    						MemoryReservation: pulumi.Int(0),
    						Name:              pulumi.String("string"),
    						ResourceRequirements: pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArray{
    							&pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs{
    								Type:  pulumi.String("string"),
    								Value: pulumi.String("string"),
    							},
    						},
    					},
    				},
    				Cpu: pulumi.String("string"),
    				EphemeralStorage: &pipes.PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs{
    					SizeInGib: pulumi.Int(0),
    				},
    				ExecutionRoleArn: pulumi.String("string"),
    				InferenceAcceleratorOverrides: pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArray{
    					&pipes.PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs{
    						DeviceName: pulumi.String("string"),
    						DeviceType: pulumi.String("string"),
    					},
    				},
    				Memory:      pulumi.String("string"),
    				TaskRoleArn: pulumi.String("string"),
    			},
    			PlacementStrategies: pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArray{
    				&pipes.PipeTargetParametersEcsTaskParametersPlacementStrategyArgs{
    					Field: pulumi.String("string"),
    					Type:  pulumi.String("string"),
    				},
    			},
    			Group:      pulumi.String("string"),
    			LaunchType: pulumi.String("string"),
    			NetworkConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs{
    				AwsVpcConfiguration: &pipes.PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs{
    					AssignPublicIp: pulumi.String("string"),
    					SecurityGroups: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					Subnets: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    				},
    			},
    			CapacityProviderStrategies: pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArray{
    				&pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs{
    					CapacityProvider: pulumi.String("string"),
    					Base:             pulumi.Int(0),
    					Weight:           pulumi.Int(0),
    				},
    			},
    			PlacementConstraints: pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArray{
    				&pipes.PipeTargetParametersEcsTaskParametersPlacementConstraintArgs{
    					Expression: pulumi.String("string"),
    					Type:       pulumi.String("string"),
    				},
    			},
    			EnableExecuteCommand: pulumi.Bool(false),
    			PlatformVersion:      pulumi.String("string"),
    			PropagateTags:        pulumi.String("string"),
    			ReferenceId:          pulumi.String("string"),
    			Tags: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			TaskCount:            pulumi.Int(0),
    			EnableEcsManagedTags: pulumi.Bool(false),
    		},
    		EventbridgeEventBusParameters: &pipes.PipeTargetParametersEventbridgeEventBusParametersArgs{
    			DetailType: pulumi.String("string"),
    			EndpointId: pulumi.String("string"),
    			Resources: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			Source: pulumi.String("string"),
    			Time:   pulumi.String("string"),
    		},
    		HttpParameters: &pipes.PipeTargetParametersHttpParametersArgs{
    			HeaderParameters: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			PathParameterValues: pulumi.String("string"),
    			QueryStringParameters: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    		InputTemplate: pulumi.String("string"),
    		KinesisStreamParameters: &pipes.PipeTargetParametersKinesisStreamParametersArgs{
    			PartitionKey: pulumi.String("string"),
    		},
    		LambdaFunctionParameters: &pipes.PipeTargetParametersLambdaFunctionParametersArgs{
    			InvocationType: pulumi.String("string"),
    		},
    		RedshiftDataParameters: &pipes.PipeTargetParametersRedshiftDataParametersArgs{
    			Database: pulumi.String("string"),
    			Sqls: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			DbUser:           pulumi.String("string"),
    			SecretManagerArn: pulumi.String("string"),
    			StatementName:    pulumi.String("string"),
    			WithEvent:        pulumi.Bool(false),
    		},
    		SagemakerPipelineParameters: &pipes.PipeTargetParametersSagemakerPipelineParametersArgs{
    			PipelineParameters: pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArray{
    				&pipes.PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs{
    					Name:  pulumi.String("string"),
    					Value: pulumi.String("string"),
    				},
    			},
    		},
    		SqsQueueParameters: &pipes.PipeTargetParametersSqsQueueParametersArgs{
    			MessageDeduplicationId: pulumi.String("string"),
    			MessageGroupId:         pulumi.String("string"),
    		},
    		StepFunctionStateMachineParameters: &pipes.PipeTargetParametersStepFunctionStateMachineParametersArgs{
    			InvocationType: pulumi.String("string"),
    		},
    	},
    })
    
    var pipeResource = new Pipe("pipeResource", PipeArgs.builder()
        .roleArn("string")
        .target("string")
        .source("string")
        .namePrefix("string")
        .logConfiguration(PipeLogConfigurationArgs.builder()
            .level("string")
            .cloudwatchLogsLogDestination(PipeLogConfigurationCloudwatchLogsLogDestinationArgs.builder()
                .logGroupArn("string")
                .build())
            .firehoseLogDestination(PipeLogConfigurationFirehoseLogDestinationArgs.builder()
                .deliveryStreamArn("string")
                .build())
            .includeExecutionDatas("string")
            .s3LogDestination(PipeLogConfigurationS3LogDestinationArgs.builder()
                .bucketName("string")
                .bucketOwner("string")
                .outputFormat("string")
                .prefix("string")
                .build())
            .build())
        .name("string")
        .description("string")
        .enrichmentParameters(PipeEnrichmentParametersArgs.builder()
            .httpParameters(PipeEnrichmentParametersHttpParametersArgs.builder()
                .headerParameters(Map.of("string", "string"))
                .pathParameterValues("string")
                .queryStringParameters(Map.of("string", "string"))
                .build())
            .inputTemplate("string")
            .build())
        .enrichment("string")
        .sourceParameters(PipeSourceParametersArgs.builder()
            .activemqBrokerParameters(PipeSourceParametersActivemqBrokerParametersArgs.builder()
                .credentials(PipeSourceParametersActivemqBrokerParametersCredentialsArgs.builder()
                    .basicAuth("string")
                    .build())
                .queueName("string")
                .batchSize(0)
                .maximumBatchingWindowInSeconds(0)
                .build())
            .dynamodbStreamParameters(PipeSourceParametersDynamodbStreamParametersArgs.builder()
                .startingPosition("string")
                .batchSize(0)
                .deadLetterConfig(PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs.builder()
                    .arn("string")
                    .build())
                .maximumBatchingWindowInSeconds(0)
                .maximumRecordAgeInSeconds(0)
                .maximumRetryAttempts(0)
                .onPartialBatchItemFailure("string")
                .parallelizationFactor(0)
                .build())
            .filterCriteria(PipeSourceParametersFilterCriteriaArgs.builder()
                .filters(PipeSourceParametersFilterCriteriaFilterArgs.builder()
                    .pattern("string")
                    .build())
                .build())
            .kinesisStreamParameters(PipeSourceParametersKinesisStreamParametersArgs.builder()
                .startingPosition("string")
                .batchSize(0)
                .deadLetterConfig(PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs.builder()
                    .arn("string")
                    .build())
                .maximumBatchingWindowInSeconds(0)
                .maximumRecordAgeInSeconds(0)
                .maximumRetryAttempts(0)
                .onPartialBatchItemFailure("string")
                .parallelizationFactor(0)
                .startingPositionTimestamp("string")
                .build())
            .managedStreamingKafkaParameters(PipeSourceParametersManagedStreamingKafkaParametersArgs.builder()
                .topicName("string")
                .batchSize(0)
                .consumerGroupId("string")
                .credentials(PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs.builder()
                    .clientCertificateTlsAuth("string")
                    .saslScram512Auth("string")
                    .build())
                .maximumBatchingWindowInSeconds(0)
                .startingPosition("string")
                .build())
            .rabbitmqBrokerParameters(PipeSourceParametersRabbitmqBrokerParametersArgs.builder()
                .credentials(PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs.builder()
                    .basicAuth("string")
                    .build())
                .queueName("string")
                .batchSize(0)
                .maximumBatchingWindowInSeconds(0)
                .virtualHost("string")
                .build())
            .selfManagedKafkaParameters(PipeSourceParametersSelfManagedKafkaParametersArgs.builder()
                .topicName("string")
                .additionalBootstrapServers("string")
                .batchSize(0)
                .consumerGroupId("string")
                .credentials(PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs.builder()
                    .basicAuth("string")
                    .clientCertificateTlsAuth("string")
                    .saslScram256Auth("string")
                    .saslScram512Auth("string")
                    .build())
                .maximumBatchingWindowInSeconds(0)
                .serverRootCaCertificate("string")
                .startingPosition("string")
                .vpc(PipeSourceParametersSelfManagedKafkaParametersVpcArgs.builder()
                    .securityGroups("string")
                    .subnets("string")
                    .build())
                .build())
            .sqsQueueParameters(PipeSourceParametersSqsQueueParametersArgs.builder()
                .batchSize(0)
                .maximumBatchingWindowInSeconds(0)
                .build())
            .build())
        .tags(Map.of("string", "string"))
        .desiredState("string")
        .targetParameters(PipeTargetParametersArgs.builder()
            .batchJobParameters(PipeTargetParametersBatchJobParametersArgs.builder()
                .jobDefinition("string")
                .jobName("string")
                .arrayProperties(PipeTargetParametersBatchJobParametersArrayPropertiesArgs.builder()
                    .size(0)
                    .build())
                .containerOverrides(PipeTargetParametersBatchJobParametersContainerOverridesArgs.builder()
                    .commands("string")
                    .environments(PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs.builder()
                        .name("string")
                        .value("string")
                        .build())
                    .instanceType("string")
                    .resourceRequirements(PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs.builder()
                        .type("string")
                        .value("string")
                        .build())
                    .build())
                .dependsOns(PipeTargetParametersBatchJobParametersDependsOnArgs.builder()
                    .jobId("string")
                    .type("string")
                    .build())
                .parameters(Map.of("string", "string"))
                .retryStrategy(PipeTargetParametersBatchJobParametersRetryStrategyArgs.builder()
                    .attempts(0)
                    .build())
                .build())
            .cloudwatchLogsParameters(PipeTargetParametersCloudwatchLogsParametersArgs.builder()
                .logStreamName("string")
                .timestamp("string")
                .build())
            .ecsTaskParameters(PipeTargetParametersEcsTaskParametersArgs.builder()
                .taskDefinitionArn("string")
                .overrides(PipeTargetParametersEcsTaskParametersOverridesArgs.builder()
                    .containerOverrides(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs.builder()
                        .commands("string")
                        .cpu(0)
                        .environmentFiles(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs.builder()
                            .type("string")
                            .value("string")
                            .build())
                        .environments(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs.builder()
                            .name("string")
                            .value("string")
                            .build())
                        .memory(0)
                        .memoryReservation(0)
                        .name("string")
                        .resourceRequirements(PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs.builder()
                            .type("string")
                            .value("string")
                            .build())
                        .build())
                    .cpu("string")
                    .ephemeralStorage(PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs.builder()
                        .sizeInGib(0)
                        .build())
                    .executionRoleArn("string")
                    .inferenceAcceleratorOverrides(PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs.builder()
                        .deviceName("string")
                        .deviceType("string")
                        .build())
                    .memory("string")
                    .taskRoleArn("string")
                    .build())
                .placementStrategies(PipeTargetParametersEcsTaskParametersPlacementStrategyArgs.builder()
                    .field("string")
                    .type("string")
                    .build())
                .group("string")
                .launchType("string")
                .networkConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs.builder()
                    .awsVpcConfiguration(PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs.builder()
                        .assignPublicIp("string")
                        .securityGroups("string")
                        .subnets("string")
                        .build())
                    .build())
                .capacityProviderStrategies(PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs.builder()
                    .capacityProvider("string")
                    .base(0)
                    .weight(0)
                    .build())
                .placementConstraints(PipeTargetParametersEcsTaskParametersPlacementConstraintArgs.builder()
                    .expression("string")
                    .type("string")
                    .build())
                .enableExecuteCommand(false)
                .platformVersion("string")
                .propagateTags("string")
                .referenceId("string")
                .tags(Map.of("string", "string"))
                .taskCount(0)
                .enableEcsManagedTags(false)
                .build())
            .eventbridgeEventBusParameters(PipeTargetParametersEventbridgeEventBusParametersArgs.builder()
                .detailType("string")
                .endpointId("string")
                .resources("string")
                .source("string")
                .time("string")
                .build())
            .httpParameters(PipeTargetParametersHttpParametersArgs.builder()
                .headerParameters(Map.of("string", "string"))
                .pathParameterValues("string")
                .queryStringParameters(Map.of("string", "string"))
                .build())
            .inputTemplate("string")
            .kinesisStreamParameters(PipeTargetParametersKinesisStreamParametersArgs.builder()
                .partitionKey("string")
                .build())
            .lambdaFunctionParameters(PipeTargetParametersLambdaFunctionParametersArgs.builder()
                .invocationType("string")
                .build())
            .redshiftDataParameters(PipeTargetParametersRedshiftDataParametersArgs.builder()
                .database("string")
                .sqls("string")
                .dbUser("string")
                .secretManagerArn("string")
                .statementName("string")
                .withEvent(false)
                .build())
            .sagemakerPipelineParameters(PipeTargetParametersSagemakerPipelineParametersArgs.builder()
                .pipelineParameters(PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs.builder()
                    .name("string")
                    .value("string")
                    .build())
                .build())
            .sqsQueueParameters(PipeTargetParametersSqsQueueParametersArgs.builder()
                .messageDeduplicationId("string")
                .messageGroupId("string")
                .build())
            .stepFunctionStateMachineParameters(PipeTargetParametersStepFunctionStateMachineParametersArgs.builder()
                .invocationType("string")
                .build())
            .build())
        .build());
    
    pipe_resource = aws.pipes.Pipe("pipeResource",
        role_arn="string",
        target="string",
        source="string",
        name_prefix="string",
        log_configuration={
            "level": "string",
            "cloudwatch_logs_log_destination": {
                "log_group_arn": "string",
            },
            "firehose_log_destination": {
                "delivery_stream_arn": "string",
            },
            "include_execution_datas": ["string"],
            "s3_log_destination": {
                "bucket_name": "string",
                "bucket_owner": "string",
                "output_format": "string",
                "prefix": "string",
            },
        },
        name="string",
        description="string",
        enrichment_parameters={
            "http_parameters": {
                "header_parameters": {
                    "string": "string",
                },
                "path_parameter_values": "string",
                "query_string_parameters": {
                    "string": "string",
                },
            },
            "input_template": "string",
        },
        enrichment="string",
        source_parameters={
            "activemq_broker_parameters": {
                "credentials": {
                    "basic_auth": "string",
                },
                "queue_name": "string",
                "batch_size": 0,
                "maximum_batching_window_in_seconds": 0,
            },
            "dynamodb_stream_parameters": {
                "starting_position": "string",
                "batch_size": 0,
                "dead_letter_config": {
                    "arn": "string",
                },
                "maximum_batching_window_in_seconds": 0,
                "maximum_record_age_in_seconds": 0,
                "maximum_retry_attempts": 0,
                "on_partial_batch_item_failure": "string",
                "parallelization_factor": 0,
            },
            "filter_criteria": {
                "filters": [{
                    "pattern": "string",
                }],
            },
            "kinesis_stream_parameters": {
                "starting_position": "string",
                "batch_size": 0,
                "dead_letter_config": {
                    "arn": "string",
                },
                "maximum_batching_window_in_seconds": 0,
                "maximum_record_age_in_seconds": 0,
                "maximum_retry_attempts": 0,
                "on_partial_batch_item_failure": "string",
                "parallelization_factor": 0,
                "starting_position_timestamp": "string",
            },
            "managed_streaming_kafka_parameters": {
                "topic_name": "string",
                "batch_size": 0,
                "consumer_group_id": "string",
                "credentials": {
                    "client_certificate_tls_auth": "string",
                    "sasl_scram512_auth": "string",
                },
                "maximum_batching_window_in_seconds": 0,
                "starting_position": "string",
            },
            "rabbitmq_broker_parameters": {
                "credentials": {
                    "basic_auth": "string",
                },
                "queue_name": "string",
                "batch_size": 0,
                "maximum_batching_window_in_seconds": 0,
                "virtual_host": "string",
            },
            "self_managed_kafka_parameters": {
                "topic_name": "string",
                "additional_bootstrap_servers": ["string"],
                "batch_size": 0,
                "consumer_group_id": "string",
                "credentials": {
                    "basic_auth": "string",
                    "client_certificate_tls_auth": "string",
                    "sasl_scram256_auth": "string",
                    "sasl_scram512_auth": "string",
                },
                "maximum_batching_window_in_seconds": 0,
                "server_root_ca_certificate": "string",
                "starting_position": "string",
                "vpc": {
                    "security_groups": ["string"],
                    "subnets": ["string"],
                },
            },
            "sqs_queue_parameters": {
                "batch_size": 0,
                "maximum_batching_window_in_seconds": 0,
            },
        },
        tags={
            "string": "string",
        },
        desired_state="string",
        target_parameters={
            "batch_job_parameters": {
                "job_definition": "string",
                "job_name": "string",
                "array_properties": {
                    "size": 0,
                },
                "container_overrides": {
                    "commands": ["string"],
                    "environments": [{
                        "name": "string",
                        "value": "string",
                    }],
                    "instance_type": "string",
                    "resource_requirements": [{
                        "type": "string",
                        "value": "string",
                    }],
                },
                "depends_ons": [{
                    "job_id": "string",
                    "type": "string",
                }],
                "parameters": {
                    "string": "string",
                },
                "retry_strategy": {
                    "attempts": 0,
                },
            },
            "cloudwatch_logs_parameters": {
                "log_stream_name": "string",
                "timestamp": "string",
            },
            "ecs_task_parameters": {
                "task_definition_arn": "string",
                "overrides": {
                    "container_overrides": [{
                        "commands": ["string"],
                        "cpu": 0,
                        "environment_files": [{
                            "type": "string",
                            "value": "string",
                        }],
                        "environments": [{
                            "name": "string",
                            "value": "string",
                        }],
                        "memory": 0,
                        "memory_reservation": 0,
                        "name": "string",
                        "resource_requirements": [{
                            "type": "string",
                            "value": "string",
                        }],
                    }],
                    "cpu": "string",
                    "ephemeral_storage": {
                        "size_in_gib": 0,
                    },
                    "execution_role_arn": "string",
                    "inference_accelerator_overrides": [{
                        "device_name": "string",
                        "device_type": "string",
                    }],
                    "memory": "string",
                    "task_role_arn": "string",
                },
                "placement_strategies": [{
                    "field": "string",
                    "type": "string",
                }],
                "group": "string",
                "launch_type": "string",
                "network_configuration": {
                    "aws_vpc_configuration": {
                        "assign_public_ip": "string",
                        "security_groups": ["string"],
                        "subnets": ["string"],
                    },
                },
                "capacity_provider_strategies": [{
                    "capacity_provider": "string",
                    "base": 0,
                    "weight": 0,
                }],
                "placement_constraints": [{
                    "expression": "string",
                    "type": "string",
                }],
                "enable_execute_command": False,
                "platform_version": "string",
                "propagate_tags": "string",
                "reference_id": "string",
                "tags": {
                    "string": "string",
                },
                "task_count": 0,
                "enable_ecs_managed_tags": False,
            },
            "eventbridge_event_bus_parameters": {
                "detail_type": "string",
                "endpoint_id": "string",
                "resources": ["string"],
                "source": "string",
                "time": "string",
            },
            "http_parameters": {
                "header_parameters": {
                    "string": "string",
                },
                "path_parameter_values": "string",
                "query_string_parameters": {
                    "string": "string",
                },
            },
            "input_template": "string",
            "kinesis_stream_parameters": {
                "partition_key": "string",
            },
            "lambda_function_parameters": {
                "invocation_type": "string",
            },
            "redshift_data_parameters": {
                "database": "string",
                "sqls": ["string"],
                "db_user": "string",
                "secret_manager_arn": "string",
                "statement_name": "string",
                "with_event": False,
            },
            "sagemaker_pipeline_parameters": {
                "pipeline_parameters": [{
                    "name": "string",
                    "value": "string",
                }],
            },
            "sqs_queue_parameters": {
                "message_deduplication_id": "string",
                "message_group_id": "string",
            },
            "step_function_state_machine_parameters": {
                "invocation_type": "string",
            },
        })
    
    const pipeResource = new aws.pipes.Pipe("pipeResource", {
        roleArn: "string",
        target: "string",
        source: "string",
        namePrefix: "string",
        logConfiguration: {
            level: "string",
            cloudwatchLogsLogDestination: {
                logGroupArn: "string",
            },
            firehoseLogDestination: {
                deliveryStreamArn: "string",
            },
            includeExecutionDatas: ["string"],
            s3LogDestination: {
                bucketName: "string",
                bucketOwner: "string",
                outputFormat: "string",
                prefix: "string",
            },
        },
        name: "string",
        description: "string",
        enrichmentParameters: {
            httpParameters: {
                headerParameters: {
                    string: "string",
                },
                pathParameterValues: "string",
                queryStringParameters: {
                    string: "string",
                },
            },
            inputTemplate: "string",
        },
        enrichment: "string",
        sourceParameters: {
            activemqBrokerParameters: {
                credentials: {
                    basicAuth: "string",
                },
                queueName: "string",
                batchSize: 0,
                maximumBatchingWindowInSeconds: 0,
            },
            dynamodbStreamParameters: {
                startingPosition: "string",
                batchSize: 0,
                deadLetterConfig: {
                    arn: "string",
                },
                maximumBatchingWindowInSeconds: 0,
                maximumRecordAgeInSeconds: 0,
                maximumRetryAttempts: 0,
                onPartialBatchItemFailure: "string",
                parallelizationFactor: 0,
            },
            filterCriteria: {
                filters: [{
                    pattern: "string",
                }],
            },
            kinesisStreamParameters: {
                startingPosition: "string",
                batchSize: 0,
                deadLetterConfig: {
                    arn: "string",
                },
                maximumBatchingWindowInSeconds: 0,
                maximumRecordAgeInSeconds: 0,
                maximumRetryAttempts: 0,
                onPartialBatchItemFailure: "string",
                parallelizationFactor: 0,
                startingPositionTimestamp: "string",
            },
            managedStreamingKafkaParameters: {
                topicName: "string",
                batchSize: 0,
                consumerGroupId: "string",
                credentials: {
                    clientCertificateTlsAuth: "string",
                    saslScram512Auth: "string",
                },
                maximumBatchingWindowInSeconds: 0,
                startingPosition: "string",
            },
            rabbitmqBrokerParameters: {
                credentials: {
                    basicAuth: "string",
                },
                queueName: "string",
                batchSize: 0,
                maximumBatchingWindowInSeconds: 0,
                virtualHost: "string",
            },
            selfManagedKafkaParameters: {
                topicName: "string",
                additionalBootstrapServers: ["string"],
                batchSize: 0,
                consumerGroupId: "string",
                credentials: {
                    basicAuth: "string",
                    clientCertificateTlsAuth: "string",
                    saslScram256Auth: "string",
                    saslScram512Auth: "string",
                },
                maximumBatchingWindowInSeconds: 0,
                serverRootCaCertificate: "string",
                startingPosition: "string",
                vpc: {
                    securityGroups: ["string"],
                    subnets: ["string"],
                },
            },
            sqsQueueParameters: {
                batchSize: 0,
                maximumBatchingWindowInSeconds: 0,
            },
        },
        tags: {
            string: "string",
        },
        desiredState: "string",
        targetParameters: {
            batchJobParameters: {
                jobDefinition: "string",
                jobName: "string",
                arrayProperties: {
                    size: 0,
                },
                containerOverrides: {
                    commands: ["string"],
                    environments: [{
                        name: "string",
                        value: "string",
                    }],
                    instanceType: "string",
                    resourceRequirements: [{
                        type: "string",
                        value: "string",
                    }],
                },
                dependsOns: [{
                    jobId: "string",
                    type: "string",
                }],
                parameters: {
                    string: "string",
                },
                retryStrategy: {
                    attempts: 0,
                },
            },
            cloudwatchLogsParameters: {
                logStreamName: "string",
                timestamp: "string",
            },
            ecsTaskParameters: {
                taskDefinitionArn: "string",
                overrides: {
                    containerOverrides: [{
                        commands: ["string"],
                        cpu: 0,
                        environmentFiles: [{
                            type: "string",
                            value: "string",
                        }],
                        environments: [{
                            name: "string",
                            value: "string",
                        }],
                        memory: 0,
                        memoryReservation: 0,
                        name: "string",
                        resourceRequirements: [{
                            type: "string",
                            value: "string",
                        }],
                    }],
                    cpu: "string",
                    ephemeralStorage: {
                        sizeInGib: 0,
                    },
                    executionRoleArn: "string",
                    inferenceAcceleratorOverrides: [{
                        deviceName: "string",
                        deviceType: "string",
                    }],
                    memory: "string",
                    taskRoleArn: "string",
                },
                placementStrategies: [{
                    field: "string",
                    type: "string",
                }],
                group: "string",
                launchType: "string",
                networkConfiguration: {
                    awsVpcConfiguration: {
                        assignPublicIp: "string",
                        securityGroups: ["string"],
                        subnets: ["string"],
                    },
                },
                capacityProviderStrategies: [{
                    capacityProvider: "string",
                    base: 0,
                    weight: 0,
                }],
                placementConstraints: [{
                    expression: "string",
                    type: "string",
                }],
                enableExecuteCommand: false,
                platformVersion: "string",
                propagateTags: "string",
                referenceId: "string",
                tags: {
                    string: "string",
                },
                taskCount: 0,
                enableEcsManagedTags: false,
            },
            eventbridgeEventBusParameters: {
                detailType: "string",
                endpointId: "string",
                resources: ["string"],
                source: "string",
                time: "string",
            },
            httpParameters: {
                headerParameters: {
                    string: "string",
                },
                pathParameterValues: "string",
                queryStringParameters: {
                    string: "string",
                },
            },
            inputTemplate: "string",
            kinesisStreamParameters: {
                partitionKey: "string",
            },
            lambdaFunctionParameters: {
                invocationType: "string",
            },
            redshiftDataParameters: {
                database: "string",
                sqls: ["string"],
                dbUser: "string",
                secretManagerArn: "string",
                statementName: "string",
                withEvent: false,
            },
            sagemakerPipelineParameters: {
                pipelineParameters: [{
                    name: "string",
                    value: "string",
                }],
            },
            sqsQueueParameters: {
                messageDeduplicationId: "string",
                messageGroupId: "string",
            },
            stepFunctionStateMachineParameters: {
                invocationType: "string",
            },
        },
    });
    
    type: aws:pipes:Pipe
    properties:
        description: string
        desiredState: string
        enrichment: string
        enrichmentParameters:
            httpParameters:
                headerParameters:
                    string: string
                pathParameterValues: string
                queryStringParameters:
                    string: string
            inputTemplate: string
        logConfiguration:
            cloudwatchLogsLogDestination:
                logGroupArn: string
            firehoseLogDestination:
                deliveryStreamArn: string
            includeExecutionDatas:
                - string
            level: string
            s3LogDestination:
                bucketName: string
                bucketOwner: string
                outputFormat: string
                prefix: string
        name: string
        namePrefix: string
        roleArn: string
        source: string
        sourceParameters:
            activemqBrokerParameters:
                batchSize: 0
                credentials:
                    basicAuth: string
                maximumBatchingWindowInSeconds: 0
                queueName: string
            dynamodbStreamParameters:
                batchSize: 0
                deadLetterConfig:
                    arn: string
                maximumBatchingWindowInSeconds: 0
                maximumRecordAgeInSeconds: 0
                maximumRetryAttempts: 0
                onPartialBatchItemFailure: string
                parallelizationFactor: 0
                startingPosition: string
            filterCriteria:
                filters:
                    - pattern: string
            kinesisStreamParameters:
                batchSize: 0
                deadLetterConfig:
                    arn: string
                maximumBatchingWindowInSeconds: 0
                maximumRecordAgeInSeconds: 0
                maximumRetryAttempts: 0
                onPartialBatchItemFailure: string
                parallelizationFactor: 0
                startingPosition: string
                startingPositionTimestamp: string
            managedStreamingKafkaParameters:
                batchSize: 0
                consumerGroupId: string
                credentials:
                    clientCertificateTlsAuth: string
                    saslScram512Auth: string
                maximumBatchingWindowInSeconds: 0
                startingPosition: string
                topicName: string
            rabbitmqBrokerParameters:
                batchSize: 0
                credentials:
                    basicAuth: string
                maximumBatchingWindowInSeconds: 0
                queueName: string
                virtualHost: string
            selfManagedKafkaParameters:
                additionalBootstrapServers:
                    - string
                batchSize: 0
                consumerGroupId: string
                credentials:
                    basicAuth: string
                    clientCertificateTlsAuth: string
                    saslScram256Auth: string
                    saslScram512Auth: string
                maximumBatchingWindowInSeconds: 0
                serverRootCaCertificate: string
                startingPosition: string
                topicName: string
                vpc:
                    securityGroups:
                        - string
                    subnets:
                        - string
            sqsQueueParameters:
                batchSize: 0
                maximumBatchingWindowInSeconds: 0
        tags:
            string: string
        target: string
        targetParameters:
            batchJobParameters:
                arrayProperties:
                    size: 0
                containerOverrides:
                    commands:
                        - string
                    environments:
                        - name: string
                          value: string
                    instanceType: string
                    resourceRequirements:
                        - type: string
                          value: string
                dependsOns:
                    - jobId: string
                      type: string
                jobDefinition: string
                jobName: string
                parameters:
                    string: string
                retryStrategy:
                    attempts: 0
            cloudwatchLogsParameters:
                logStreamName: string
                timestamp: string
            ecsTaskParameters:
                capacityProviderStrategies:
                    - base: 0
                      capacityProvider: string
                      weight: 0
                enableEcsManagedTags: false
                enableExecuteCommand: false
                group: string
                launchType: string
                networkConfiguration:
                    awsVpcConfiguration:
                        assignPublicIp: string
                        securityGroups:
                            - string
                        subnets:
                            - string
                overrides:
                    containerOverrides:
                        - commands:
                            - string
                          cpu: 0
                          environmentFiles:
                            - type: string
                              value: string
                          environments:
                            - name: string
                              value: string
                          memory: 0
                          memoryReservation: 0
                          name: string
                          resourceRequirements:
                            - type: string
                              value: string
                    cpu: string
                    ephemeralStorage:
                        sizeInGib: 0
                    executionRoleArn: string
                    inferenceAcceleratorOverrides:
                        - deviceName: string
                          deviceType: string
                    memory: string
                    taskRoleArn: string
                placementConstraints:
                    - expression: string
                      type: string
                placementStrategies:
                    - field: string
                      type: string
                platformVersion: string
                propagateTags: string
                referenceId: string
                tags:
                    string: string
                taskCount: 0
                taskDefinitionArn: string
            eventbridgeEventBusParameters:
                detailType: string
                endpointId: string
                resources:
                    - string
                source: string
                time: string
            httpParameters:
                headerParameters:
                    string: string
                pathParameterValues: string
                queryStringParameters:
                    string: string
            inputTemplate: string
            kinesisStreamParameters:
                partitionKey: string
            lambdaFunctionParameters:
                invocationType: string
            redshiftDataParameters:
                database: string
                dbUser: string
                secretManagerArn: string
                sqls:
                    - string
                statementName: string
                withEvent: false
            sagemakerPipelineParameters:
                pipelineParameters:
                    - name: string
                      value: string
            sqsQueueParameters:
                messageDeduplicationId: string
                messageGroupId: string
            stepFunctionStateMachineParameters:
                invocationType: string
    

    Pipe Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The Pipe resource accepts the following input properties:

    RoleArn string
    ARN of the role that allows the pipe to send data to the target.
    Source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    Target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    Description string
    A description of the pipe. At most 512 characters.
    DesiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    Enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    EnrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    LogConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    Name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    NamePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    SourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TargetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    RoleArn string
    ARN of the role that allows the pipe to send data to the target.
    Source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    Target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    Description string
    A description of the pipe. At most 512 characters.
    DesiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    Enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    EnrichmentParameters PipeEnrichmentParametersArgs
    Parameters to configure enrichment for your pipe. Detailed below.
    LogConfiguration PipeLogConfigurationArgs
    Logging configuration settings for the pipe. Detailed below.
    Name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    NamePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    SourceParameters PipeSourceParametersArgs
    Parameters to configure a source for the pipe. Detailed below.
    Tags map[string]string
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TargetParameters PipeTargetParametersArgs
    Parameters to configure a target for your pipe. Detailed below.
    roleArn String
    ARN of the role that allows the pipe to send data to the target.
    source String
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    target String

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    description String
    A description of the pipe. At most 512 characters.
    desiredState String
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment String
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    name String
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix String
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    sourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    tags Map<String,String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    targetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    roleArn string
    ARN of the role that allows the pipe to send data to the target.
    source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    description string
    A description of the pipe. At most 512 characters.
    desiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    sourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    targetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    role_arn str
    ARN of the role that allows the pipe to send data to the target.
    source str
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    target str

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    description str
    A description of the pipe. At most 512 characters.
    desired_state str
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment str
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichment_parameters PipeEnrichmentParametersArgs
    Parameters to configure enrichment for your pipe. Detailed below.
    log_configuration PipeLogConfigurationArgs
    Logging configuration settings for the pipe. Detailed below.
    name str
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    name_prefix str
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    source_parameters PipeSourceParametersArgs
    Parameters to configure a source for the pipe. Detailed below.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    target_parameters PipeTargetParametersArgs
    Parameters to configure a target for your pipe. Detailed below.
    roleArn String
    ARN of the role that allows the pipe to send data to the target.
    source String
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    target String

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    description String
    A description of the pipe. At most 512 characters.
    desiredState String
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment String
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters Property Map
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration Property Map
    Logging configuration settings for the pipe. Detailed below.
    name String
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix String
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    sourceParameters Property Map
    Parameters to configure a source for the pipe. Detailed below.
    tags Map<String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    targetParameters Property Map
    Parameters to configure a target for your pipe. Detailed below.
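
    As a quick illustration of the optional arguments, here is a short TypeScript sketch; the role and queue ARNs are hypothetical placeholders, and namePrefix is shown in place of name:

    import * as aws from "@pulumi/aws";
    
    // Hypothetical ARNs; substitute your own role and queues.
    const pipe = new aws.pipes.Pipe("optional-inputs", {
        namePrefix: "orders-", // conflicts with `name`; a unique suffix is appended
        description: "Routes order events between queues",
        desiredState: "RUNNING", // or "STOPPED"
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
        tags: { team: "platform" },
    });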

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:

    Arn string
    ARN of this pipe.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Arn string
    ARN of this pipe.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    ARN of this pipe.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn string
    ARN of this pipe.
    id string
    The provider-assigned unique ID for this managed resource.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn str
    ARN of this pipe.
    id str
    The provider-assigned unique ID for this managed resource.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    ARN of this pipe.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.
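
    For example, in TypeScript the computed outputs can be exported like any other Pulumi outputs (using the pipe resource from the sketch above):

    export const pipeArn = pipe.arn; // ARN of this pipe
    export const pipeId = pipe.id;   // provider-assigned unique ID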

    Look up Existing Pipe Resource

    Get an existing Pipe resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: PipeState, opts?: CustomResourceOptions): Pipe
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            arn: Optional[str] = None,
            description: Optional[str] = None,
            desired_state: Optional[str] = None,
            enrichment: Optional[str] = None,
            enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
            log_configuration: Optional[PipeLogConfigurationArgs] = None,
            name: Optional[str] = None,
            name_prefix: Optional[str] = None,
            role_arn: Optional[str] = None,
            source: Optional[str] = None,
            source_parameters: Optional[PipeSourceParametersArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tags_all: Optional[Mapping[str, str]] = None,
            target: Optional[str] = None,
            target_parameters: Optional[PipeTargetParametersArgs] = None) -> Pipe
    func GetPipe(ctx *Context, name string, id IDInput, state *PipeState, opts ...ResourceOption) (*Pipe, error)
    public static Pipe Get(string name, Input<string> id, PipeState? state, CustomResourceOptions? opts = null)
    public static Pipe get(String name, Output<String> id, PipeState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
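
    For example, a lookup in TypeScript; for this resource the provider ID is typically the pipe name, so "example-pipe" below is a hypothetical name of an existing pipe:

    import * as aws from "@pulumi/aws";
    
    // Look up an existing pipe by its provider ID (the pipe name).
    const existing = aws.pipes.Pipe.get("imported", "example-pipe");
    export const existingTarget = existing.target;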
    The following state arguments are supported:
    Arn string
    ARN of this pipe.
    Description string
    A description of the pipe. At most 512 characters.
    DesiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    Enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    EnrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    LogConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    Name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    NamePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    RoleArn string
    ARN of the role that allows the pipe to send data to the target.
    Source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    SourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    TargetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    Arn string
    ARN of this pipe.
    Description string
    A description of the pipe. At most 512 characters.
    DesiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    Enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    EnrichmentParameters PipeEnrichmentParametersArgs
    Parameters to configure enrichment for your pipe. Detailed below.
    LogConfiguration PipeLogConfigurationArgs
    Logging configuration settings for the pipe. Detailed below.
    Name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    NamePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    RoleArn string
    ARN of the role that allows the pipe to send data to the target.
    Source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    SourceParameters PipeSourceParametersArgs
    Parameters to configure a source for the pipe. Detailed below.
    Tags map[string]string
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    TargetParameters PipeTargetParametersArgs
    Parameters to configure a target for your pipe. Detailed below.
    arn String
    ARN of this pipe.
    description String
    A description of the pipe. At most 512 characters.
    desiredState String
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment String
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    name String
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix String
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    roleArn String
    ARN of the role that allows the pipe to send data to the target.
    source String
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    sourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    tags Map<String,String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    target String

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    targetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    arn string
    ARN of this pipe.
    description string
    A description of the pipe. At most 512 characters.
    desiredState string
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment string
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters PipeEnrichmentParameters
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration PipeLogConfiguration
    Logging configuration settings for the pipe. Detailed below.
    name string
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix string
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    roleArn string
    ARN of the role that allows the pipe to send data to the target.
    source string
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    sourceParameters PipeSourceParameters
    Parameters to configure a source for the pipe. Detailed below.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    target string

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    targetParameters PipeTargetParameters
    Parameters to configure a target for your pipe. Detailed below.
    arn str
    ARN of this pipe.
    description str
    A description of the pipe. At most 512 characters.
    desired_state str
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment str
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichment_parameters PipeEnrichmentParametersArgs
    Parameters to configure enrichment for your pipe. Detailed below.
    log_configuration PipeLogConfigurationArgs
    Logging configuration settings for the pipe. Detailed below.
    name str
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    name_prefix str
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    role_arn str
    ARN of the role that allows the pipe to send data to the target.
    source str
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    source_parameters PipeSourceParametersArgs
    Parameters to configure a source for the pipe. Detailed below.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    target str

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    target_parameters PipeTargetParametersArgs
    Parameters to configure a target for your pipe. Detailed below.
    arn String
    ARN of this pipe.
    description String
    A description of the pipe. At most 512 characters.
    desiredState String
    The state the pipe should be in. One of: RUNNING, STOPPED.
    enrichment String
    Enrichment resource of the pipe (typically an ARN). Read more about enrichment in the User Guide.
    enrichmentParameters Property Map
    Parameters to configure enrichment for your pipe. Detailed below.
    logConfiguration Property Map
    Logging configuration settings for the pipe. Detailed below.
    name String
    Name of the pipe. If omitted, the provider will assign a random, unique name. Conflicts with name_prefix.
    namePrefix String
    Creates a unique name beginning with the specified prefix. Conflicts with name.
    roleArn String
    ARN of the role that allows the pipe to send data to the target.
    source String
    Source resource of the pipe. This is typically an ARN (Amazon Resource Name); for a self-managed Kafka cluster, use 'smk://' followed by the bootstrap server's address instead.
    sourceParameters Property Map
    Parameters to configure a source for the pipe. Detailed below.
    tags Map<String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    target String

    Target resource of the pipe (typically an ARN).

    The following arguments are optional:

    targetParameters Property Map
    Parameters to configure a target for your pipe. Detailed below.

    Supporting Types

    PipeEnrichmentParameters, PipeEnrichmentParametersArgs

    HttpParameters PipeEnrichmentParametersHttpParameters
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    InputTemplate string
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    HttpParameters PipeEnrichmentParametersHttpParameters
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    InputTemplate string
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    httpParameters PipeEnrichmentParametersHttpParameters
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    inputTemplate String
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    httpParameters PipeEnrichmentParametersHttpParameters
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    inputTemplate string
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    http_parameters PipeEnrichmentParametersHttpParameters
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    input_template str
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    httpParameters Property Map
    Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or an EventBridge ApiDestination. If you specify an API Gateway REST API or an EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of the request to your target. If you're using ApiDestinations, the corresponding Connection can also have these values configured; in case of conflicting keys, values from the Connection take precedence. Detailed below.
    inputTemplate String
    Valid JSON text passed to the target. When an input template is specified, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
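
    A sketch of enrichmentParameters in TypeScript; the role, queue, and ApiDestination ARNs are hypothetical placeholders:

    import * as aws from "@pulumi/aws";
    
    const enriched = new aws.pipes.Pipe("enriched", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
        // Hypothetical EventBridge ApiDestination used as the enrichment step.
        enrichment: "arn:aws:events:us-east-1:123456789012:api-destination/example/abcd1234",
        enrichmentParameters: {
            httpParameters: {
                headerParameters: { "X-Environment": "production" },
                queryStringParameters: { verbose: "true" },
            },
        },
    });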

    PipeEnrichmentParametersHttpParameters, PipeEnrichmentParametersHttpParametersArgs

    HeaderParameters Dictionary<string, string>
    PathParameterValues string
    QueryStringParameters Dictionary<string, string>
    HeaderParameters map[string]string
    PathParameterValues string
    QueryStringParameters map[string]string
    headerParameters Map<String,String>
    pathParameterValues String
    queryStringParameters Map<String,String>
    headerParameters {[key: string]: string}
    pathParameterValues string
    queryStringParameters {[key: string]: string}

    PipeLogConfiguration, PipeLogConfigurationArgs

    Level string
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    CloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    FirehoseLogDestination PipeLogConfigurationFirehoseLogDestination
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    IncludeExecutionDatas List<string>
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    S3LogDestination PipeLogConfigurationS3LogDestination
    Amazon S3 logging configuration settings for the pipe. Detailed below.
    Level string
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    CloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    FirehoseLogDestination PipeLogConfigurationFirehoseLogDestination
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    IncludeExecutionDatas []string
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    S3LogDestination PipeLogConfigurationS3LogDestination
    Amazon S3 logging configuration settings for the pipe. Detailed below.
    level String
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    cloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    firehoseLogDestination PipeLogConfigurationFirehoseLogDestination
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    includeExecutionDatas List<String>
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    s3LogDestination PipeLogConfigurationS3LogDestination
    Amazon S3 logging configuration settings for the pipe. Detailed below.
    level string
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    cloudwatchLogsLogDestination PipeLogConfigurationCloudwatchLogsLogDestination
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    firehoseLogDestination PipeLogConfigurationFirehoseLogDestination
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    includeExecutionDatas string[]
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    s3LogDestination PipeLogConfigurationS3LogDestination
    Amazon S3 logging configuration settings for the pipe. Detailed below.
    level str
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    cloudwatch_logs_log_destination PipeLogConfigurationCloudwatchLogsLogDestination
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    firehose_log_destination PipeLogConfigurationFirehoseLogDestination
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    include_execution_datas Sequence[str]
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    s3_log_destination PipeLogConfigurationS3LogDestination
    Amazon S3 logging configuration settings for the pipe. Detailed below.
    level String
    The level of logging detail to include. Valid values: OFF, ERROR, INFO, TRACE.
    cloudwatchLogsLogDestination Property Map
    Amazon CloudWatch Logs logging configuration settings for the pipe. Detailed below.
    firehoseLogDestination Property Map
    Amazon Kinesis Data Firehose logging configuration settings for the pipe. Detailed below.
    includeExecutionDatas List<String>
    String list that specifies whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe. Valid values: ALL.
    s3LogDestination Property Map
    Amazon S3 logging configuration settings for the pipe. Detailed below.
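
    A sketch of logConfiguration in TypeScript, sending INFO-level records to a CloudWatch log group; the role and queue ARNs are hypothetical placeholders:

    import * as aws from "@pulumi/aws";
    
    const logGroup = new aws.cloudwatch.LogGroup("pipe-logs");
    
    const logged = new aws.pipes.Pipe("logged", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
        logConfiguration: {
            level: "INFO", // OFF, ERROR, INFO, or TRACE
            cloudwatchLogsLogDestination: {
                logGroupArn: logGroup.arn,
            },
            includeExecutionDatas: ["ALL"], // include payload, awsRequest, awsResponse
        },
    });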

    PipeLogConfigurationCloudwatchLogsLogDestination, PipeLogConfigurationCloudwatchLogsLogDestinationArgs

    LogGroupArn string
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
    LogGroupArn string
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
    logGroupArn String
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
    logGroupArn string
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
    log_group_arn str
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.
    logGroupArn String
    Amazon Resource Name (ARN) of the CloudWatch log group to which EventBridge sends the log records.

    PipeLogConfigurationFirehoseLogDestination, PipeLogConfigurationFirehoseLogDestinationArgs

    DeliveryStreamArn string
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
    DeliveryStreamArn string
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
    deliveryStreamArn String
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
    deliveryStreamArn string
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
    delivery_stream_arn str
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.
    deliveryStreamArn String
    Amazon Resource Name (ARN) of the Kinesis Data Firehose delivery stream to which EventBridge delivers the pipe log records.

    PipeLogConfigurationS3LogDestination, PipeLogConfigurationS3LogDestinationArgs

    BucketName string
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    BucketOwner string
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    OutputFormat string
    EventBridge format for the log records. Valid values: json, plain, w3c.
    Prefix string
    Prefix text with which to begin Amazon S3 log object names.
    BucketName string
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    BucketOwner string
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    OutputFormat string
    EventBridge format for the log records. Valid values: json, plain, w3c.
    Prefix string
    Prefix text with which to begin Amazon S3 log object names.
    bucketName String
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    bucketOwner String
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    outputFormat String
    EventBridge format for the log records. Valid values: json, plain, w3c.
    prefix String
    Prefix text with which to begin Amazon S3 log object names.
    bucketName string
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    bucketOwner string
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    outputFormat string
    EventBridge format for the log records. Valid values: json, plain, w3c.
    prefix string
    Prefix text with which to begin Amazon S3 log object names.
    bucket_name str
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    bucket_owner str
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    output_format str
    EventBridge format for the log records. Valid values: json, plain, w3c.
    prefix str
    Prefix text with which to begin Amazon S3 log object names.
    bucketName String
    Name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    bucketOwner String
    Amazon Web Services account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
    outputFormat String
    EventBridge format for the log records. Valid values: json, plain, w3c.
    prefix String
    Prefix text with which to begin Amazon S3 log object names.

    PipeSourceParameters, PipeSourceParametersArgs

    ActivemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    DynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
    The parameters for using a DynamoDB stream as a source. Detailed below.
    FilterCriteria PipeSourceParametersFilterCriteria
    The collection of event patterns used to filter events. Detailed below.
    KinesisStreamParameters PipeSourceParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a source. Detailed below.
    ManagedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
    The parameters for using an MSK stream as a source. Detailed below.
    RabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    SelfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    SqsQueueParameters PipeSourceParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a source. Detailed below.
    ActivemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    DynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
    The parameters for using a DynamoDB stream as a source. Detailed below.
    FilterCriteria PipeSourceParametersFilterCriteria
    The collection of event patterns used to filter events. Detailed below.
    KinesisStreamParameters PipeSourceParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a source. Detailed below.
    ManagedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
    The parameters for using an MSK stream as a source. Detailed below.
    RabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    SelfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    SqsQueueParameters PipeSourceParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a source. Detailed below.
    activemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    dynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
    The parameters for using a DynamoDB stream as a source. Detailed below.
    filterCriteria PipeSourceParametersFilterCriteria
    The collection of event patterns used to filter events. Detailed below.
    kinesisStreamParameters PipeSourceParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a source. Detailed below.
    managedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
    The parameters for using an MSK stream as a source. Detailed below.
    rabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    selfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    sqsQueueParameters PipeSourceParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a source. Detailed below.
    activemqBrokerParameters PipeSourceParametersActivemqBrokerParameters
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    dynamodbStreamParameters PipeSourceParametersDynamodbStreamParameters
    The parameters for using a DynamoDB stream as a source. Detailed below.
    filterCriteria PipeSourceParametersFilterCriteria
    The collection of event patterns used to filter events. Detailed below.
    kinesisStreamParameters PipeSourceParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a source. Detailed below.
    managedStreamingKafkaParameters PipeSourceParametersManagedStreamingKafkaParameters
    The parameters for using an MSK stream as a source. Detailed below.
    rabbitmqBrokerParameters PipeSourceParametersRabbitmqBrokerParameters
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    selfManagedKafkaParameters PipeSourceParametersSelfManagedKafkaParameters
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    sqsQueueParameters PipeSourceParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a source. Detailed below.
    activemq_broker_parameters PipeSourceParametersActivemqBrokerParameters
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    dynamodb_stream_parameters PipeSourceParametersDynamodbStreamParameters
    The parameters for using a DynamoDB stream as a source. Detailed below.
    filter_criteria PipeSourceParametersFilterCriteria
    The collection of event patterns used to filter events. Detailed below.
    kinesis_stream_parameters PipeSourceParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a source. Detailed below.
    managed_streaming_kafka_parameters PipeSourceParametersManagedStreamingKafkaParameters
    The parameters for using an MSK stream as a source. Detailed below.
    rabbitmq_broker_parameters PipeSourceParametersRabbitmqBrokerParameters
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    self_managed_kafka_parameters PipeSourceParametersSelfManagedKafkaParameters
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    sqs_queue_parameters PipeSourceParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a source. Detailed below.
    activemqBrokerParameters Property Map
    The parameters for using an ActiveMQ broker as a source. Detailed below.
    dynamodbStreamParameters Property Map
    The parameters for using a DynamoDB stream as a source. Detailed below.
    filterCriteria Property Map
    The collection of event patterns used to filter events. Detailed below.
    kinesisStreamParameters Property Map
    The parameters for using a Kinesis stream as a source. Detailed below.
    managedStreamingKafkaParameters Property Map
    The parameters for using an MSK stream as a source. Detailed below.
    rabbitmqBrokerParameters Property Map
    The parameters for using a RabbitMQ broker as a source. Detailed below.
    selfManagedKafkaParameters Property Map
    The parameters for using a self-managed Apache Kafka stream as a source. Detailed below.
    sqsQueueParameters Property Map
    The parameters for using an Amazon SQS queue as a source. Detailed below.
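
    A sketch of sourceParameters in TypeScript for an SQS source, combining batching settings with an event filter; the ARNs are hypothetical placeholders:

    import * as aws from "@pulumi/aws";
    
    const filtered = new aws.pipes.Pipe("filtered", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
        sourceParameters: {
            filterCriteria: {
                // Only forward messages whose body has status ACTIVE.
                filters: [{ pattern: JSON.stringify({ body: { status: ["ACTIVE"] } }) }],
            },
            sqsQueueParameters: {
                batchSize: 10,
                maximumBatchingWindowInSeconds: 30,
            },
        },
    });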

    PipeSourceParametersActivemqBrokerParameters, PipeSourceParametersActivemqBrokerParametersArgs

    Credentials PipeSourceParametersActivemqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    QueueName string
    The name of the destination queue to consume. Maximum length of 1000.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    Credentials PipeSourceParametersActivemqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    QueueName string
    The name of the destination queue to consume. Maximum length of 1000.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    credentials PipeSourceParametersActivemqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queueName String
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    credentials PipeSourceParametersActivemqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queueName string
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    credentials PipeSourceParametersActivemqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queue_name str
    The name of the destination queue to consume. Maximum length of 1000.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    credentials Property Map
    The credentials needed to access the resource. Detailed below.
    queueName String
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
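
    A sketch of the ActiveMQ source parameters in TypeScript; the broker ARN, queue name, and Secrets Manager secret ARN are hypothetical placeholders:

    import * as aws from "@pulumi/aws";
    
    const fromActivemq = new aws.pipes.Pipe("from-activemq", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:mq:us-east-1:123456789012:broker:example:b-1234abcd",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue",
        sourceParameters: {
            activemqBrokerParameters: {
                queueName: "orders",
                credentials: {
                    // Secrets Manager secret holding the broker's basic-auth credentials.
                    basicAuth: "arn:aws:secretsmanager:us-east-1:123456789012:secret:mq-credentials-AbCdEf",
                },
                batchSize: 5,
            },
        },
    });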

    PipeSourceParametersActivemqBrokerParametersCredentials, PipeSourceParametersActivemqBrokerParametersCredentialsArgs

    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    basic_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.

    PipeSourceParametersDynamodbStreamParameters, PipeSourceParametersDynamodbStreamParametersArgs

    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    DeadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    MaximumRecordAgeInSeconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    MaximumRetryAttempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    OnPartialBatchItemFailure string
    Defines how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    ParallelizationFactor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    DeadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    MaximumRecordAgeInSeconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    MaximumRetryAttempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    OnPartialBatchItemFailure string
    Defines how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    ParallelizationFactor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds Integer
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts Integer
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure String
    Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retry each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor Integer
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds number
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts number
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure string
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor number
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    starting_position str
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    dead_letter_config PipeSourceParametersDynamodbStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    maximum_record_age_in_seconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximum_retry_attempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    on_partial_batch_item_failure str
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelization_factor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig Property Map
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds Number
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts Number
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure String
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor Number
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
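
    Taken together, these parameters control how the pipe polls a DynamoDB stream and what happens to records it cannot process. The following is a minimal TypeScript sketch; the role (pipeRole), stream-enabled table (table), target queue (targetQueue), and dead-letter queue (dlq) are assumed names defined elsewhere in the program, as in the Basic Usage example:

    import * as aws from "@pulumi/aws";

    // Sketch only: pipeRole, table, targetQueue, and dlq are assumptions,
    // not part of this reference.
    const dynamoPipe = new aws.pipes.Pipe("dynamo-example", {
        roleArn: pipeRole.arn,
        source: table.streamArn,                   // DynamoDB stream ARN
        target: targetQueue.arn,
        sourceParameters: {
            dynamodbStreamParameters: {
                startingPosition: "LATEST",
                batchSize: 100,                        // up to 10000 records per batch
                maximumBatchingWindowInSeconds: 30,    // wait up to 30s to fill a batch
                maximumRecordAgeInSeconds: -1,         // -1 = never discard by age
                maximumRetryAttempts: 3,               // then hand off to the DLQ
                onPartialBatchItemFailure: "AUTOMATIC_BISECT",
                parallelizationFactor: 2,              // 2 concurrent batches per shard
                deadLetterConfig: {
                    arn: dlq.arn,                      // SQS queue for discarded records
                },
            },
        },
    });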

    PipeSourceParametersDynamodbStreamParametersDeadLetterConfig, PipeSourceParametersDynamodbStreamParametersDeadLetterConfigArgs

    Arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    Arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn String
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn str
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn String
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.

    PipeSourceParametersFilterCriteria, PipeSourceParametersFilterCriteriaArgs

    Filters List<PipeSourceParametersFilterCriteriaFilter>
    An array of up to 5 event patterns. Detailed below.
    Filters []PipeSourceParametersFilterCriteriaFilter
    An array of up to 5 event patterns. Detailed below.
    filters List<PipeSourceParametersFilterCriteriaFilter>
    An array of up to 5 event patterns. Detailed below.
    filters PipeSourceParametersFilterCriteriaFilter[]
    An array of up to 5 event patterns. Detailed below.
    filters Sequence[PipeSourceParametersFilterCriteriaFilter]
    An array of up to 5 event patterns. Detailed below.
    filters List<Property Map>
    An array of up to 5 event patterns. Detailed below.

    PipeSourceParametersFilterCriteriaFilter, PipeSourceParametersFilterCriteriaFilterArgs

    Pattern string
    The event pattern. At most 4096 characters.
    Pattern string
    The event pattern. At most 4096 characters.
    pattern String
    The event pattern. At most 4096 characters.
    pattern string
    The event pattern. At most 4096 characters.
    pattern str
    The event pattern. At most 4096 characters.
    pattern String
    The event pattern. At most 4096 characters.
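
    A filter pattern is EventBridge event-pattern JSON, passed as a string. A short TypeScript sketch, reusing the role and queues from the Basic Usage example (all names here are assumptions):

    import * as aws from "@pulumi/aws";

    // Only SQS messages whose JSON body contains status == "ACTIVE" are
    // forwarded; everything else is dropped by the pipe.
    const filteredPipe = new aws.pipes.Pipe("filtered", {
        roleArn: pipeRole.arn,
        source: sourceQueue.arn,
        target: targetQueue.arn,
        sourceParameters: {
            filterCriteria: {
                filters: [{
                    // Event-pattern JSON as a string, at most 4096 characters.
                    pattern: JSON.stringify({ body: { status: ["ACTIVE"] } }),
                }],
            },
        },
    });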

    PipeSourceParametersKinesisStreamParameters, PipeSourceParametersKinesisStreamParametersArgs

    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    DeadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    MaximumRecordAgeInSeconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    MaximumRetryAttempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    OnPartialBatchItemFailure string
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    ParallelizationFactor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    StartingPositionTimestamp string
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    DeadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    MaximumRecordAgeInSeconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    MaximumRetryAttempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    OnPartialBatchItemFailure string
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    ParallelizationFactor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    StartingPositionTimestamp string
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds Integer
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts Integer
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure String
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor Integer
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPositionTimestamp String
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
    startingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig PipeSourceParametersKinesisStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds number
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts number
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure string
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor number
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPositionTimestamp string
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
    starting_position str
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    dead_letter_config PipeSourceParametersKinesisStreamParametersDeadLetterConfig
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    maximum_record_age_in_seconds int
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximum_retry_attempts int
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    on_partial_batch_item_failure str
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelization_factor int
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    starting_position_timestamp str
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST, AT_TIMESTAMP.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    deadLetterConfig Property Map
    Define the target queue to send dead-letter queue events to. Detailed below.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
    maximumRecordAgeInSeconds Number
    Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records. Maximum value of 604,800.
    maximumRetryAttempts Number
    Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source. Maximum value of 10,000.
    onPartialBatchItemFailure String
    Define how to handle item processing failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch. Valid values: AUTOMATIC_BISECT.
    parallelizationFactor Number
    The number of batches to process concurrently from each shard. The default value is 1. Maximum value of 10.
    startingPositionTimestamp String
    With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
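
    The Kinesis variant adds startingPositionTimestamp on top of the DynamoDB stream options. A hedged TypeScript sketch follows; pipeRole, stream (an aws.kinesis.Stream), targetQueue, and dlq are assumed to exist, and the timestamp format follows the description above (Unix time in seconds, passed as a string):

    import * as aws from "@pulumi/aws";

    const kinesisPipe = new aws.pipes.Pipe("kinesis-example", {
        roleArn: pipeRole.arn,
        source: stream.arn,
        target: targetQueue.arn,
        sourceParameters: {
            kinesisStreamParameters: {
                // startingPositionTimestamp is only consulted when
                // startingPosition is AT_TIMESTAMP.
                startingPosition: "AT_TIMESTAMP",
                startingPositionTimestamp: "1700000000",
                batchSize: 500,
                maximumRetryAttempts: 5,
                onPartialBatchItemFailure: "AUTOMATIC_BISECT",
                deadLetterConfig: { arn: dlq.arn },
            },
        },
    });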

    PipeSourceParametersKinesisStreamParametersDeadLetterConfig, PipeSourceParametersKinesisStreamParametersDeadLetterConfigArgs

    Arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    Arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn String
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn string
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn str
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.
    arn String
    The ARN of the SQS queue or SNS topic serving as the dead-letter target for discarded records.

    PipeSourceParametersManagedStreamingKafkaParameters, PipeSourceParametersManagedStreamingKafkaParametersArgs

    TopicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    ConsumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    Credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    TopicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    ConsumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    Credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    topicName String
    The name of the topic that the pipe will read from. Maximum length of 249.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId String
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    topicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    startingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    topic_name str
    The name of the topic that the pipe will read from. Maximum length of 249.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumer_group_id str
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersManagedStreamingKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    starting_position str
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    topicName String
    The name of the topic that the pipe will read from. Maximum length of 249.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId String
    The ID of the consumer group to join. Maximum length of 200.
    credentials Property Map
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.

    PipeSourceParametersManagedStreamingKafkaParametersCredentials, PipeSourceParametersManagedStreamingKafkaParametersCredentialsArgs

    ClientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    ClientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth String
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    client_certificate_tls_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    sasl_scram512_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth String
    The ARN of the Secrets Manager secret containing the credentials.
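
    A TypeScript sketch for reading from an Amazon MSK topic with SASL/SCRAM-512 credentials stored in Secrets Manager; pipeRole, targetQueue, mskClusterArn, and kafkaSecret are all assumed names:

    import * as aws from "@pulumi/aws";

    const mskPipe = new aws.pipes.Pipe("msk-example", {
        roleArn: pipeRole.arn,
        source: mskClusterArn,                 // ARN of the MSK cluster (assumed)
        target: targetQueue.arn,
        sourceParameters: {
            managedStreamingKafkaParameters: {
                topicName: "orders",
                consumerGroupId: "pipes-orders",
                startingPosition: "TRIM_HORIZON",
                batchSize: 100,
                maximumBatchingWindowInSeconds: 10,
                credentials: {
                    // ARN of a Secrets Manager secret holding the SCRAM-512
                    // username and password.
                    saslScram512Auth: kafkaSecret.arn,
                },
            },
        },
    });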

    PipeSourceParametersRabbitmqBrokerParameters, PipeSourceParametersRabbitmqBrokerParametersArgs

    Credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    QueueName string
    The name of the destination queue to consume. Maximum length of 1000.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    VirtualHost string
    The name of the virtual host associated with the source broker. Maximum length of 200.
    Credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    QueueName string
    The name of the destination queue to consume. Maximum length of 1000.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    VirtualHost string
    The name of the virtual host associated with the source broker. Maximum length of 200.
    credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queueName String
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    virtualHost String
    The name of the virtual host associated with the source broker. Maximum length of 200.
    credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queueName string
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    virtualHost string
    The name of the virtual host associated with the source broker. Maximum length of 200.
    credentials PipeSourceParametersRabbitmqBrokerParametersCredentials
    The credentials needed to access the resource. Detailed below.
    queue_name str
    The name of the destination queue to consume. Maximum length of 1000.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    virtual_host str
    The name of the virtual host associated with the source broker. Maximum length of 200.
    credentials Property Map
    The credentials needed to access the resource. Detailed below.
    queueName String
    The name of the destination queue to consume. Maximum length of 1000.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
    virtualHost String
    The name of the virtual host associated with the source broker. Maximum length of 200.

    PipeSourceParametersRabbitmqBrokerParametersCredentials, PipeSourceParametersRabbitmqBrokerParametersCredentialsArgs

    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    basic_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.
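
    A TypeScript sketch for an Amazon MQ RabbitMQ broker source; brokerArn, rabbitSecret (a Secrets Manager secret holding the broker username and password), pipeRole, and targetQueue are assumed names:

    import * as aws from "@pulumi/aws";

    const rabbitPipe = new aws.pipes.Pipe("rabbitmq-example", {
        roleArn: pipeRole.arn,
        source: brokerArn,                     // Amazon MQ broker ARN (assumed)
        target: targetQueue.arn,
        sourceParameters: {
            rabbitmqBrokerParameters: {
                queueName: "orders",
                virtualHost: "/",              // default RabbitMQ virtual host
                batchSize: 10,
                credentials: {
                    basicAuth: rabbitSecret.arn,   // Secrets Manager secret ARN
                },
            },
        },
    });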

    PipeSourceParametersSelfManagedKafkaParameters, PipeSourceParametersSelfManagedKafkaParametersArgs

    TopicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    AdditionalBootstrapServers List<string>
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    ConsumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    Credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    ServerRootCaCertificate string
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    Vpc PipeSourceParametersSelfManagedKafkaParametersVpc
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
    TopicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    AdditionalBootstrapServers []string
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    ConsumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    Credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    ServerRootCaCertificate string
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    StartingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    Vpc PipeSourceParametersSelfManagedKafkaParametersVpc
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
    topicName String
    The name of the topic that the pipe will read from. Maximum length of 249.
    additionalBootstrapServers List<String>
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId String
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    serverRootCaCertificate String
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    vpc PipeSourceParametersSelfManagedKafkaParametersVpc
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
    topicName string
    The name of the topic that the pipe will read from. Maximum length of 249.
    additionalBootstrapServers string[]
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId string
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    serverRootCaCertificate string
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    startingPosition string
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    vpc PipeSourceParametersSelfManagedKafkaParametersVpc
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
    topic_name str
    The name of the topic that the pipe will read from. Maximum length of 249.
    additional_bootstrap_servers Sequence[str]
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumer_group_id str
    The ID of the consumer group to join. Maximum length of 200.
    credentials PipeSourceParametersSelfManagedKafkaParametersCredentials
    The credentials needed to access the resource. Detailed below.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    server_root_ca_certificate str
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    starting_position str
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    vpc PipeSourceParametersSelfManagedKafkaParametersVpc
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.
    topicName String
    The name of the topic that the pipe will read from. Maximum length of 249.
    additionalBootstrapServers List<String>
    An array of server URLs. Maximum number of 2 items, each of maximum length 300.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    consumerGroupId String
    The ID of the consumer group to join. Maximum length of 200.
    credentials Property Map
    The credentials needed to access the resource. Detailed below.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
    serverRootCaCertificate String
    The ARN of the Secrets Manager secret containing the server root CA certificate.
    startingPosition String
    The position in a stream from which to start reading. Valid values: TRIM_HORIZON, LATEST.
    vpc Property Map
    This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used. Detailed below.

    PipeSourceParametersSelfManagedKafkaParametersCredentials, PipeSourceParametersSelfManagedKafkaParametersCredentialsArgs

    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    ClientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram256Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    BasicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    ClientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram256Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    SaslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram256Auth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth String
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth string
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram256Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth string
    The ARN of the Secrets Manager secret containing the credentials.
    basic_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    client_certificate_tls_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    sasl_scram256_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    sasl_scram512_auth str
    The ARN of the Secrets Manager secret containing the credentials.
    basicAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    clientCertificateTlsAuth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram256Auth String
    The ARN of the Secrets Manager secret containing the credentials.
    saslScram512Auth String
    The ARN of the Secrets Manager secret containing the credentials.

    PipeSourceParametersSelfManagedKafkaParametersVpc, PipeSourceParametersSelfManagedKafkaParametersVpcArgs

    SecurityGroups List<string>
    Subnets List<string>
    SecurityGroups []string
    Subnets []string
    securityGroups List<String>
    subnets List<String>
    securityGroups string[]
    subnets string[]
    security_groups Sequence[str]
    subnets Sequence[str]
    securityGroups List<String>
    subnets List<String>
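
    For self-managed Apache Kafka the pipe's source is a bootstrap-server endpoint rather than an AWS resource ARN (the provider's examples use an smk:// prefix). A TypeScript sketch with assumed names throughout (kafkaSecret, subnetIds, securityGroupId, pipeRole, targetQueue):

    import * as aws from "@pulumi/aws";

    const kafkaPipe = new aws.pipes.Pipe("self-managed-kafka", {
        roleArn: pipeRole.arn,
        source: "smk://broker1.example.com:9092",
        target: targetQueue.arn,
        sourceParameters: {
            selfManagedKafkaParameters: {
                topicName: "orders",
                additionalBootstrapServers: ["broker2.example.com:9092"],
                consumerGroupId: "pipes-orders",
                startingPosition: "LATEST",
                credentials: {
                    saslScram512Auth: kafkaSecret.arn,  // Secrets Manager secret ARN
                },
                // Connect through these VPC subnets and security groups when the
                // brokers are only reachable privately.
                vpc: {
                    subnets: subnetIds,
                    securityGroups: [securityGroupId],
                },
            },
        },
    });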

    PipeSourceParametersSqsQueueParameters, PipeSourceParametersSqsQueueParametersArgs

    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    BatchSize int
    The maximum number of records to include in each batch. Maximum value of 10000.
    MaximumBatchingWindowInSeconds int
    The maximum length of time to wait for events. Maximum value of 300.
    batchSize Integer
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Integer
    The maximum length of time to wait for events. Maximum value of 300.
    batchSize number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds number
    The maximum length of time to wait for events. Maximum value of 300.
    batch_size int
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximum_batching_window_in_seconds int
    The maximum length of time to wait for events. Maximum value of 300.
    batchSize Number
    The maximum number of records to include in each batch. Maximum value of 10000.
    maximumBatchingWindowInSeconds Number
    The maximum length of time to wait for events. Maximum value of 300.
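
    These two settings trade latency for batch efficiency: the pipe delivers as soon as it has batchSize messages or the batching window elapses, whichever comes first. A TypeScript sketch, reusing the role and queues from the Basic Usage example (names are assumptions):

    import * as aws from "@pulumi/aws";

    const batchedPipe = new aws.pipes.Pipe("sqs-batched", {
        roleArn: pipeRole.arn,
        source: sourceQueue.arn,
        target: targetQueue.arn,
        sourceParameters: {
            sqsQueueParameters: {
                batchSize: 10,                      // deliver up to 10 messages at once
                maximumBatchingWindowInSeconds: 20, // or whatever has arrived after 20s
            },
        },
    });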

    PipeTargetParameters, PipeTargetParametersArgs

    BatchJobParameters PipeTargetParametersBatchJobParameters
    The parameters for using an AWS Batch job as a target. Detailed below.
    CloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    EcsTaskParameters PipeTargetParametersEcsTaskParameters
    The parameters for using an Amazon ECS task as a target. Detailed below.
    EventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
    The parameters for using an EventBridge event bus as a target. Detailed below.
    HttpParameters PipeTargetParametersHttpParameters
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    InputTemplate string
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    KinesisStreamParameters PipeTargetParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a target. Detailed below.
    LambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
    The parameters for using a Lambda function as a target. Detailed below.
    RedshiftDataParameters PipeTargetParametersRedshiftDataParameters
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    SagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    SqsQueueParameters PipeTargetParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    StepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
    The parameters for using a Step Functions state machine as a target. Detailed below.
    BatchJobParameters PipeTargetParametersBatchJobParameters
    The parameters for using an AWS Batch job as a target. Detailed below.
    CloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    EcsTaskParameters PipeTargetParametersEcsTaskParameters
    The parameters for using an Amazon ECS task as a target. Detailed below.
    EventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
    The parameters for using an EventBridge event bus as a target. Detailed below.
    HttpParameters PipeTargetParametersHttpParameters
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    InputTemplate string
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    KinesisStreamParameters PipeTargetParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a target. Detailed below.
    LambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
    The parameters for using a Lambda function as a target. Detailed below.
    RedshiftDataParameters PipeTargetParametersRedshiftDataParameters
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    SagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    SqsQueueParameters PipeTargetParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    StepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
    The parameters for using a Step Functions state machine as a target. Detailed below.
    batchJobParameters PipeTargetParametersBatchJobParameters
    The parameters for using an AWS Batch job as a target. Detailed below.
    cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    ecsTaskParameters PipeTargetParametersEcsTaskParameters
    The parameters for using an Amazon ECS task as a target. Detailed below.
    eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
    The parameters for using an EventBridge event bus as a target. Detailed below.
    httpParameters PipeTargetParametersHttpParameters
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    inputTemplate String
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    kinesisStreamParameters PipeTargetParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a target. Detailed below.
    lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
    The parameters for using a Lambda function as a target. Detailed below.
    redshiftDataParameters PipeTargetParametersRedshiftDataParameters
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    sqsQueueParameters PipeTargetParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
    The parameters for using a Step Functions state machine as a target. Detailed below.
    batchJobParameters PipeTargetParametersBatchJobParameters
    The parameters for using an AWS Batch job as a target. Detailed below.
    cloudwatchLogsParameters PipeTargetParametersCloudwatchLogsParameters
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    ecsTaskParameters PipeTargetParametersEcsTaskParameters
    The parameters for using an Amazon ECS task as a target. Detailed below.
    eventbridgeEventBusParameters PipeTargetParametersEventbridgeEventBusParameters
    The parameters for using an EventBridge event bus as a target. Detailed below.
    httpParameters PipeTargetParametersHttpParameters
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    inputTemplate string
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    kinesisStreamParameters PipeTargetParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a target. Detailed below.
    lambdaFunctionParameters PipeTargetParametersLambdaFunctionParameters
    The parameters for using a Lambda function as a target. Detailed below.
    redshiftDataParameters PipeTargetParametersRedshiftDataParameters
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    sagemakerPipelineParameters PipeTargetParametersSagemakerPipelineParameters
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    sqsQueueParameters PipeTargetParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    stepFunctionStateMachineParameters PipeTargetParametersStepFunctionStateMachineParameters
    The parameters for using a Step Functions state machine as a target. Detailed below.
    batch_job_parameters PipeTargetParametersBatchJobParameters
    The parameters for using an AWS Batch job as a target. Detailed below.
    cloudwatch_logs_parameters PipeTargetParametersCloudwatchLogsParameters
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    ecs_task_parameters PipeTargetParametersEcsTaskParameters
    The parameters for using an Amazon ECS task as a target. Detailed below.
    eventbridge_event_bus_parameters PipeTargetParametersEventbridgeEventBusParameters
    The parameters for using an EventBridge event bus as a target. Detailed below.
    http_parameters PipeTargetParametersHttpParameters
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    input_template str
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    kinesis_stream_parameters PipeTargetParametersKinesisStreamParameters
    The parameters for using a Kinesis stream as a target. Detailed below.
    lambda_function_parameters PipeTargetParametersLambdaFunctionParameters
    The parameters for using a Lambda function as a target. Detailed below.
    redshift_data_parameters PipeTargetParametersRedshiftDataParameters
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    sagemaker_pipeline_parameters PipeTargetParametersSagemakerPipelineParameters
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    sqs_queue_parameters PipeTargetParametersSqsQueueParameters
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    step_function_state_machine_parameters PipeTargetParametersStepFunctionStateMachineParameters
    The parameters for using a Step Functions state machine as a target. Detailed below.
    batchJobParameters Property Map
    The parameters for using an AWS Batch job as a target. Detailed below.
    cloudwatchLogsParameters Property Map
    The parameters for using a CloudWatch Logs log stream as a target. Detailed below.
    ecsTaskParameters Property Map
    The parameters for using an Amazon ECS task as a target. Detailed below.
    eventbridgeEventBusParameters Property Map
    The parameters for using an EventBridge event bus as a target. Detailed below.
    httpParameters Property Map
    These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination. Detailed below.
    inputTemplate String
    Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. Maximum length of 8192 characters.
    kinesisStreamParameters Property Map
    The parameters for using a Kinesis stream as a target. Detailed below.
    lambdaFunctionParameters Property Map
    The parameters for using a Lambda function as a target. Detailed below.
    redshiftDataParameters Property Map
    These are custom parameters to be used when the target is an Amazon Redshift cluster invoking the Amazon Redshift Data API BatchExecuteStatement. Detailed below.
    sagemakerPipelineParameters Property Map
    The parameters for using a SageMaker pipeline as a target. Detailed below.
    sqsQueueParameters Property Map
    The parameters for using an Amazon SQS queue as a target. Detailed below.
    stepFunctionStateMachineParameters Property Map
    The parameters for using a Step Functions state machine as a target. Detailed below.
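
    At most one of these target-specific blocks applies, matching the pipe's target; inputTemplate can reshape the event before delivery using <$.path> placeholders drawn from the source event. A TypeScript sketch targeting a Lambda function (pipeRole, sourceQueue, and handler are assumed names):

    import * as aws from "@pulumi/aws";

    const lambdaPipe = new aws.pipes.Pipe("to-lambda", {
        roleArn: pipeRole.arn,
        source: sourceQueue.arn,
        target: handler.arn,                   // an aws.lambda.Function (assumed)
        targetParameters: {
            // Wrap the SQS message body; <$.body> is substituted per event.
            inputTemplate: "{\"payload\": <$.body>}",
            lambdaFunctionParameters: {
                invocationType: "REQUEST_RESPONSE",
            },
        },
    });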

    PipeTargetParametersBatchJobParameters, PipeTargetParametersBatchJobParametersArgs

    JobDefinition string
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
    JobName string
    The name of the job. It can be up to 128 letters long.
    ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
    The overrides that are sent to a container. Detailed below.
    DependsOns List<PipeTargetParametersBatchJobParametersDependsOn>
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    Parameters Dictionary<string, string>
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
    RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
    JobDefinition string
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
    JobName string
    The name of the job. It can be up to 128 letters long.
    ArrayProperties PipeTargetParametersBatchJobParametersArrayProperties
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    ContainerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
    The overrides that are sent to a container. Detailed below.
    DependsOns []PipeTargetParametersBatchJobParametersDependsOn
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    Parameters map[string]string
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
    RetryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
    jobDefinition String
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
    jobName String
    The name of the job. It can be up to 128 letters long.
    arrayProperties PipeTargetParametersBatchJobParametersArrayProperties
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
    The overrides that are sent to a container. Detailed below.
    dependsOns List<PipeTargetParametersBatchJobParametersDependsOn>
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    parameters Map<String,String>
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
    retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
    jobDefinition string
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
    jobName string
    The name of the job. It can be up to 128 letters long.
    arrayProperties PipeTargetParametersBatchJobParametersArrayProperties
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    containerOverrides PipeTargetParametersBatchJobParametersContainerOverrides
    The overrides that are sent to a container. Detailed below.
    dependsOns PipeTargetParametersBatchJobParametersDependsOn[]
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    parameters {[key: string]: string}
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition. Detailed below.
    retryStrategy PipeTargetParametersBatchJobParametersRetryStrategy
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
    job_definition str
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, the latest active revision is used.
    job_name str
    The name of the job. It can be up to 128 letters long.
    array_properties PipeTargetParametersBatchJobParametersArrayProperties
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    container_overrides PipeTargetParametersBatchJobParametersContainerOverrides
    The overrides that are sent to a container. Detailed below.
    depends_ons Sequence[PipeTargetParametersBatchJobParametersDependsOn]
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    parameters Mapping[str, str]
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
    retry_strategy PipeTargetParametersBatchJobParametersRetryStrategy
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
    jobDefinition String
    The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision, the latest active revision is used.
    jobName String
    The name of the job. It can be up to 128 letters long.
    arrayProperties Property Map
    The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job. Detailed below.
    containerOverrides Property Map
    The overrides that are sent to a container. Detailed below.
    dependsOns List<Property Map>
    A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin. Detailed below.
    parameters Map<String>
    Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
    retryStrategy Property Map
    The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition. Detailed below.
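
    As a sketch of how these fields compose, the following TypeScript snippet wires an SQS source to a Batch job queue target; the role and job queue ARNs and the job definition name are hypothetical placeholders, not resources defined in the examples on this page.

    import * as aws from "@pulumi/aws";
    
    // Minimal sketch: submit a Batch job per event. The ARNs and the job
    // definition name below are hypothetical placeholders.
    const batchSource = new aws.sqs.Queue("batch-pipe-source", {});
    const batchPipe = new aws.pipes.Pipe("batch-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
        source: batchSource.arn,
        target: "arn:aws:batch:us-east-1:123456789012:job-queue/example-queue", // placeholder
        targetParameters: {
            batchJobParameters: {
                jobDefinition: "example-job-def", // name only: the latest active revision is used
                jobName: "pipe-submitted-job",
                arrayProperties: { size: 2 },     // makes this an array job (size 2..10,000)
                retryStrategy: { attempts: 3 },   // overrides the job definition's retry strategy
                parameters: { inputKey: "example-value" }, // replaces placeholders in the job definition
            },
        },
    });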

    PipeTargetParametersBatchJobParametersArrayProperties, PipeTargetParametersBatchJobParametersArrayPropertiesArgs

    Size int
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
    Size int
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
    size Integer
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
    size number
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
    size int
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.
    size Number
    The size of the array, if this is an array batch job. Minimum value of 2. Maximum value of 10,000.

    PipeTargetParametersBatchJobParametersContainerOverrides, PipeTargetParametersBatchJobParametersContainerOverridesArgs

    Commands List<string>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    Environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    InstanceType string
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    ResourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement>
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
    Commands []string
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    Environments []PipeTargetParametersBatchJobParametersContainerOverridesEnvironment
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    InstanceType string
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    ResourceRequirements []PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
    commands List<String>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    environments List<PipeTargetParametersBatchJobParametersContainerOverridesEnvironment>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    instanceType String
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    resourceRequirements List<PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement>
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
    commands string[]
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    environments PipeTargetParametersBatchJobParametersContainerOverridesEnvironment[]
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    instanceType string
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    resourceRequirements PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement[]
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
    commands Sequence[str]
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    environments Sequence[PipeTargetParametersBatchJobParametersContainerOverridesEnvironment]
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    instance_type str
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    resource_requirements Sequence[PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement]
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
    commands List<String>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    environments List<Property Map>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    instanceType String
    The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
    resourceRequirements List<Property Map>
    The type and amount of a resource to assign to a container, instead of the default value from the job definition. The supported resources include GPU, MEMORY, and VCPU. Detailed below.
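
    For illustration, a container override for the Batch target might look like the following TypeScript sketch; the command, environment variable, and GPU count are illustrative, and the type annotation assumes the SDK's types.input namespace mirroring the heading above.

    import * as aws from "@pulumi/aws";
    
    // Sketch of Batch container overrides; all values are illustrative.
    const containerOverrides: aws.types.input.pipes.PipeTargetParametersBatchJobParametersContainerOverrides = {
        commands: ["python", "handler.py"],                  // replaces the default command
        environments: [{ name: "STAGE", value: "prod" }],    // added or overridden at container launch
        resourceRequirements: [{ type: "GPU", value: "1" }], // reserve one GPU for the container
    };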

    PipeTargetParametersBatchJobParametersContainerOverridesEnvironment, PipeTargetParametersBatchJobParametersContainerOverridesEnvironmentArgs

    Name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    Value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    Name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    Value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name String
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value String
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name str
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value str
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name String
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value String
    The value of the key-value pair. For environment variables, this is the value of the environment variable.

    PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirement, PipeTargetParametersBatchJobParametersContainerOverridesResourceRequirementArgs

    Type string
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    Value string
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
    Type string
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    Value string
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
    type String
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    value String
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
    type string
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    value string
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
    type str
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    value str
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
    type String
    The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
    value String
    The quantity of the specified resource to reserve for the container. The values vary based on the type specified.

    PipeTargetParametersBatchJobParametersDependsOn, PipeTargetParametersBatchJobParametersDependsOnArgs

    JobId string
    The job ID of the AWS Batch job that's associated with this dependency.
    Type string
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
    JobId string
    The job ID of the AWS Batch job that's associated with this dependency.
    Type string
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
    jobId String
    The job ID of the AWS Batch job that's associated with this dependency.
    type String
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
    jobId string
    The job ID of the AWS Batch job that's associated with this dependency.
    type string
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
    job_id str
    The job ID of the AWS Batch job that's associated with this dependency.
    type str
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.
    jobId String
    The job ID of the AWS Batch job that's associated with this dependency.
    type String
    The type of the job dependency. Valid Values: N_TO_N, SEQUENTIAL.

    PipeTargetParametersBatchJobParametersRetryStrategy, PipeTargetParametersBatchJobParametersRetryStrategyArgs

    Attempts int
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
    Attempts int
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
    attempts Integer
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
    attempts number
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
    attempts int
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.
    attempts Number
    The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value. Maximum value of 10.

    PipeTargetParametersCloudwatchLogsParameters, PipeTargetParametersCloudwatchLogsParametersArgs

    LogStreamName string
    The name of the log stream.
    Timestamp string
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
    LogStreamName string
    The name of the log stream.
    Timestamp string
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
    logStreamName String
    The name of the log stream.
    timestamp String
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
    logStreamName string
    The name of the log stream.
    timestamp string
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
    log_stream_name str
    The name of the log stream.
    timestamp str
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
    logStreamName String
    The name of the log stream.
    timestamp String
    The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. This is the JSON path to the field in the event, e.g. $.detail.timestamp.
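
    As a sketch, a pipe can deliver events to a log group as follows (TypeScript); the role ARN and log stream name are placeholders, and the log stream itself is assumed to exist.

    import * as aws from "@pulumi/aws";
    
    // Sketch: deliver pipe events to a CloudWatch Logs log group. The role ARN
    // and stream name are placeholders; the log stream is assumed to exist.
    const logsSource = new aws.sqs.Queue("logs-pipe-source", {});
    const targetLogGroup = new aws.cloudwatch.LogGroup("pipe-target", {});
    const logsPipe = new aws.pipes.Pipe("logs-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role", // placeholder
        source: logsSource.arn,
        target: targetLogGroup.arn,
        targetParameters: {
            cloudwatchLogsParameters: {
                logStreamName: "pipe-events",     // placeholder stream name
                timestamp: "$.detail.timestamp",  // JSON path into the event payload
            },
        },
    });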

    PipeTargetParametersEcsTaskParameters, PipeTargetParametersEcsTaskParametersArgs

    TaskDefinitionArn string
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    CapacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy>
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    EnableEcsManagedTags bool
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    EnableExecuteCommand bool
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    Group string
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    LaunchType string
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    Overrides PipeTargetParametersEcsTaskParametersOverrides
    The overrides that are associated with a task. Detailed below.
    PlacementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint>
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    PlacementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy>
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    PlatformVersion string
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    PropagateTags string
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    ReferenceId string
    The reference ID to use for the task. Maximum length of 1,024.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TaskCount int
    The number of tasks to create based on TaskDefinition. The default is 1.
    TaskDefinitionArn string
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    CapacityProviderStrategies []PipeTargetParametersEcsTaskParametersCapacityProviderStrategy
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    EnableEcsManagedTags bool
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    EnableExecuteCommand bool
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    Group string
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    LaunchType string
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    NetworkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    Overrides PipeTargetParametersEcsTaskParametersOverrides
    The overrides that are associated with a task. Detailed below.
    PlacementConstraints []PipeTargetParametersEcsTaskParametersPlacementConstraint
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    PlacementStrategies []PipeTargetParametersEcsTaskParametersPlacementStrategy
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    PlatformVersion string
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    PropagateTags string
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    ReferenceId string
    The reference ID to use for the task. Maximum length of 1,024.
    Tags map[string]string
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TaskCount int
    The number of tasks to create based on TaskDefinition. The default is 1.
    taskDefinitionArn String
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    capacityProviderStrategies List<PipeTargetParametersEcsTaskParametersCapacityProviderStrategy>
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    enableEcsManagedTags Boolean
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    enableExecuteCommand Boolean
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    group String
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    launchType String
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    overrides PipeTargetParametersEcsTaskParametersOverrides
    The overrides that are associated with a task. Detailed below.
    placementConstraints List<PipeTargetParametersEcsTaskParametersPlacementConstraint>
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    placementStrategies List<PipeTargetParametersEcsTaskParametersPlacementStrategy>
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    platformVersion String
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    propagateTags String
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    referenceId String
    The reference ID to use for the task. Maximum length of 1,024.
    tags Map<String,String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    taskCount Integer
    The number of tasks to create based on TaskDefinition. The default is 1.
    taskDefinitionArn string
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    capacityProviderStrategies PipeTargetParametersEcsTaskParametersCapacityProviderStrategy[]
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    enableEcsManagedTags boolean
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    enableExecuteCommand boolean
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    group string
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    launchType string
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    networkConfiguration PipeTargetParametersEcsTaskParametersNetworkConfiguration
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    overrides PipeTargetParametersEcsTaskParametersOverrides
    The overrides that are associated with a task. Detailed below.
    placementConstraints PipeTargetParametersEcsTaskParametersPlacementConstraint[]
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    placementStrategies PipeTargetParametersEcsTaskParametersPlacementStrategy[]
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    platformVersion string
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    propagateTags string
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    referenceId string
    The reference ID to use for the task. Maximum length of 1,024.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    taskCount number
    The number of tasks to create based on TaskDefinition. The default is 1.
    task_definition_arn str
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    capacity_provider_strategies Sequence[PipeTargetParametersEcsTaskParametersCapacityProviderStrategy]
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    enable_ecs_managed_tags bool
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    enable_execute_command bool
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    group str
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    launch_type str
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    network_configuration PipeTargetParametersEcsTaskParametersNetworkConfiguration
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    overrides PipeTargetParametersEcsTaskParametersOverrides
    The overrides that are associated with a task. Detailed below.
    placement_constraints Sequence[PipeTargetParametersEcsTaskParametersPlacementConstraint]
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    placement_strategies Sequence[PipeTargetParametersEcsTaskParametersPlacementStrategy]
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    platform_version str
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    propagate_tags str
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    reference_id str
    The reference ID to use for the task. Maximum length of 1,024.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    task_count int
    The number of tasks to create based on TaskDefinition. The default is 1.
    taskDefinitionArn String
    The ARN of the task definition to use if the event target is an Amazon ECS task.
    capacityProviderStrategies List<Property Map>
    List of capacity provider strategies to use for the task. If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used. Detailed below.
    enableEcsManagedTags Boolean
    Specifies whether to enable Amazon ECS managed tags for the task. Valid values: true, false.
    enableExecuteCommand Boolean
    Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task. Valid values: true, false.
    group String
    Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
    launchType String
    Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. Valid Values: EC2, FARGATE, EXTERNAL
    networkConfiguration Property Map
    Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks. If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails. Detailed below.
    overrides Property Map
    The overrides that are associated with a task. Detailed below.
    placementConstraints List<Property Map>
    An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime). Detailed below.
    placementStrategies List<Property Map>
    The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task. Detailed below.
    platformVersion String
    Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0. This structure is used only if LaunchType is FARGATE.
    propagateTags String
    Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action. Valid Values: TASK_DEFINITION
    referenceId String
    The reference ID to use for the task. Maximum length of 1,024.
    tags Map<String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    taskCount Number
    The number of tasks to create based on TaskDefinition. The default is 1.
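
    To see how these pieces fit together, here is a TypeScript sketch of a Fargate task target; the cluster and task definition ARNs, subnet ID, and security group ID are placeholders, and the role is assumed to permit ecs:RunTask and iam:PassRole.

    import * as aws from "@pulumi/aws";
    
    // Sketch: run one Fargate task per event. All ARNs and IDs are placeholders.
    const ecsSource = new aws.sqs.Queue("ecs-pipe-source", {});
    const ecsPipe = new aws.pipes.Pipe("ecs-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",          // placeholder
        source: ecsSource.arn,
        target: "arn:aws:ecs:us-east-1:123456789012:cluster/example-cluster", // placeholder
        targetParameters: {
            ecsTaskParameters: {
                taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1", // placeholder
                launchType: "FARGATE",
                taskCount: 1,
                networkConfiguration: { // required for FARGATE, which uses the awsvpc network mode
                    awsVpcConfiguration: {
                        subnets: ["subnet-0123456789abcdef0"],    // placeholder
                        securityGroups: ["sg-0123456789abcdef0"], // placeholder
                        assignPublicIp: "ENABLED",                // valid only with FARGATE
                    },
                },
            },
        },
    });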

    PipeTargetParametersEcsTaskParametersCapacityProviderStrategy, PipeTargetParametersEcsTaskParametersCapacityProviderStrategyArgs

    CapacityProvider string
    The short name of the capacity provider. Maximum value of 255.
    Base int
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    Weight int
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
    CapacityProvider string
    The short name of the capacity provider. Maximum value of 255.
    Base int
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    Weight int
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
    capacityProvider String
    The short name of the capacity provider. Maximum value of 255.
    base Integer
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    weight Integer
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
    capacityProvider string
    The short name of the capacity provider. Maximum value of 255.
    base number
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    weight number
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
    capacity_provider str
    The short name of the capacity provider. Maximum value of 255.
    base int
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    weight int
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
    capacityProvider String
    The short name of the capacity provider. Maximum value of 255.
    base Number
    The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used. Maximum value of 100,000.
    weight Number
    The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied. Maximum value of 1,000.
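
    For example, a strategy that keeps one task on FARGATE and skews the remainder toward FARGATE_SPOT could look like this TypeScript sketch; the weights are illustrative, and the type annotation assumes the SDK's types.input namespace.

    import * as aws from "@pulumi/aws";
    
    // Sketch: a capacity provider strategy used in place of launchType.
    // FARGATE and FARGATE_SPOT are the built-in Fargate capacity providers.
    const strategies: aws.types.input.pipes.PipeTargetParametersEcsTaskParametersCapacityProviderStrategy[] = [
        { capacityProvider: "FARGATE", base: 1, weight: 1 },  // run at least one task on FARGATE
        { capacityProvider: "FARGATE_SPOT", weight: 4 },      // after the base is met, split launches 1:4
    ];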

    PipeTargetParametersEcsTaskParametersNetworkConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationArgs

    AwsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
    AwsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
    awsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
    awsVpcConfiguration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
    aws_vpc_configuration PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.
    awsVpcConfiguration Property Map
    Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode. Detailed below.

    PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfiguration, PipeTargetParametersEcsTaskParametersNetworkConfigurationAwsVpcConfigurationArgs

    AssignPublicIp string
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    SecurityGroups List<string>
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    Subnets List<string>
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.
    AssignPublicIp string
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    SecurityGroups []string
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    Subnets []string
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.
    assignPublicIp String
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    securityGroups List<String>
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    subnets List<String>
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.
    assignPublicIp string
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    securityGroups string[]
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    subnets string[]
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.
    assign_public_ip str
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    security_groups Sequence[str]
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    subnets Sequence[str]
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.
    assignPublicIp String
    Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE. Valid Values: ENABLED, DISABLED.
    securityGroups List<String>
    Specifies the security groups associated with the task. These security groups must all be in the same VPC.
    subnets List<String>
    Specifies the subnets associated with the task. These subnets must all be in the same VPC.

    PipeTargetParametersEcsTaskParametersOverrides, PipeTargetParametersEcsTaskParametersOverridesArgs

    ContainerOverrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride>
    One or more container overrides that are sent to a task. Detailed below.
    Cpu string
    The CPU override for the task, in CPU units, instead of the default value from the task definition.
    EphemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
    The ephemeral storage setting override for the task. Detailed below.
    ExecutionRoleArn string
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    InferenceAcceleratorOverrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride>
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    Memory string
    The memory override for the task, in MiB, instead of the default value from the task definition.
    TaskRoleArn string
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
    ContainerOverrides []PipeTargetParametersEcsTaskParametersOverridesContainerOverride
    One or more container overrides that are sent to a task. Detailed below.
    Cpu string
    The CPU override for the task, in CPU units, instead of the default value from the task definition.
    EphemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
    The ephemeral storage setting override for the task. Detailed below.
    ExecutionRoleArn string
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    InferenceAcceleratorOverrides []PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    Memory string
    The memory override for the task, in MiB, instead of the default value from the task definition.
    TaskRoleArn string
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
    containerOverrides List<PipeTargetParametersEcsTaskParametersOverridesContainerOverride>
    One or more container overrides that are sent to a task. Detailed below.
    cpu String
    The CPU override for the task, in CPU units, instead of the default value from the task definition.
    ephemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
    The ephemeral storage setting override for the task. Detailed below.
    executionRoleArn String
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    inferenceAcceleratorOverrides List<PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride>
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    memory String
    The memory override for the task, in MiB, instead of the default value from the task definition.
    taskRoleArn String
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
    containerOverrides PipeTargetParametersEcsTaskParametersOverridesContainerOverride[]
    One or more container overrides that are sent to a task. Detailed below.
    cpu string
    The CPU override for the task, in CPU units, instead of the default value from the task definition.
    ephemeralStorage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
    The ephemeral storage setting override for the task. Detailed below.
    executionRoleArn string
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    inferenceAcceleratorOverrides PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride[]
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    memory string
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    taskRoleArn string
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
    container_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverride]
    One or more container overrides that are sent to a task. Detailed below.
    cpu str
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    ephemeral_storage PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage
    The ephemeral storage setting override for the task. Detailed below.
    execution_role_arn str
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    inference_accelerator_overrides Sequence[PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride]
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    memory str
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    task_role_arn str
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
    containerOverrides List<Property Map>
    One or more container overrides that are sent to a task. Detailed below.
    cpu String
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    ephemeralStorage Property Map
    The ephemeral storage setting override for the task. Detailed below.
    executionRoleArn String
    The Amazon Resource Name (ARN) of the task execution IAM role override for the task.
    inferenceAcceleratorOverrides List<Property Map>
    List of Elastic Inference accelerator overrides for the task. Detailed below.
    memory String
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    taskRoleArn String
    The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role.
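
    As a minimal sketch of how these task-level overrides fit together, the following pipe targets an ECS cluster and overrides the task's CPU, memory, ephemeral storage, and task role. Every ARN below is a hypothetical placeholder, and the pipe role is assumed to already carry the ECS permissions described in the User Guide.

    import * as aws from "@pulumi/aws";

    const ecsOverridesExample = new aws.pipes.Pipe("ecs-overrides-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:ecs:us-east-1:123456789012:cluster/example-cluster",
        targetParameters: {
            ecsTaskParameters: {
                taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1",
                overrides: {
                    cpu: "512",                          // task-level CPU units, passed as a string
                    memory: "1024",                      // task-level memory in MiB, passed as a string
                    ephemeralStorage: { sizeInGib: 30 }, // must be between 21 and 200 GiB
                    taskRoleArn: "arn:aws:iam::123456789012:role/example-task-role",
                },
            },
        },
    });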

    PipeTargetParametersEcsTaskParametersOverridesContainerOverride, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideArgs

    Commands List<string>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    Cpu int
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    EnvironmentFiles List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile>
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    Environments List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    Memory int
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    MemoryReservation int
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    Name string
    Name of the container that receives the override. This parameter is required if any override is specified.
    ResourceRequirements List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement>
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
    Commands []string
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    Cpu int
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    EnvironmentFiles []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    Environments []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    Memory int
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    MemoryReservation int
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    Name string
    Name of the container that receives the override. This parameter is required if any override is specified.
    ResourceRequirements []PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
    commands List<String>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    cpu Integer
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    environmentFiles List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile>
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    environments List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    memory Integer
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    memoryReservation Integer
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    name String
    Name of the container that receives the override. This parameter is required if any override is specified.
    resourceRequirements List<PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement>
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
    commands string[]
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    cpu number
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    environmentFiles PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile[]
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    environments PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment[]
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    memory number
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    memoryReservation number
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    name string
    Name of the container that receives the override. This parameter is required if any override is specified.
    resourceRequirements PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement[]
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
    commands Sequence[str]
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    cpu int
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    environment_files Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile]
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    environments Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment]
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    memory int
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    memory_reservation int
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    name str
    Name of the container that receives the override. This parameter is required if any override is specified.
    resource_requirements Sequence[PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement]
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
    commands List<String>
    List of commands to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
    cpu Number
    The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
    environmentFiles List<Property Map>
    A list of files containing the environment variables to pass to a container, instead of the value from the container definition. Detailed below.
    environments List<Property Map>
    The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name. Detailed below.
    memory Number
    The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
    memoryReservation Number
    The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
    name String
    Name of the container that receives the override. This parameter is required if any override is specified.
    resourceRequirements List<Property Map>
    The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU. Detailed below.
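
    A minimal sketch of a container override, assuming the task definition contains a container named app (hypothetical); it also uses the environment entries documented below. The ARNs are placeholders.

    import * as aws from "@pulumi/aws";

    const containerOverrideExample = new aws.pipes.Pipe("container-override-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:ecs:us-east-1:123456789012:cluster/example-cluster",
        targetParameters: {
            ecsTaskParameters: {
                taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789012:task-definition/example:1",
                overrides: {
                    containerOverrides: [{
                        name: "app",                      // must match a container in the task definition
                        commands: ["node", "consume.js"], // replaces the image or task definition command
                        cpu: 256,
                        memory: 512,
                        environments: [{ name: "STAGE", value: "production" }],
                    }],
                },
            },
        },
    });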

    PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironment, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentArgs

    Name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    Value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    Name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    Value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name String
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value String
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name string
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value string
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name str
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value str
    The value of the key-value pair. For environment variables, this is the value of the environment variable.
    name String
    The name of the key-value pair. For environment variables, this is the name of the environment variable.
    value String
    The value of the key-value pair. For environment variables, this is the value of the environment variable.

    PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFile, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideEnvironmentFileArgs

    Type string
    The file type to use. The only supported value is s3.
    Value string
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
    Type string
    The file type to use. The only supported value is s3.
    Value string
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
    type String
    The file type to use. The only supported value is s3.
    value String
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
    type string
    The file type to use. The only supported value is s3.
    value string
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
    type str
    The file type to use. The only supported value is s3.
    value str
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
    type String
    The file type to use. The only supported value is s3.
    value String
    The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
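
    For example, an environment file override can point at an env-format object in S3. This fragment slots into the containerOverrides list shown above; the container name, bucket, and key are hypothetical, and the task execution role is assumed to be allowed to read the object.

    import * as aws from "@pulumi/aws";

    const appOverride: aws.types.input.pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverride = {
        name: "app", // hypothetical container name from the task definition
        environmentFiles: [{
            type: "s3",                                       // the only supported file type
            value: "arn:aws:s3:::example-bucket/env/app.env", // hypothetical S3 object ARN
        }],
    };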

    PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirement, PipeTargetParametersEcsTaskParametersOverridesContainerOverrideResourceRequirementArgs

    Type string
    The type of resource to assign to a container. The only supported resource is a GPU.
    Value string
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
    Type string
    The type of resource to assign to a container. The only supported resource is a GPU.
    Value string
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
    type String
    The type of resource to assign to a container. The only supported resource is a GPU.
    value String
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
    type string
    The type of resource to assign to a container. The only supported resource is a GPU.
    value string
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
    type str
    The type of resource to assign to a container. The only supported resource is a GPU.
    value str
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
    type String
    The type of resource to assign to a container. The only supported resource is a GPU.
    value String
    The value for the specified resource type. For GPU, this is the number of physical GPUs the Amazon ECS container agent reserves for the container.
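
    A sketch of a GPU reservation override under the same assumptions: gpu-worker is a hypothetical container name, and the cluster is assumed to have GPU capacity.

    import * as aws from "@pulumi/aws";

    const gpuOverride: aws.types.input.pipes.PipeTargetParametersEcsTaskParametersOverridesContainerOverride = {
        name: "gpu-worker",
        resourceRequirements: [{
            type: "GPU",
            value: "1", // number of physical GPUs to reserve, passed as a string
        }],
    };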

    PipeTargetParametersEcsTaskParametersOverridesEphemeralStorage, PipeTargetParametersEcsTaskParametersOverridesEphemeralStorageArgs

    SizeInGib int
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
    SizeInGib int
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
    sizeInGib Integer
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
    sizeInGib number
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
    size_in_gib int
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
    sizeInGib Number
    The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.

    PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverride, PipeTargetParametersEcsTaskParametersOverridesInferenceAcceleratorOverrideArgs

    DeviceName string
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    DeviceType string
    The Elastic Inference accelerator type to use.
    DeviceName string
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    DeviceType string
    The Elastic Inference accelerator type to use.
    deviceName String
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    deviceType String
    The Elastic Inference accelerator type to use.
    deviceName string
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    deviceType string
    The Elastic Inference accelerator type to use.
    device_name str
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    device_type str
    The Elastic Inference accelerator type to use.
    deviceName String
    The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
    deviceType String
    The Elastic Inference accelerator type to use.

    PipeTargetParametersEcsTaskParametersPlacementConstraint, PipeTargetParametersEcsTaskParametersPlacementConstraintArgs

    Expression string
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    Type string
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
    Expression string
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    Type string
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
    expression String
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    type String
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
    expression string
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    type string
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
    expression str
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    type str
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
    expression String
    A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. Maximum length of 2,000.
    type String
    The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates. Valid Values: distinctInstance, memberOf.
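
    A sketch of both constraint types; the instance-type expression is illustrative and assumes the tasks run on EC2 container instances.

    import * as aws from "@pulumi/aws";

    // Slots into targetParameters.ecsTaskParameters.placementConstraints.
    const placementConstraints: aws.types.input.pipes.PipeTargetParametersEcsTaskParametersPlacementConstraint[] = [
        { type: "distinctInstance" }, // no expression may be set for distinctInstance
        { type: "memberOf", expression: "attribute:ecs.instance-type =~ t3.*" },
    ];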

    PipeTargetParametersEcsTaskParametersPlacementStrategy, PipeTargetParametersEcsTaskParametersPlacementStrategyArgs

    Field string
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    Type string
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
    Field string
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    Type string
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
    field String
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    type String
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
    field string
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    type string
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
    field str
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    type str
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
    field String
    The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used. Maximum length of 255.
    type String
    The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task). Valid Values: random, spread, binpack.
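
    For example, a common combination spreads tasks across Availability Zones and then packs by remaining memory; this fragment slots into targetParameters.ecsTaskParameters.placementStrategies.

    import * as aws from "@pulumi/aws";

    const placementStrategies: aws.types.input.pipes.PipeTargetParametersEcsTaskParametersPlacementStrategy[] = [
        { type: "spread", field: "attribute:ecs.availability-zone" }, // spread across AZs first
        { type: "binpack", field: "memory" },                         // then pack by least remaining memory
    ];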

    PipeTargetParametersEventbridgeEventBusParameters, PipeTargetParametersEventbridgeEventBusParametersArgs

    DetailType string
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    EndpointId string
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    Resources List<string>
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    Source string
    Source of the event.
    Time string
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
    DetailType string
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    EndpointId string
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    Resources []string
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    Source string
    Source of the event.
    Time string
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
    detailType String
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    endpointId String
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    resources List<String>
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    source String
    Source of the event.
    time String
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
    detailType string
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    endpointId string
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    resources string[]
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    source string
    Source of the event.
    time string
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
    detail_type str
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    endpoint_id str
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    resources Sequence[str]
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    source str
    Source of the event.
    time str
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
    detailType String
    A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
    endpointId String
    The URL subdomain of the endpoint. For example, if the URL for Endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
    resources List<String>
    List of AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
    source String
    Source of the event.
    time String
    The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used. This is a JSON path to a field in the event, e.g., $.detail.timestamp.
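
    A minimal sketch of a pipe that forwards to another event bus. The ARNs are hypothetical, and the time path assumes the source event carries a timestamp at $.body.timestamp, which depends entirely on your source's event shape.

    import * as aws from "@pulumi/aws";

    const eventBusExample = new aws.pipes.Pipe("event-bus-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:events:us-east-1:123456789012:event-bus/example-bus",
        targetParameters: {
            eventbridgeEventBusParameters: {
                detailType: "OrderReceived",
                source: "com.example.orders",
                time: "$.body.timestamp", // JSON path into the source event (assumed shape)
            },
        },
    });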

    PipeTargetParametersHttpParameters, PipeTargetParametersHttpParametersArgs

    HeaderParameters Dictionary<string, string>
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    PathParameterValues string
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    QueryStringParameters Dictionary<string, string>
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    HeaderParameters map[string]string
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    PathParameterValues string
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    QueryStringParameters map[string]string
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    headerParameters Map<String,String>
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    pathParameterValues String
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    queryStringParameters Map<String,String>
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    headerParameters {[key: string]: string}
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    pathParameterValues string
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    queryStringParameters {[key: string]: string}
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    header_parameters Mapping[str, str]
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    path_parameter_values str
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    query_string_parameters Mapping[str, str]
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    headerParameters Map<String>
    Key-value mapping of the headers to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
    pathParameterValues String
    The path parameter values used to populate any path wildcards ("*") in the API Gateway REST API or EventBridge ApiDestination path.
    queryStringParameters Map<String>
    Key-value mapping of the query strings to send as part of the request to the API Gateway REST API or EventBridge ApiDestination.
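
    A sketch of the HTTP parameters for an API destination target; the header, path, and query values are illustrative and assume an ApiDestination path with a single "*" wildcard.

    import * as aws from "@pulumi/aws";

    // Slots into the pipe's targetParameters.
    const httpParameters: aws.types.input.pipes.PipeTargetParametersHttpParameters = {
        headerParameters: { "Content-Type": "application/json" },
        pathParameterValues: "orders", // fills the "*" wildcard in the target path
        queryStringParameters: { stage: "prod" },
    };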

    PipeTargetParametersKinesisStreamParameters, PipeTargetParametersKinesisStreamParametersArgs

    PartitionKey string
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
    PartitionKey string
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
    partitionKey String
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
    partitionKey string
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
    partition_key str
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
    partitionKey String
    Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
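
    A minimal sketch for a Kinesis stream target; the ARNs are hypothetical. Because records with the same partition key map to the same shard, a static key like the one below serializes all records onto one shard.

    import * as aws from "@pulumi/aws";

    const kinesisExample = new aws.pipes.Pipe("kinesis-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:kinesis:us-east-1:123456789012:stream/example-stream",
        targetParameters: {
            kinesisStreamParameters: {
                partitionKey: "order-events", // static key; all records land on the same shard
            },
        },
    });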

    PipeTargetParametersLambdaFunctionParameters, PipeTargetParametersLambdaFunctionParametersArgs

    InvocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    InvocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType String
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocation_type str
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType String
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
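
    A minimal sketch for a Lambda function target invoked synchronously; the ARNs are hypothetical.

    import * as aws from "@pulumi/aws";

    const lambdaExample = new aws.pipes.Pipe("lambda-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:lambda:us-east-1:123456789012:function:example-function",
        targetParameters: {
            lambdaFunctionParameters: {
                invocationType: "REQUEST_RESPONSE", // use FIRE_AND_FORGET for asynchronous invocation
            },
        },
    });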

    PipeTargetParametersRedshiftDataParameters, PipeTargetParametersRedshiftDataParametersArgs

    Database string
    The name of the database. Required when authenticating using temporary credentials.
    Sqls List<string>
    List of SQL statement texts to run, each with a maximum length of 100,000.
    DbUser string
    The database user name. Required when authenticating using temporary credentials.
    SecretManagerArn string
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    StatementName string
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    WithEvent bool
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
    Database string
    The name of the database. Required when authenticating using temporary credentials.
    Sqls []string
    List of SQL statement texts to run, each with a maximum length of 100,000.
    DbUser string
    The database user name. Required when authenticating using temporary credentials.
    SecretManagerArn string
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    StatementName string
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    WithEvent bool
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
    database String
    The name of the database. Required when authenticating using temporary credentials.
    sqls List<String>
    List of SQL statement texts to run, each with a maximum length of 100,000.
    dbUser String
    The database user name. Required when authenticating using temporary credentials.
    secretManagerArn String
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    statementName String
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    withEvent Boolean
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
    database string
    The name of the database. Required when authenticating using temporary credentials.
    sqls string[]
    List of SQL statement texts to run, each with a maximum length of 100,000.
    dbUser string
    The database user name. Required when authenticating using temporary credentials.
    secretManagerArn string
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    statementName string
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    withEvent boolean
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
    database str
    The name of the database. Required when authenticating using temporary credentials.
    sqls Sequence[str]
    List of SQL statement texts to run, each with a maximum length of 100,000.
    db_user str
    The database user name. Required when authenticating using temporary credentials.
    secret_manager_arn str
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    statement_name str
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    with_event bool
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
    database String
    The name of the database. Required when authenticating using temporary credentials.
    sqls List<String>
    List of SQL statement texts to run, each with a maximum length of 100,000.
    dbUser String
    The database user name. Required when authenticating using temporary credentials.
    secretManagerArn String
    The name or ARN of the secret that enables access to the database. Required when authenticating using Secrets Manager.
    statementName String
    The name of the SQL statement. You can name the SQL statement when you create it to identify the query.
    withEvent Boolean
    Indicates whether to send an event back to EventBridge after the SQL statement runs.
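
    A minimal sketch for a Redshift cluster target using temporary-credentials authentication (database plus dbUser); with Secrets Manager authentication you would set secretManagerArn instead. The cluster, statement, and ARNs are hypothetical.

    import * as aws from "@pulumi/aws";

    const redshiftExample = new aws.pipes.Pipe("redshift-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:redshift:us-east-1:123456789012:cluster:example-cluster",
        targetParameters: {
            redshiftDataParameters: {
                database: "dev",
                dbUser: "awsuser",
                sqls: ["SELECT 1"], // illustrative statement only
                statementName: "pipe-statement",
                withEvent: true,    // emit an EventBridge event when the statement finishes
            },
        },
    });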

    PipeTargetParametersSagemakerPipelineParameters, PipeTargetParametersSagemakerPipelineParametersArgs

    PipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter>
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
    PipelineParameters []PipeTargetParametersSagemakerPipelineParametersPipelineParameter
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
    pipelineParameters List<PipeTargetParametersSagemakerPipelineParametersPipelineParameter>
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
    pipelineParameters PipeTargetParametersSagemakerPipelineParametersPipelineParameter[]
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
    pipeline_parameters Sequence[PipeTargetParametersSagemakerPipelineParametersPipelineParameter]
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.
    pipelineParameters List<Property Map>
    List of Parameter names and values for SageMaker Model Building Pipeline execution. Detailed below.

    PipeTargetParametersSagemakerPipelineParametersPipelineParameter, PipeTargetParametersSagemakerPipelineParametersPipelineParameterArgs

    Name string
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    Value string
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
    Name string
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    Value string
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
    name String
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    value String
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
    name string
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    value string
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
    name str
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    value str
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
    name String
    Name of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 256.
    value String
    Value of parameter to start execution of a SageMaker Model Building Pipeline. Maximum length of 1024.
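
    A minimal sketch for a SageMaker Model Building Pipeline target; the pipeline ARN and the parameter name and value are hypothetical and must match parameters defined on the pipeline itself.

    import * as aws from "@pulumi/aws";

    const sagemakerExample = new aws.pipes.Pipe("sagemaker-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sagemaker:us-east-1:123456789012:pipeline/example-pipeline",
        targetParameters: {
            sagemakerPipelineParameters: {
                pipelineParameters: [
                    { name: "InputDataS3Uri", value: "s3://example-bucket/input/" }, // hypothetical pipeline parameter
                ],
            },
        },
    });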

    PipeTargetParametersSqsQueueParameters, PipeTargetParametersSqsQueueParametersArgs

    MessageDeduplicationId string
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    MessageGroupId string
    The FIFO message group ID to use as the target.
    MessageDeduplicationId string
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    MessageGroupId string
    The FIFO message group ID to use as the target.
    messageDeduplicationId String
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    messageGroupId String
    The FIFO message group ID to use as the target.
    messageDeduplicationId string
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    messageGroupId string
    The FIFO message group ID to use as the target.
    message_deduplication_id str
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    message_group_id str
    The FIFO message group ID to use as the target.
    messageDeduplicationId String
    This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
    messageGroupId String
    The FIFO message group ID to use as the target.
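
    A minimal sketch for a FIFO queue target; the ARNs are hypothetical, and the deduplication path assumes the source event carries a top-level messageId, as SQS source events do.

    import * as aws from "@pulumi/aws";

    const sqsFifoExample = new aws.pipes.Pipe("sqs-fifo-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:sqs:us-east-1:123456789012:target-queue.fifo",
        targetParameters: {
            sqsQueueParameters: {
                messageGroupId: "orders",
                messageDeduplicationId: "$.messageId", // JSON path into the source event (assumed shape)
            },
        },
    });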

    PipeTargetParametersStepFunctionStateMachineParameters, PipeTargetParametersStepFunctionStateMachineParametersArgs

    InvocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    InvocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType String
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType string
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocation_type str
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
    invocationType String
    Specify whether to invoke the function synchronously or asynchronously. Valid Values: REQUEST_RESPONSE, FIRE_AND_FORGET.
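
    A minimal sketch for a Step Functions state machine target; the ARNs are hypothetical. Standard workflows can only be started asynchronously, so FIRE_AND_FORGET is used here.

    import * as aws from "@pulumi/aws";

    const sfnExample = new aws.pipes.Pipe("sfn-target-example", {
        roleArn: "arn:aws:iam::123456789012:role/example-pipe-role",
        source: "arn:aws:sqs:us-east-1:123456789012:source-queue",
        target: "arn:aws:states:us-east-1:123456789012:stateMachine:example",
        targetParameters: {
            stepFunctionStateMachineParameters: {
                invocationType: "FIRE_AND_FORGET", // REQUEST_RESPONSE is limited to express workflows
            },
        },
    });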

    Import

    Using pulumi import, import pipes using the name. For example:

    $ pulumi import aws:pipes/pipe:Pipe example my-pipe
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.