aws-native.pipes.Pipe
We recommend new projects start with resources from the AWS provider.
Definition of AWS::Pipes::Pipe Resource Type
Example Usage
Example
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AwsNative = Pulumi.AwsNative;

return await Deployment.RunAsync(() =>
{
    var testPipe = new AwsNative.Pipes.Pipe("testPipe", new()
    {
        Name = "PipeCfnExample",
        RoleArn = "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
        Source = "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
        Enrichment = "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
        Target = "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
    });
});
package main

import (
    "github.com/pulumi/pulumi-aws-native/sdk/go/aws/pipes"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := pipes.NewPipe(ctx, "testPipe", &pipes.PipeArgs{
            Name:       pulumi.String("PipeCfnExample"),
            RoleArn:    pulumi.String("arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role"),
            Source:     pulumi.String("arn:aws:sqs:us-east-1:123456789123:pipeDemoSource"),
            Enrichment: pulumi.String("arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets"),
            Target:     pulumi.String("arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine"),
        })
        if err != nil {
            return err
        }
        return nil
    })
}
import pulumi
import pulumi_aws_native as aws_native

test_pipe = aws_native.pipes.Pipe("testPipe",
    name="PipeCfnExample",
    role_arn="arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source="arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment="arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target="arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine")
import * as pulumi from "@pulumi/pulumi";
import * as aws_native from "@pulumi/aws-native";

const testPipe = new aws_native.pipes.Pipe("testPipe", {
    name: "PipeCfnExample",
    roleArn: "arn:aws:iam::123456789123:role/Pipe-Dev-All-Targets-Dummy-Execution-Role",
    source: "arn:aws:sqs:us-east-1:123456789123:pipeDemoSource",
    enrichment: "arn:aws:execute-api:us-east-1:123456789123:53eo2i89p9/*/POST/pets",
    target: "arn:aws:states:us-east-1:123456789123:stateMachine:PipeTargetStateMachine",
});
Create Pipe Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipe(name: string, args: PipeArgs, opts?: CustomResourceOptions);
@overload
def Pipe(resource_name: str,
         args: PipeArgs,
         opts: Optional[ResourceOptions] = None)
@overload
def Pipe(resource_name: str,
         opts: Optional[ResourceOptions] = None,
         role_arn: Optional[str] = None,
         target: Optional[str] = None,
         source: Optional[str] = None,
         name: Optional[str] = None,
         kms_key_identifier: Optional[str] = None,
         log_configuration: Optional[PipeLogConfigurationArgs] = None,
         description: Optional[str] = None,
         enrichment_parameters: Optional[PipeEnrichmentParametersArgs] = None,
         enrichment: Optional[str] = None,
         source_parameters: Optional[PipeSourceParametersArgs] = None,
         tags: Optional[Mapping[str, str]] = None,
         desired_state: Optional[PipeRequestedPipeState] = None,
         target_parameters: Optional[PipeTargetParametersArgs] = None)
func NewPipe(ctx *Context, name string, args PipeArgs, opts ...ResourceOption) (*Pipe, error)
public Pipe(string name, PipeArgs args, CustomResourceOptions? opts = null)
type: aws-native:pipes:Pipe
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipeArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
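Putting the signatures above together, a minimal Python sketch of a constructor call that also passes resource options follows. The ARNs are placeholders, and protect is just one example from the options bag:

import pulumi
import pulumi_aws_native as aws_native

# Placeholder ARNs for illustration only.
pipe = aws_native.pipes.Pipe(
    "examplePipe",
    role_arn="arn:aws:iam::111111111111:role/example-pipe-role",
    source="arn:aws:sqs:us-east-1:111111111111:example-queue",
    target="arn:aws:states:us-east-1:111111111111:stateMachine:ExampleMachine",
    # The "bag of options" described above; protect blocks accidental deletion.
    opts=pulumi.ResourceOptions(protect=True),
)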
Pipe Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
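For example, the following two forms are equivalent ways to pass a nested object input in Python. This is a sketch assuming an SQS source; the PipeSourceSqsQueueParametersArgs class name and the batch size are illustrative, assumed to mirror the CloudFormation schema:

import pulumi_aws_native as aws_native

# Form 1: typed argument classes.
source_parameters = aws_native.pipes.PipeSourceParametersArgs(
    sqs_queue_parameters=aws_native.pipes.PipeSourceSqsQueueParametersArgs(
        batch_size=10,
    ),
)

# Form 2: an equivalent dictionary literal with snake_case keys.
source_parameters_dict = {
    "sqs_queue_parameters": {
        "batch_size": 10,
    },
}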
The Pipe resource accepts the following input properties:
- RoleArn string
- The ARN of the role that allows the pipe to send data to the target.
- Source string
- The ARN of the source resource.
- Target string
- The ARN of the target resource.
- Description string
- A description of the pipe.
- DesiredState Pulumi.AwsNative.Pipes.PipeRequestedPipeState
- The state the pipe should be in.
- Enrichment string
- The ARN of the enrichment resource.
- EnrichmentParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- KmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- LogConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeLogConfiguration
- The logging configuration settings for the pipe.
- Name string
- The name of the pipe.
- SourceParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceParameters
- The parameters required to set up a source for your pipe.
- Tags Dictionary<string, string>
- The list of key-value pairs to associate with the pipe.
- TargetParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- RoleArn string
- The ARN of the role that allows the pipe to send data to the target.
- Source string
- The ARN of the source resource.
- Target string
- The ARN of the target resource.
- Description string
- A description of the pipe.
- DesiredState PipeRequestedPipeState
- The state the pipe should be in.
- Enrichment string
- The ARN of the enrichment resource.
- EnrichmentParameters PipeEnrichmentParametersArgs
- The parameters required to set up enrichment on your pipe.
- KmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- LogConfiguration PipeLogConfigurationArgs
- The logging configuration settings for the pipe.
- Name string
- The name of the pipe.
- SourceParameters PipeSourceParametersArgs
- The parameters required to set up a source for your pipe.
- Tags map[string]string
- The list of key-value pairs to associate with the pipe.
- TargetParameters PipeTargetParametersArgs
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn String
- The ARN of the role that allows the pipe to send data to the target.
- source String
- The ARN of the source resource.
- target String
- The ARN of the target resource.
- description String
- A description of the pipe.
- desiredState PipeRequestedPipeState
- The state the pipe should be in.
- enrichment String
- The ARN of the enrichment resource.
- enrichmentParameters PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier String
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration PipeLogConfiguration
- The logging configuration settings for the pipe.
- name String
- The name of the pipe.
- sourceParameters PipeSourceParameters
- The parameters required to set up a source for your pipe.
- tags Map<String,String>
- The list of key-value pairs to associate with the pipe.
- targetParameters PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn string
- The ARN of the role that allows the pipe to send data to the target.
- source string
- The ARN of the source resource.
- target string
- The ARN of the target resource.
- description string
- A description of the pipe.
- desiredState PipeRequestedPipeState
- The state the pipe should be in.
- enrichment string
- The ARN of the enrichment resource.
- enrichmentParameters PipeEnrichmentParameters
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier string
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration PipeLogConfiguration
- The logging configuration settings for the pipe.
- name string
- The name of the pipe.
- sourceParameters PipeSourceParameters
- The parameters required to set up a source for your pipe.
- tags {[key: string]: string}
- The list of key-value pairs to associate with the pipe.
- targetParameters PipeTargetParameters
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- role_arn str
- The ARN of the role that allows the pipe to send data to the target.
- source str
- The ARN of the source resource.
- target str
- The ARN of the target resource.
- description str
- A description of the pipe.
- desired_state PipeRequestedPipeState
- The state the pipe should be in.
- enrichment str
- The ARN of the enrichment resource.
- enrichment_parameters PipeEnrichmentParametersArgs
- The parameters required to set up enrichment on your pipe.
- kms_key_identifier str
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- log_configuration PipeLogConfigurationArgs
- The logging configuration settings for the pipe.
- name str
- The name of the pipe.
- source_parameters PipeSourceParametersArgs
- The parameters required to set up a source for your pipe.
- tags Mapping[str, str]
- The list of key-value pairs to associate with the pipe.
- target_parameters PipeTargetParametersArgs
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
- roleArn String
- The ARN of the role that allows the pipe to send data to the target.
- source String
- The ARN of the source resource.
- target String
- The ARN of the target resource.
- description String
- A description of the pipe.
- desiredState "RUNNING" | "STOPPED"
- The state the pipe should be in.
- enrichment String
- The ARN of the enrichment resource.
- enrichmentParameters Property Map
- The parameters required to set up enrichment on your pipe.
- kmsKeyIdentifier String
- The identifier of the AWS KMS customer managed key for EventBridge to use, if you choose to use a customer managed key to encrypt pipe data. The identifier can be the key Amazon Resource Name (ARN), KeyId, key alias, or key alias ARN. To update a pipe that is using the default AWS owned key to use a customer managed key instead, or update a pipe that is using a customer managed key to use a different customer managed key, specify a customer managed key identifier. To update a pipe that is using a customer managed key to use the default AWS owned key, specify an empty string. For more information, see Managing keys in the AWS Key Management Service Developer Guide.
- logConfiguration Property Map
- The logging configuration settings for the pipe.
- name String
- The name of the pipe.
- sourceParameters Property Map
- The parameters required to set up a source for your pipe.
- tags Map<String>
- The list of key-value pairs to associate with the pipe.
- targetParameters Property Map
- The parameters required to set up a target for your pipe. For more information about pipe target parameters, including how to use dynamic path parameters, see Target parameters in the Amazon EventBridge User Guide.
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipe resource produces the following output properties:
- Arn string
- The ARN of the pipe.
- CreationTime string
- The time the pipe was created.
- CurrentState Pulumi.AwsNative.Pipes.PipeState
- The state the pipe is in.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- StateReason string
- The reason the pipe is in its current state.
- Arn string
- The ARN of the pipe.
- CreationTime string
- The time the pipe was created.
- CurrentState PipeStateEnum
- The state the pipe is in.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- StateReason string
- The reason the pipe is in its current state.
- arn String
- The ARN of the pipe.
- creationTime String
- The time the pipe was created.
- currentState PipeState
- The state the pipe is in.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime String
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason String
- The reason the pipe is in its current state.
- arn string
- The ARN of the pipe.
- creationTime string
- The time the pipe was created.
- currentState PipeState
- The state the pipe is in.
- id string
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime string
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason string
- The reason the pipe is in its current state.
- arn str
- The ARN of the pipe.
- creation_time str
- The time the pipe was created.
- current_state PipeState
- The state the pipe is in.
- id str
- The provider-assigned unique ID for this managed resource.
- last_modified_time str
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- state_reason str
- The reason the pipe is in its current state.
- arn String
- The ARN of the pipe.
- creationTime String
- The time the pipe was created.
- currentState "RUNNING" | "STOPPED" | "CREATING" | "UPDATING" | "DELETING" | "STARTING" | "STOPPING" | "CREATE_FAILED" | "UPDATE_FAILED" | "START_FAILED" | "STOP_FAILED" | "DELETE_FAILED" | "CREATE_ROLLBACK_FAILED" | "DELETE_ROLLBACK_FAILED" | "UPDATE_ROLLBACK_FAILED"
- The state the pipe is in.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime String
- When the pipe was last updated, in ISO-8601 format (YYYY-MM-DDThh:mm:ss.sTZD).
- stateReason String
- The reason the pipe is in its current state.
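Output properties can be used like any other Pulumi outputs. For instance, a stack could export the pipe's ARN and current state; this sketch reuses the test_pipe resource from the Python example above:

import pulumi

# Make selected outputs visible via `pulumi stack output`.
pulumi.export("pipe_arn", test_pipe.arn)
pulumi.export("pipe_state", test_pipe.current_state)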
Supporting Types
PipeAssignPublicIp, PipeAssignPublicIpArgs
- Enabled
- ENABLED
- Disabled
- DISABLED
- PipeAssignPublicIpEnabled
- ENABLED
- PipeAssignPublicIpDisabled
- DISABLED
- Enabled
- ENABLED
- Disabled
- DISABLED
- Enabled
- ENABLED
- Disabled
- DISABLED
- ENABLED
- ENABLED
- DISABLED
- DISABLED
- "ENABLED"
- ENABLED
- "DISABLED"
- DISABLED
PipeAwsVpcConfiguration, PipeAwsVpcConfigurationArgs
- Subnets List<string>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- AssignPublicIp Pulumi.AwsNative.Pipes.PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- SecurityGroups List<string>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- Subnets []string
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- AssignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- SecurityGroups []string
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets List<String>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups List<String>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets string[]
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups string[]
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets Sequence[str]
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assign_public_ip PipeAssignPublicIp
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- security_groups Sequence[str]
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
- subnets List<String>
- Specifies the subnets associated with the task. These subnets must all be in the same VPC. You can specify as many as 16 subnets.
- assignPublicIp "ENABLED" | "DISABLED"
- Specifies whether the task's elastic network interface receives a public IP address. You can specify ENABLED only when LaunchType in EcsParameters is set to FARGATE.
- securityGroups List<String>
- Specifies the security groups associated with the task. These security groups must all be in the same VPC. You can specify as many as five security groups. If you do not specify a security group, the default security group for the VPC is used.
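To show where this type sits, here is a rough Python sketch of an awsvpc configuration nested under an ECS task target. The surrounding class names (PipeTargetEcsTaskParametersArgs, PipeNetworkConfigurationArgs) and the string enum values are assumptions based on the AWS::Pipes::Pipe CloudFormation schema, and the ARN and subnet ID are placeholders:

import pulumi_aws_native as aws_native

# Illustrative only: a Fargate ECS task target with a public IP.
target_parameters = aws_native.pipes.PipeTargetParametersArgs(
    ecs_task_parameters=aws_native.pipes.PipeTargetEcsTaskParametersArgs(
        task_definition_arn="arn:aws:ecs:us-east-1:111111111111:task-definition/example:1",
        launch_type="FARGATE",  # ENABLED below is only valid with FARGATE
        network_configuration=aws_native.pipes.PipeNetworkConfigurationArgs(
            awsvpc_configuration=aws_native.pipes.PipeAwsVpcConfigurationArgs(
                subnets=["subnet-0123456789abcdef0"],
                assign_public_ip="ENABLED",
            ),
        ),
    ),
)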
PipeBatchArrayProperties, PipeBatchArrayPropertiesArgs
- Size int
- The size of the array, if this is an array batch job.
- Size int
- The size of the array, if this is an array batch job.
- size Integer
- The size of the array, if this is an array batch job.
- size number
- The size of the array, if this is an array batch job.
- size int
- The size of the array, if this is an array batch job.
- size Number
- The size of the array, if this is an array batch job.
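As a quick sketch, assuming the Args class follows the same naming pattern as the other supporting types:

import pulumi_aws_native as aws_native

# Illustrative only: submit the Batch job as an array job with 100 children.
array_properties = aws_native.pipes.PipeBatchArrayPropertiesArgs(size=100)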
PipeBatchContainerOverrides, PipeBatchContainerOverridesArgs
- Command List<string>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchResourceRequirement>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- Command []string
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- Environment []PipeBatchEnvironmentVariable
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- InstanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- ResourceRequirements []PipeBatchResourceRequirement
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment List<PipeBatchEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<PipeBatchResourceRequirement>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command string[]
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment PipeBatchEnvironmentVariable[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType string
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements PipeBatchResourceRequirement[]
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command Sequence[str]
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment Sequence[PipeBatchEnvironmentVariable]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instance_type str
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resource_requirements Sequence[PipeBatchResourceRequirement]
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition.
- environment List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. Environment variables cannot start with "AWS Batch"; this naming convention is reserved for variables that AWS Batch sets.
- instanceType String
- The instance type to use for a multi-node parallel job. This parameter isn't applicable to single-node container jobs or jobs that run on Fargate resources, and shouldn't be provided.
- resourceRequirements List<Property Map>
- The type and amount of resources to assign to a container. This overrides the settings in the job definition. The supported resources include GPU, MEMORY, and VCPU.
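A hedged Python sketch of container overrides, assuming the Args class names mirror the type names above; the command and environment variable are placeholders:

import pulumi_aws_native as aws_native

# Illustrative only: override the container command and add one environment variable.
container_overrides = aws_native.pipes.PipeBatchContainerOverridesArgs(
    command=["python", "handler.py"],
    environment=[
        aws_native.pipes.PipeBatchEnvironmentVariableArgs(
            name="STAGE",  # avoid the reserved "AWS Batch" naming convention
            value="production",
        ),
    ],
)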
PipeBatchEnvironmentVariable, PipeBatchEnvironmentVariableArgs
PipeBatchJobDependency, PipeBatchJobDependencyArgs
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type Pulumi.AwsNative.Pipes.PipeBatchJobDependencyType
- The type of the job dependency.
- JobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- Type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId string
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- job_id str
- The job ID of the AWS Batch job that's associated with this dependency.
- type PipeBatchJobDependencyType
- The type of the job dependency.
- jobId String
- The job ID of the AWS Batch job that's associated with this dependency.
- type "N_TO_N" | "SEQUENTIAL"
- The type of the job dependency.
PipeBatchJobDependencyType, PipeBatchJobDependencyTypeArgs
- NToN
- N_TO_N
- Sequential
- SEQUENTIAL
- PipeBatchJobDependencyTypeNToN
- N_TO_N
- PipeBatchJobDependencyTypeSequential
- SEQUENTIAL
- NToN
- N_TO_N
- Sequential
- SEQUENTIAL
- NToN
- N_TO_N
- Sequential
- SEQUENTIAL
- N_TO_N
- N_TO_N
- SEQUENTIAL
- SEQUENTIAL
- "N_TO_N"
- N_TO_N
- "SEQUENTIAL"
- SEQUENTIAL
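For example, a sequential dependency on another Batch job might be declared as follows; the job ID is a placeholder, and the enum member name is assumed to follow the usual Python SDK convention:

import pulumi_aws_native as aws_native

# Illustrative only: wait for another job to complete before this one starts.
depends_on = [
    aws_native.pipes.PipeBatchJobDependencyArgs(
        job_id="0123abcd-example-job-id",
        type=aws_native.pipes.PipeBatchJobDependencyType.SEQUENTIAL,
    ),
]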
PipeBatchResourceRequirement, PipeBatchResourceRequirementArgs
- Type Pulumi.AwsNative.Pipes.PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- Value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512 - VCPU = 0.25
    - value = 1024 - VCPU = 0.25 or 0.5
    - value = 2048 - VCPU = 0.25, 0.5, or 1
    - value = 3072 - VCPU = 0.5 or 1
    - value = 4096 - VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168 - VCPU = 1 or 2
    - value = 8192 - VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
    - value = 16384 - VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
    - value = 20480, 24576, or 28672 - VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440 - VCPU = 8
    - value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25 - MEMORY = 512, 1024, or 2048
    - value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
    - value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- Type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- Value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512 - VCPU = 0.25
    - value = 1024 - VCPU = 0.25 or 0.5
    - value = 2048 - VCPU = 0.25, 0.5, or 1
    - value = 3072 - VCPU = 0.5 or 1
    - value = 4096 - VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168 - VCPU = 1 or 2
    - value = 8192 - VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
    - value = 16384 - VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
    - value = 20480, 24576, or 28672 - VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440 - VCPU = 8
    - value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25 - MEMORY = 512, 1024, or 2048
    - value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
    - value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value String
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512 - VCPU = 0.25
    - value = 1024 - VCPU = 0.25 or 0.5
    - value = 2048 - VCPU = 0.25, 0.5, or 1
    - value = 3072 - VCPU = 0.5 or 1
    - value = 4096 - VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168 - VCPU = 1 or 2
    - value = 8192 - VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
    - value = 16384 - VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
    - value = 20480, 24576, or 28672 - VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440 - VCPU = 8
    - value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25 - MEMORY = 512, 1024, or 2048
    - value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
    - value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value string
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512 - VCPU = 0.25
    - value = 1024 - VCPU = 0.25 or 0.5
    - value = 2048 - VCPU = 0.25, 0.5, or 1
    - value = 3072 - VCPU = 0.5 or 1
    - value = 4096 - VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168 - VCPU = 1 or 2
    - value = 8192 - VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
    - value = 16384 - VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
    - value = 20480, 24576, or 28672 - VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440 - VCPU = 8
    - value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25 - MEMORY = 512, 1024, or 2048
    - value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
    - value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type PipeBatchResourceRequirementType
- The type of resource to assign to a container. The supported resources include GPU, MEMORY, and VCPU.
- value str
- The quantity of the specified resource to reserve for the container. The values vary based on the type specified.
  - type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on. GPUs aren't available for jobs that are running on Fargate resources.
  - type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to Memory in the Create a container section of the Docker Remote API and the --memory option to docker run. You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs; it must be specified for each node at least once. If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide. For jobs that are running on Fargate resources, value is the hard limit (in MiB) and must match one of the supported values, and the VCPU value must be one of the values supported for that memory value:
    - value = 512 - VCPU = 0.25
    - value = 1024 - VCPU = 0.25 or 0.5
    - value = 2048 - VCPU = 0.25, 0.5, or 1
    - value = 3072 - VCPU = 0.5 or 1
    - value = 4096 - VCPU = 0.5, 1, or 2
    - value = 5120, 6144, or 7168 - VCPU = 1 or 2
    - value = 8192 - VCPU = 1, 2, 4, or 8
    - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 - VCPU = 2 or 4
    - value = 16384 - VCPU = 2, 4, or 8
    - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 - VCPU = 4
    - value = 20480, 24576, or 28672 - VCPU = 4 or 8
    - value = 36864, 45056, 53248, or 61440 - VCPU = 8
    - value = 32768, 40960, 49152, or 57344 - VCPU = 8 or 16
    - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 - VCPU = 16
  - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to CpuShares in the Create a container section of the Docker Remote API and the --cpu-shares option to docker run. Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once. The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference. For jobs that are running on Fargate resources, value must match one of the supported values, and the MEMORY value must be one of the values supported for that VCPU value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16:
    - value = 0.25 - MEMORY = 512, 1024, or 2048
    - value = 0.5 - MEMORY = 1024, 2048, 3072, or 4096
    - value = 1 - MEMORY = 2048, 3072, 4096, 5120, 6144, 7168, or 8192
    - value = 2 - MEMORY = 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384
    - value = 4 - MEMORY = 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720
    - value = 8 - MEMORY = 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440
    - value = 16 - MEMORY = 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
- type "GPU" | "MEMORY" | "VCPU"
- The type of resource to assign to a container. The supported resources include
GPU
,MEMORY
, andVCPU
. - value String
The quantity of the specified resource to reserve for the container. The values vary based on the
type
specified.- type="GPU" - The number of physical GPUs to reserve for the container. Make sure that the number of GPUs reserved for all containers in a job doesn't exceed the number of available GPUs on the compute resource that the job is launched on.
GPUs aren't available for jobs that are running on Fargate resources.
- type="MEMORY" - The memory hard limit (in MiB) present to the container. This parameter is supported for jobs that are running on EC2 resources. If your container attempts to exceed the memory specified, the container is terminated. This parameter maps to
Memory
in the Create a container section of the Docker Remote API and the--memory
option to docker run . You must specify at least 4 MiB of memory for a job. This is required but can be specified in several places for multi-node parallel (MNP) jobs. It must be specified for each node at least once. This parameter maps toMemory
in the Create a container section of the Docker Remote API and the--memory
option to docker run .
If you're trying to maximize your resource utilization by providing your jobs as much memory as possible for a particular instance type, see Memory management in the AWS Batch User Guide .
For jobs that are running on Fargate resources, then
value
is the hard limit (in MiB), and must match one of the supported values and theVCPU
values must be one of the values supported for that memory value.- value = 512 -
VCPU
= 0.25 - value = 1024 -
VCPU
= 0.25 or 0.5 - value = 2048 -
VCPU
= 0.25, 0.5, or 1 - value = 3072 -
VCPU
= 0.5, or 1 - value = 4096 -
VCPU
= 0.5, 1, or 2 - value = 5120, 6144, or 7168 -
VCPU
= 1 or 2 - value = 8192 -
VCPU
= 1, 2, 4, or 8 - value = 9216, 10240, 11264, 12288, 13312, 14336, or 15360 -
VCPU
= 2 or 4 - value = 16384 -
VCPU
= 2, 4, or 8 - value = 17408, 18432, 19456, 21504, 22528, 23552, 25600, 26624, 27648, 29696, or 30720 -
VCPU
= 4 - value = 20480, 24576, or 28672 -
VCPU
= 4 or 8 - value = 36864, 45056, 53248, or 61440 -
VCPU
= 8 - value = 32768, 40960, 49152, or 57344 -
VCPU
= 8 or 16 - value = 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880 -
VCPU
= 16 - type="VCPU" - The number of vCPUs reserved for the container. This parameter maps to
CpuShares
in the Create a container section of the Docker Remote API and the--cpu-shares
option to docker run . Each vCPU is equivalent to 1,024 CPU shares. For EC2 resources, you must specify at least one vCPU. This is required but can be specified in several places; it must be specified for each node at least once.
The default for the Fargate On-Demand vCPU resource count quota is 6 vCPUs. For more information about Fargate quotas, see AWS Fargate quotas in the AWS General Reference .
For jobs that are running on Fargate resources, then
value
must match one of the supported values and theMEMORY
values must be one of the values supported for thatVCPU
value. The supported values are 0.25, 0.5, 1, 2, 4, 8, and 16- value = 0.25 -
MEMORY
= 512, 1024, or 2048 - value = 0.5 -
MEMORY
= 1024, 2048, 3072, or 4096 - value = 1 -
MEMORY
= 2048, 3072, 4096, 5120, 6144, 7168, or 8192 - value = 2 -
MEMORY
= 4096, 5120, 6144, 7168, 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, or 16384 - value = 4 -
MEMORY
= 8192, 9216, 10240, 11264, 12288, 13312, 14336, 15360, 16384, 17408, 18432, 19456, 20480, 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, or 30720 - value = 8 -
MEMORY
= 16384, 20480, 24576, 28672, 32768, 36864, 40960, 45056, 49152, 53248, 57344, or 61440 - value = 16 -
MEMORY
= 32768, 40960, 49152, 57344, 65536, 73728, 81920, 90112, 98304, 106496, 114688, or 122880
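For example, one of the valid Fargate pairings from the tables above (1 vCPU with 2048 MiB of memory) could be expressed like this; the enum member names are assumed to follow the usual Python SDK convention:

import pulumi_aws_native as aws_native

# Illustrative only: 1 vCPU and 2048 MiB is a supported Fargate combination.
resource_requirements = [
    aws_native.pipes.PipeBatchResourceRequirementArgs(
        type=aws_native.pipes.PipeBatchResourceRequirementType.VCPU,
        value="1",
    ),
    aws_native.pipes.PipeBatchResourceRequirementArgs(
        type=aws_native.pipes.PipeBatchResourceRequirementType.MEMORY,
        value="2048",
    ),
]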
PipeBatchResourceRequirementType, PipeBatchResourceRequirementTypeArgs
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- PipeBatchResourceRequirementTypeGpu
- GPU
- PipeBatchResourceRequirementTypeMemory
- MEMORY
- PipeBatchResourceRequirementTypeVcpu
- VCPU
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- Gpu
- GPU
- Memory
- MEMORY
- Vcpu
- VCPU
- GPU
- GPU
- MEMORY
- MEMORY
- VCPU
- VCPU
- "GPU"
- GPU
- "MEMORY"
- MEMORY
- "VCPU"
- VCPU
PipeBatchRetryStrategy, PipeBatchRetryStrategyArgs
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- Attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts Integer
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts int
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
- attempts Number
- The number of times to move a job to the RUNNABLE status. If the value of attempts is greater than one, the job is retried on failure the same number of attempts as the value.
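As a minimal sketch, the retry strategy slots into the same batchJobParameters block used in the earlier example; with attempts set to 3, a failed job is moved back to RUNNABLE up to three times (names are illustrative):
// Sketch: slots into targetParameters.batchJobParameters of a pipe.
const batchJobParameters = {
    jobDefinition: "demo-job-definition", // placeholder
    jobName: "pipe-demo-job",
    retryStrategy: { attempts: 3 }, // retry a failed job up to 3 times
};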
PipeCapacityProviderStrategyItem, PipeCapacityProviderStrategyItemArgs
- CapacityProvider string
- The short name of the capacity provider.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- CapacityProvider string
- The short name of the capacity provider.
- Base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- Weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider String
- The short name of the capacity provider.
- base Integer
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight Integer
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider string
- The short name of the capacity provider.
- base number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacity_provider str
- The short name of the capacity provider.
- base int
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight int
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
- capacityProvider String
- The short name of the capacity provider.
- base Number
- The base value designates how many tasks, at a minimum, to run on the specified capacity provider. Only one capacity provider in a capacity provider strategy can have a base defined. If no value is specified, the default value of 0 is used.
- weight Number
- The weight value designates the relative percentage of the total number of tasks launched that should use the specified capacity provider. The weight value is taken into consideration after the base value, if defined, is satisfied.
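For example, a strategy can guarantee a baseline task count on one provider and split the remainder by weight; only one entry may define a base. A minimal sketch, assuming the standard FARGATE and FARGATE_SPOT capacity providers:
// Sketch: the first two tasks run on FARGATE; additional tasks are split
// 1:3 between FARGATE and FARGATE_SPOT. Only one entry may define a base.
const capacityProviderStrategy = [
    { capacityProvider: "FARGATE", base: 2, weight: 1 },
    { capacityProvider: "FARGATE_SPOT", weight: 3 },
];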
PipeCloudwatchLogsLogDestination, PipeCloudwatchLogsLogDestinationArgs
- LogGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- LogGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn string
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- log_group_arn str
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
- logGroupArn String
- The AWS Resource Name (ARN) for the CloudWatch log group to which EventBridge sends the log records.
PipeDeadLetterConfig, PipeDeadLetterConfigArgs
- Arn string
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
- Arn string
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
- arn String
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
- arn string
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
- arn str
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
- arn String
The ARN of the specified target for the dead-letter queue.
For Amazon Kinesis stream and Amazon DynamoDB stream sources, specify either an Amazon SNS topic or Amazon SQS queue ARN.
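A minimal sketch of attaching a dead-letter queue to a Kinesis stream source; the deadLetterConfig block sits under the stream's source parameters, and the queue ARN is a placeholder:
// Sketch: events that exhaust their retries are routed to an SQS queue.
const sourceParameters = {
    kinesisStreamParameters: {
        startingPosition: "LATEST",
        deadLetterConfig: {
            arn: "arn:aws:sqs:us-east-1:123456789123:pipe-dlq", // placeholder
        },
    },
};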
PipeDimensionMapping, PipeDimensionMappingArgs
- DimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- DimensionValue string
- Dynamic path to the dimension value in the source event.
- DimensionValueType Pulumi.AwsNative.Pipes.PipeDimensionValueType
- The data type of the dimension for the time-series data.
- DimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- DimensionValue string
- Dynamic path to the dimension value in the source event.
- DimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName String
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue String
- Dynamic path to the dimension value in the source event.
- dimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName string
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue string
- Dynamic path to the dimension value in the source event.
- dimensionValueType PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimension_name str
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimension_value str
- Dynamic path to the dimension value in the source event.
- dimension_value_type PipeDimensionValueType
- The data type of the dimension for the time-series data.
- dimensionName String
- The metadata attributes of the time series. For example, the name and Availability Zone of an Amazon EC2 instance or the name of the manufacturer of a wind turbine are dimensions.
- dimensionValue String
- Dynamic path to the dimension value in the source event.
- dimensionValueType "VARCHAR"
- The data type of the dimension for the time-series data.
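A minimal sketch of one dimension mapping for a Timestream for LiveAnalytics target; the JSON path and dimension name are illustrative:
// Sketch: fill the "availability_zone" dimension from the source event.
const dimensionMapping = {
    dimensionName: "availability_zone",
    dimensionValue: "$.detail.az", // dynamic path into the source event
    dimensionValueType: "VARCHAR", // the only supported value (see the enum below)
};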
PipeDimensionValueType, PipeDimensionValueTypeArgs
- Varchar
- VARCHAR
- PipeDimensionValueTypeVarchar
- VARCHAR
- Varchar
- VARCHAR
- Varchar
- VARCHAR
- VARCHAR
- VARCHAR
- "VARCHAR"
- VARCHAR
PipeDynamoDbStreamStartPosition, PipeDynamoDbStreamStartPositionArgs
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- PipeDynamoDbStreamStartPositionTrimHorizon
- TRIM_HORIZON
- PipeDynamoDbStreamStartPositionLatest
- LATEST
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
PipeEcsContainerOverride, PipeEcsContainerOverrideArgs
- Command List<string>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- EnvironmentFiles List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- ResourceRequirements List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- Command []string
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- Cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- Environment []PipeEcsEnvironmentVariable
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- EnvironmentFiles []PipeEcsEnvironmentFile
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- Memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- MemoryReservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- Name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- ResourceRequirements []PipeEcsResourceRequirement
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Integer
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment List<PipeEcsEnvironmentVariable>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles List<PipeEcsEnvironmentFile>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory Integer
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Integer
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements List<PipeEcsResourceRequirement>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command string[]
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment PipeEcsEnvironmentVariable[]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles PipeEcsEnvironmentFile[]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name string
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements PipeEcsResourceRequirement[]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command Sequence[str]
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu int
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment Sequence[PipeEcsEnvironmentVariable]
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environment_files Sequence[PipeEcsEnvironmentFile]
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory int
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memory_reservation int
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name str
- The name of the container that receives the override. This parameter is required if any override is specified.
- resource_requirements Sequence[PipeEcsResourceRequirement]
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
- command List<String>
- The command to send to the container that overrides the default command from the Docker image or the task definition. You must also specify a container name.
- cpu Number
- The number of cpu units reserved for the container, instead of the default value from the task definition. You must also specify a container name.
- environment List<Property Map>
- The environment variables to send to the container. You can add new environment variables, which are added to the container at launch, or you can override the existing environment variables from the Docker image or the task definition. You must also specify a container name.
- environmentFiles List<Property Map>
- A list of files containing the environment variables to pass to a container, instead of the value from the container definition.
- memory Number
- The hard limit (in MiB) of memory to present to the container, instead of the default value from the task definition. If your container attempts to exceed the memory specified here, the container is killed. You must also specify a container name.
- memoryReservation Number
- The soft limit (in MiB) of memory to reserve for the container, instead of the default value from the task definition. You must also specify a container name.
- name String
- The name of the container that receives the override. This parameter is required if any override is specified.
- resourceRequirements List<Property Map>
- The type and amount of a resource to assign to a container, instead of the default value from the task definition. The only supported resource is a GPU.
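A minimal sketch of a single container override; note that a container name is required whenever any per-container field is overridden (all values illustrative):
// Sketch: override one container's command, memory hard limit, and environment.
const containerOverride = {
    name: "app", // required when any override is set
    command: ["node", "worker.js"],
    memory: 1024, // hard limit in MiB; exceeding it kills the container
    environment: [{ name: "MODE", value: "pipe" }],
};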
PipeEcsEnvironmentFile, PipeEcsEnvironmentFileArgs
- Type Pulumi.AwsNative.Pipes.PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- Type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- Value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value String
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value string
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
- type PipeEcsEnvironmentFileType
- The file type to use. The only supported value is s3.
- value str
- The Amazon Resource Name (ARN) of the Amazon S3 object containing the environment variable file.
PipeEcsEnvironmentFileType, PipeEcsEnvironmentFileTypeArgs
- S3
- s3
- PipeEcsEnvironmentFileTypeS3
- s3
- S3
- s3
- S3
- s3
- S3
- s3
- "s3"
- s3
PipeEcsEnvironmentVariable, PipeEcsEnvironmentVariableArgs
PipeEcsEphemeralStorage, PipeEcsEphemeralStorageArgs
- SizeInGiB int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- SizeInGiB int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB Integer
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- size_in_gi_b int
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
- sizeInGiB Number
- The total amount, in GiB, of ephemeral storage to set for the task. The minimum supported value is 21 GiB and the maximum supported value is 200 GiB.
PipeEcsInferenceAcceleratorOverride, PipeEcsInferenceAcceleratorOverrideArgs
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- DeviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- DeviceType string
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
- deviceName string
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType string
- The Elastic Inference accelerator type to use.
- device_name str
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- device_type str
- The Elastic Inference accelerator type to use.
- deviceName String
- The Elastic Inference accelerator device name to override for the task. This parameter must match a deviceName specified in the task definition.
- deviceType String
- The Elastic Inference accelerator type to use.
PipeEcsResourceRequirement, PipeEcsResourceRequirementArgs
- Type Pulumi.AwsNative.Pipes.PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- Value string
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- Type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- Value string
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value String
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value string
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type PipeEcsResourceRequirementType
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value str
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
- type "GPU" | "InferenceAccelerator"
- The type of resource to assign to a container. The supported values are GPU or InferenceAccelerator.
- value String
The value for the specified resource type.
If the GPU type is used, the value is the number of physical GPUs the Amazon ECS container agent reserves for the container. The number of GPUs that's reserved for all containers in a task can't exceed the number of available GPUs on the container instance that the task is launched on.
If the InferenceAccelerator type is used, the value matches the deviceName for an InferenceAccelerator specified in a task definition.
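A minimal sketch of the two requirement shapes; values are strings per the schema above, and the deviceName is illustrative:
// Sketch: reserve one physical GPU, or bind an Elastic Inference accelerator
// whose value matches a deviceName declared in the task definition.
const gpuRequirement = { type: "GPU", value: "1" };
const acceleratorRequirement = { type: "InferenceAccelerator", value: "device_1" }; // placeholder deviceName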
PipeEcsResourceRequirementType, PipeEcsResourceRequirementTypeArgs
- Gpu
- GPU
- InferenceAccelerator
- InferenceAccelerator
- PipeEcsResourceRequirementTypeGpu
- GPU
- PipeEcsResourceRequirementTypeInferenceAccelerator
- InferenceAccelerator
- Gpu
- GPU
- InferenceAccelerator
- InferenceAccelerator
- Gpu
- GPU
- InferenceAccelerator
- InferenceAccelerator
- GPU
- GPU
- INFERENCE_ACCELERATOR
- InferenceAccelerator
- "GPU"
- GPU
- "InferenceAccelerator"
- InferenceAccelerator
PipeEcsTaskOverride, PipeEcsTaskOverrideArgs
- ContainerOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsContainerOverride>
- One or more container overrides that are sent to a task.
- Cpu string
- The cpu override for the task.
- EphemeralStorage Pulumi.AwsNative.Pipes.Inputs.PipeEcsEphemeralStorage
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- InferenceAcceleratorOverrides List<Pulumi.AwsNative.Pipes.Inputs.PipeEcsInferenceAcceleratorOverride>
- The Elastic Inference accelerator override for the task.
- Memory string
- The memory override for the task.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- ContainerOverrides []PipeEcsContainerOverride
- One or more container overrides that are sent to a task.
- Cpu string
- The cpu override for the task.
- EphemeralStorage PipeEcsEphemeralStorage
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- ExecutionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- InferenceAcceleratorOverrides []PipeEcsInferenceAcceleratorOverride
- The Elastic Inference accelerator override for the task.
- Memory string
- The memory override for the task.
- TaskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides List<PipeEcsContainerOverride>
- One or more container overrides that are sent to a task.
- cpu String
- The cpu override for the task.
- ephemeralStorage PipeEcsEphemeralStorage
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides List<PipeEcsInferenceAcceleratorOverride>
- The Elastic Inference accelerator override for the task.
- memory String
- The memory override for the task.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides PipeEcsContainerOverride[]
- One or more container overrides that are sent to a task.
- cpu string
- The cpu override for the task.
- ephemeralStorage PipeEcsEphemeralStorage
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- executionRoleArn string
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides PipeEcsInferenceAcceleratorOverride[]
- The Elastic Inference accelerator override for the task.
- memory string
- The memory override for the task.
- taskRoleArn string
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- container_overrides Sequence[PipeEcsContainerOverride]
- One or more container overrides that are sent to a task.
- cpu str
- The cpu override for the task.
- ephemeral_storage PipeEcsEphemeralStorage
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- execution_role_arn str
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inference_accelerator_overrides Sequence[PipeEcsInferenceAcceleratorOverride]
- The Elastic Inference accelerator override for the task.
- memory str
- The memory override for the task.
- task_role_arn str
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
- containerOverrides List<Property Map>
- One or more container overrides that are sent to a task.
- cpu String
- The cpu override for the task.
- ephemeralStorage Property Map
The ephemeral storage setting override for the task.
This parameter is only supported for tasks hosted on Fargate that use the following platform versions:
- Linux platform version 1.4.0 or later.
- Windows platform version 1.0.0 or later.
- executionRoleArn String
- The Amazon Resource Name (ARN) of the task execution IAM role override for the task. For more information, see Amazon ECS task execution IAM role in the Amazon Elastic Container Service Developer Guide.
- inferenceAcceleratorOverrides List<Property Map>
- The Elastic Inference accelerator override for the task.
- memory String
- The memory override for the task.
- taskRoleArn String
- The Amazon Resource Name (ARN) of the IAM role that containers in this task can assume. All containers in this task are granted the permissions that are specified in this role. For more information, see IAM Role for Tasks in the Amazon Elastic Container Service Developer Guide.
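Putting the pieces together, a minimal sketch of a task override for an ECS task target; the role ARN and sizes are illustrative, and the ephemeral storage override requires the Fargate platform versions noted above:
// Sketch: a task-level override combining a container override with task-wide
// cpu/memory, ephemeral storage, and an assumed task role.
const ecsTaskOverride = {
    containerOverrides: [{ name: "app", command: ["node", "worker.js"] }],
    cpu: "1024",
    memory: "2048",
    ephemeralStorage: { sizeInGiB: 50 }, // must be between 21 and 200 GiB
    taskRoleArn: "arn:aws:iam::123456789123:role/pipe-task-role", // placeholder
};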
PipeEnrichmentHttpParameters, PipeEnrichmentHttpParametersArgs
- HeaderParameters Dictionary<string, string>
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues List<string>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters Dictionary<string, string>
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- HeaderParameters map[string]string
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues []string
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters map[string]string
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String,String>
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String,String>
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters {[key: string]: string}
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues string[]
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters {[key: string]: string}
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- header_parameters Mapping[str, str]
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- path_parameter_values Sequence[str]
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- query_string_parameters Mapping[str, str]
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String>
- The headers that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String>
- The path parameter values to be used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String>
- The query string keys/values that need to be sent as part of request invoking the API Gateway REST API or EventBridge ApiDestination.
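A minimal sketch; the single path parameter value fills the "*" wildcard in an execute-api enrichment ARN, and the header and query values are illustrative:
// Sketch: extra request data sent when invoking an API Gateway REST API or
// EventBridge ApiDestination enrichment.
const enrichmentHttpParameters = {
    headerParameters: { "x-pipe-source": "demo" },
    pathParameterValues: ["prod"], // fills the "*" stage wildcard in the ARN
    queryStringParameters: { include: "details" },
};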
PipeEnrichmentParameters, PipeEnrichmentParametersArgs
- HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeEnrichmentHttpParameters
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- InputTemplate string
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
- HttpParameters PipeEnrichmentHttpParameters
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- InputTemplate string
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
- httpParameters PipeEnrichmentHttpParameters
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate String
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
- httpParameters PipeEnrichmentHttpParameters
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate string
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
- http_parameters PipeEnrichmentHttpParameters
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- input_template str
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
- httpParameters Property Map
Contains the HTTP parameters to use when the target is an API Gateway REST endpoint or EventBridge ApiDestination.
If you specify an API Gateway REST API or EventBridge ApiDestination as a target, you can use this parameter to specify headers, path parameters, and query string keys/values as part of your target invoking request. If you're using ApiDestinations, the corresponding Connection can also have these values configured. In case of any conflicting keys, values from the Connection take precedence.
- inputTemplate String
Valid JSON text passed to the enrichment. In this case, nothing from the event itself is passed to the enrichment. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format.
To remove an input template, specify an empty string.
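A sketch of a static input template. The "<$.path>" placeholder syntax for pulling fields out of the source event is an assumption here; verify it against the EventBridge Pipes input-transformation documentation before relying on it.
// Sketch: pass a fixed JSON document to the enrichment instead of the event.
// The "<$.body.message>" placeholder syntax is assumed, not confirmed.
const enrichmentParameters = {
    inputTemplate: `{"query": "<$.body.message>"}`,
};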
PipeEpochTimeUnit, PipeEpochTimeUnitArgs
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- PipeEpochTimeUnitMilliseconds
- MILLISECONDS
- PipeEpochTimeUnitSeconds
- SECONDS
- PipeEpochTimeUnitMicroseconds
- MICROSECONDS
- PipeEpochTimeUnitNanoseconds
- NANOSECONDS
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- Milliseconds
- MILLISECONDS
- Seconds
- SECONDS
- Microseconds
- MICROSECONDS
- Nanoseconds
- NANOSECONDS
- MILLISECONDS
- MILLISECONDS
- SECONDS
- SECONDS
- MICROSECONDS
- MICROSECONDS
- NANOSECONDS
- NANOSECONDS
- "MILLISECONDS"
- MILLISECONDS
- "SECONDS"
- SECONDS
- "MICROSECONDS"
- MICROSECONDS
- "NANOSECONDS"
- NANOSECONDS
PipeFilter, PipeFilterArgs
- Pattern string
- The event pattern.
- Pattern string
- The event pattern.
- pattern String
- The event pattern.
- pattern string
- The event pattern.
- pattern str
- The event pattern.
- pattern String
- The event pattern.
PipeFilterCriteria, PipeFilterCriteriaArgs
- Filters List<Pulumi.AwsNative.Pipes.Inputs.PipeFilter>
- The event patterns.
- Filters []PipeFilter
- The event patterns.
- filters List<PipeFilter>
- The event patterns.
- filters PipeFilter[]
- The event patterns.
- filters Sequence[PipeFilter]
- The event patterns.
- filters List<Property Map>
- The event patterns.
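Patterns are standard EventBridge event patterns passed as JSON strings. A minimal sketch that only lets events whose body carries status "ERROR" through to the enrichment and target (the body shape is illustrative):
// Sketch: drop every event whose body.status is not "ERROR".
const filterCriteria = {
    filters: [
        { pattern: JSON.stringify({ body: { status: ["ERROR"] } }) },
    ],
};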
PipeFirehoseLogDestination, PipeFirehoseLogDestinationArgs
- DeliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- DeliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn string
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- delivery_stream_arn str
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
- deliveryStreamArn String
- The Amazon Resource Name (ARN) of the Firehose delivery stream to which EventBridge delivers the pipe log records.
PipeIncludeExecutionDataOption, PipeIncludeExecutionDataOptionArgs
- All
- ALL
- PipeIncludeExecutionDataOptionAll
- ALL
- All
- ALL
- All
- ALL
- ALL
- ALL
- "ALL"
- ALL
PipeKinesisStreamStartPosition, PipeKinesisStreamStartPositionArgs
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp
- AT_TIMESTAMP
- PipeKinesisStreamStartPositionTrimHorizon
- TRIM_HORIZON
- PipeKinesisStreamStartPositionLatest
- LATEST
- PipeKinesisStreamStartPositionAtTimestamp
- AT_TIMESTAMP
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp
- AT_TIMESTAMP
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- AtTimestamp
- AT_TIMESTAMP
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- AT_TIMESTAMP
- AT_TIMESTAMP
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
- "AT_TIMESTAMP"
- AT_TIMESTAMP
PipeLaunchType, PipeLaunchTypeArgs
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- PipeLaunchTypeEc2
- EC2
- PipeLaunchTypeFargate
- FARGATE
- PipeLaunchTypeExternal
- EXTERNAL
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- Ec2
- EC2
- Fargate
- FARGATE
- External
- EXTERNAL
- EC2
- EC2
- FARGATE
- FARGATE
- EXTERNAL
- EXTERNAL
- "EC2"
- EC2
- "FARGATE"
- FARGATE
- "EXTERNAL"
- EXTERNAL
PipeLogConfiguration, PipeLogConfigurationArgs
- CloudwatchLogsLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- FirehoseLogDestination Pulumi.AwsNative.Pipes.Inputs.PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- IncludeExecutionData List<Pulumi.AwsNative.Pipes.PipeIncludeExecutionDataOption>
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- Level Pulumi.AwsNative.Pipes.PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- S3LogDestination Pulumi.AwsNative.Pipes.Inputs.PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- CloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- FirehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- IncludeExecutionData []PipeIncludeExecutionDataOption
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- Level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- S3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData List<PipeIncludeExecutionDataOption>
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehoseLogDestination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData PipeIncludeExecutionDataOption[]
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatch_logs_log_destination PipeCloudwatchLogsLogDestination
- The logging configuration settings for the pipe.
- firehose_log_destination PipeFirehoseLogDestination
- The Amazon Data Firehose logging configuration settings for the pipe.
- include_execution_data Sequence[PipeIncludeExecutionDataOption]
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- level PipeLogLevel
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3_log_destination PipeS3LogDestination
- The Amazon S3 logging configuration settings for the pipe.
- cloudwatchLogsLogDestination Property Map
- The logging configuration settings for the pipe.
- firehoseLogDestination Property Map
- The Amazon Data Firehose logging configuration settings for the pipe.
- includeExecutionData List<"ALL">
Whether the execution data (specifically, the payload, awsRequest, and awsResponse fields) is included in the log messages for this pipe. This applies to all log destinations for the pipe.
For more information, see Including execution data in logs in the Amazon EventBridge User Guide.
Allowed values: ALL
- level "OFF" | "ERROR" | "INFO" | "TRACE"
- The level of logging detail to include. This applies to all log destinations for the pipe.
- s3LogDestination Property Map
- The Amazon S3 logging configuration settings for the pipe.
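A minimal sketch that logs at ERROR level to a CloudWatch log group and opts in to execution data; the log group ARN is a placeholder:
// Sketch: ERROR-level pipe logs to CloudWatch Logs, with the payload,
// awsRequest, and awsResponse fields included in each record.
const logConfiguration = {
    cloudwatchLogsLogDestination: {
        logGroupArn: "arn:aws:logs:us-east-1:123456789123:log-group:/aws/pipes/demo", // placeholder
    },
    level: "ERROR",
    includeExecutionData: ["ALL"], // "ALL" is the only allowed value
};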
PipeLogLevel, PipeLogLevelArgs
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- PipeLogLevelOff
- OFF
- PipeLogLevelError
- ERROR
- PipeLogLevelInfo
- INFO
- PipeLogLevelTrace
- TRACE
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- Off
- OFF
- Error
- ERROR
- Info
- INFO
- Trace
- TRACE
- OFF
- OFF
- ERROR
- ERROR
- INFO
- INFO
- TRACE
- TRACE
- "OFF"
- OFF
- "ERROR"
- ERROR
- "INFO"
- INFO
- "TRACE"
- TRACE
PipeMeasureValueType, PipeMeasureValueTypeArgs
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- PipeMeasureValueTypeDouble
- DOUBLE
- PipeMeasureValueTypeBigint
- BIGINT
- PipeMeasureValueTypeVarchar
- VARCHAR
- PipeMeasureValueTypeBoolean
- BOOLEAN
- PipeMeasureValueTypeTimestamp
- TIMESTAMP
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- Double
- DOUBLE
- Bigint
- BIGINT
- Varchar
- VARCHAR
- Boolean
- BOOLEAN
- Timestamp
- TIMESTAMP
- DOUBLE
- DOUBLE
- BIGINT
- BIGINT
- VARCHAR
- VARCHAR
- BOOLEAN
- BOOLEAN
- TIMESTAMP
- TIMESTAMP
- "DOUBLE"
- DOUBLE
- "BIGINT"
- BIGINT
- "VARCHAR"
- VARCHAR
- "BOOLEAN"
- BOOLEAN
- "TIMESTAMP"
- TIMESTAMP
PipeMqBrokerAccessCredentialsProperties, PipeMqBrokerAccessCredentialsPropertiesArgs
- BasicAuth string
- Optional SecretManager ARN which stores the database credentials
- BasicAuth string
- Optional SecretManager ARN which stores the database credentials
- basicAuth String
- Optional SecretManager ARN which stores the database credentials
- basicAuth string
- Optional SecretManager ARN which stores the database credentials
- basic_auth str
- Optional SecretManager ARN which stores the database credentials
- basicAuth String
- Optional SecretManager ARN which stores the database credentials
PipeMskAccessCredentials0Properties, PipeMskAccessCredentials0PropertiesArgs
- SaslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- SaslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth String
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth string
- Optional SecretManager ARN which stores the database credentials
- sasl_scram512_auth str
- Optional SecretManager ARN which stores the database credentials
- saslScram512Auth String
- Optional SecretManager ARN which stores the database credentials
PipeMskAccessCredentials1Properties, PipeMskAccessCredentials1PropertiesArgs
- ClientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- ClientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth string
- Optional SecretManager ARN which stores the database credentials
- client_certificate_tls_auth str
- Optional SecretManager ARN which stores the database credentials
- clientCertificateTlsAuth String
- Optional SecretManager ARN which stores the database credentials
PipeMskStartPosition, PipeMskStartPositionArgs
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- PipeMskStartPositionTrimHorizon
- TRIM_HORIZON
- PipeMskStartPositionLatest
- LATEST
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- TrimHorizon
- TRIM_HORIZON
- Latest
- LATEST
- TRIM_HORIZON
- TRIM_HORIZON
- LATEST
- LATEST
- "TRIM_HORIZON"
- TRIM_HORIZON
- "LATEST"
- LATEST
PipeMultiMeasureAttributeMapping, PipeMultiMeasureAttributeMappingArgs
- MeasureValue string
- Dynamic path to the measurement attribute in the source event.
- MeasureValueType Pulumi.AwsNative.Pipes.PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- MultiMeasureAttributeName string
- Target measure name to be used.
- MeasureValue string
- Dynamic path to the measurement attribute in the source event.
- MeasureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- MultiMeasureAttributeName string
- Target measure name to be used.
- measureValue String
- Dynamic path to the measurement attribute in the source event.
- measureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName String
- Target measure name to be used.
- measureValue string
- Dynamic path to the measurement attribute in the source event.
- measureValueType PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName string
- Target measure name to be used.
- measure_value str
- Dynamic path to the measurement attribute in the source event.
- measure_value_type PipeMeasureValueType
- Data type of the measurement attribute in the source event.
- multi_measure_attribute_name str
- Target measure name to be used.
- measureValue String
- Dynamic path to the measurement attribute in the source event.
- measureValueType "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP"
- Data type of the measurement attribute in the source event.
- multiMeasureAttributeName String
- Target measure name to be used.
PipeMultiMeasureMapping, PipeMultiMeasureMappingArgs
- MultiMeasureAttributeMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureAttributeMapping> - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- MultiMeasureName string - The name of the multiple measurements per record (multi-measure).
- MultiMeasureAttributeMappings []PipeMultiMeasureAttributeMapping - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- MultiMeasureName string - The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings List<PipeMultiMeasureAttributeMapping> - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName String - The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings PipeMultiMeasureAttributeMapping[] - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName string - The name of the multiple measurements per record (multi-measure).
- multi_measure_attribute_mappings Sequence[PipeMultiMeasureAttributeMapping] - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multi_measure_name str - The name of the multiple measurements per record (multi-measure).
- multiMeasureAttributeMappings List<Property Map> - Mappings that represent multiple source event fields mapped to measures in the same Timestream for LiveAnalytics record.
- multiMeasureName String - The name of the multiple measurements per record (multi-measure).
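For orientation, the sketch below shows how a multi-measure mapping might be written in TypeScript against the PipeMultiMeasureMappingArgs input type above. The event paths and measure names are hypothetical, and the types.input namespace is assumed from the Node SDK's generated layout.

import * as aws_native from "@pulumi/aws-native";

// A minimal sketch of a multi-measure mapping; paths and names are hypothetical.
const multiMeasureMapping: aws_native.types.input.pipes.PipeMultiMeasureMappingArgs = {
    multiMeasureName: "sensorReadings",
    multiMeasureAttributeMappings: [
        {
            measureValue: "$.dynamodb.NewImage.temperature.N", // dynamic path into the source event
            measureValueType: "DOUBLE",
            multiMeasureAttributeName: "temperature",
        },
        {
            measureValue: "$.dynamodb.NewImage.humidity.N",
            measureValueType: "DOUBLE",
            multiMeasureAttributeName: "humidity",
        },
    ],
};

Such a mapping would typically be passed in the multiMeasureMappings list of a Timestream target's parameters.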
PipeNetworkConfiguration, PipeNetworkConfigurationArgs
- AwsvpcConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- AwsvpcConfiguration PipeAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration PipeAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration PipeAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpc_configuration PipeAwsVpcConfiguration - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
- awsvpcConfiguration Property Map - Use this structure to specify the VPC subnets and security groups for the task, and whether a public IP address is to be used. This structure is relevant only for ECS tasks that use the awsvpc network mode.
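As a sketch, a network configuration for an ECS task target might look like the following in TypeScript; the subnet and security group IDs are placeholders, and the awsvpcConfiguration field names are assumptions based on the standard ECS awsvpc structure.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch: VPC networking for an ECS task target (awsvpc network mode).
const networkConfiguration: aws_native.types.input.pipes.PipeNetworkConfigurationArgs = {
    awsvpcConfiguration: {
        subnets: ["subnet-0123456789abcdef0"],    // placeholder subnet ID
        securityGroups: ["sg-0123456789abcdef0"], // placeholder security group ID
        assignPublicIp: "ENABLED",
    },
};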
PipeOnPartialBatchItemFailureStreams, PipeOnPartialBatchItemFailureStreamsArgs
- AutomaticBisect - AUTOMATIC_BISECT
- PipeOnPartialBatchItemFailureStreamsAutomaticBisect - AUTOMATIC_BISECT
- AutomaticBisect - AUTOMATIC_BISECT
- AutomaticBisect - AUTOMATIC_BISECT
- AUTOMATIC_BISECT - AUTOMATIC_BISECT
- "AUTOMATIC_BISECT" - AUTOMATIC_BISECT
PipePlacementConstraint, PipePlacementConstraintArgs
- Expression string - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- Type Pulumi.AwsNative.Pipes.PipePlacementConstraintType - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- Expression string - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- Type PipePlacementConstraintType - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression String - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression string - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression str - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type PipePlacementConstraintType - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
- expression String - A cluster query language expression to apply to the constraint. You cannot specify an expression if the constraint type is distinctInstance. To learn more, see Cluster Query Language in the Amazon Elastic Container Service Developer Guide.
- type "distinctInstance" | "memberOf" - The type of constraint. Use distinctInstance to ensure that each task in a particular group is running on a different container instance. Use memberOf to restrict the selection to a group of valid candidates.
PipePlacementConstraintType, PipePlacementConstraintTypeArgs
- DistinctInstance - distinctInstance
- MemberOf - memberOf
- PipePlacementConstraintTypeDistinctInstance - distinctInstance
- PipePlacementConstraintTypeMemberOf - memberOf
- DistinctInstance - distinctInstance
- MemberOf - memberOf
- DistinctInstance - distinctInstance
- MemberOf - memberOf
- DISTINCT_INSTANCE - distinctInstance
- MEMBER_OF - memberOf
- "distinctInstance" - distinctInstance
- "memberOf" - memberOf
PipePlacementStrategy, PipePlacementStrategyArgs
- Field string - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- Type Pulumi.AwsNative.Pipes.PipePlacementStrategyType - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- Field string - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- Type PipePlacementStrategyType - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field String - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field string - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field str - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type PipePlacementStrategyType - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
- field String - The field to apply the placement strategy against. For the spread placement strategy, valid values are instanceId (or host, which has the same effect), or any platform or custom attribute that is applied to a container instance, such as attribute:ecs.availability-zone. For the binpack placement strategy, valid values are cpu and memory. For the random placement strategy, this field is not used.
- type "random" | "spread" | "binpack" - The type of placement strategy. The random placement strategy randomly places tasks on available candidates. The spread placement strategy spreads placement across available candidates evenly based on the field parameter. The binpack strategy places tasks on available candidates that have the least available amount of the resource that is specified with the field parameter. For example, if you binpack on memory, a task is placed on the instance with the least amount of remaining memory (but still enough to run the task).
PipePlacementStrategyType, PipePlacementStrategyTypeArgs
- Random - random
- Spread - spread
- Binpack - binpack
- PipePlacementStrategyTypeRandom - random
- PipePlacementStrategyTypeSpread - spread
- PipePlacementStrategyTypeBinpack - binpack
- Random - random
- Spread - spread
- Binpack - binpack
- Random - random
- Spread - spread
- Binpack - binpack
- RANDOM - random
- SPREAD - spread
- BINPACK - binpack
- "random" - random
- "spread" - spread
- "binpack" - binpack
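Taken together, a placement constraint and a placement strategy might be sketched in TypeScript as follows; the expression and field values are illustrative only. These arrays would typically be set on an ECS task target's parameters, alongside settings such as propagateTags below.

import * as aws_native from "@pulumi/aws-native";

// Illustrative placement rules for an ECS task target.
const placementConstraints: aws_native.types.input.pipes.PipePlacementConstraintArgs[] = [
    // Restrict candidates with a cluster query language expression (memberOf).
    { type: "memberOf", expression: "attribute:ecs.instance-type =~ t2.*" },
];
const placementStrategy: aws_native.types.input.pipes.PipePlacementStrategyArgs[] = [
    // binpack on memory: place tasks on instances with the least remaining memory.
    { type: "binpack", field: "memory" },
];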
PipePropagateTags, PipePropagateTagsArgs
- TaskDefinition - TASK_DEFINITION
- PipePropagateTagsTaskDefinition - TASK_DEFINITION
- TaskDefinition - TASK_DEFINITION
- TaskDefinition - TASK_DEFINITION
- TASK_DEFINITION - TASK_DEFINITION
- "TASK_DEFINITION" - TASK_DEFINITION
PipeRequestedPipeState, PipeRequestedPipeStateArgs
- Running - RUNNING
- Stopped - STOPPED
- PipeRequestedPipeStateRunning - RUNNING
- PipeRequestedPipeStateStopped - STOPPED
- Running - RUNNING
- Stopped - STOPPED
- Running - RUNNING
- Stopped - STOPPED
- RUNNING - RUNNING
- STOPPED - STOPPED
- "RUNNING" - RUNNING
- "STOPPED" - STOPPED
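The requested state is surfaced on the pipe resource itself (as the desiredState input). A minimal sketch of creating a pipe that starts out stopped, with placeholder ARNs:

import * as aws_native from "@pulumi/aws-native";

// Create a pipe in the STOPPED state; ARNs are placeholders.
const pausedPipe = new aws_native.pipes.Pipe("pausedPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSource",
    target: "arn:aws:sqs:us-east-1:123456789123:exampleTarget",
    desiredState: "STOPPED",
});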
PipeS3LogDestination, PipeS3LogDestinationArgs
- BucketName string - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat Pulumi.AwsNative.Pipes.PipeS3OutputFormat - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- Prefix string - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- BucketName string - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- BucketOwner string - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- OutputFormat PipeS3OutputFormat - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- Prefix string - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName String - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat PipeS3OutputFormat - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix String - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName string - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner string - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat PipeS3OutputFormat - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix string - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucket_name str - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucket_owner str - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- output_format PipeS3OutputFormat - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix str - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
- bucketName String - The name of the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- bucketOwner String - The AWS account that owns the Amazon S3 bucket to which EventBridge delivers the log records for the pipe.
- outputFormat "json" | "plain" | "w3c" - The format EventBridge uses for the log records. EventBridge currently only supports json formatting.
- prefix String - The prefix text with which to begin Amazon S3 log object names. For more information, see Organizing objects using prefixes in the Amazon Simple Storage Service User Guide.
PipeS3OutputFormat, PipeS3OutputFormatArgs
- Json - json
- Plain - plain
- W3c - w3c
- PipeS3OutputFormatJson - json
- PipeS3OutputFormatPlain - plain
- PipeS3OutputFormatW3c - w3c
- Json - json
- Plain - plain
- W3c - w3c
- Json - json
- Plain - plain
- W3c - w3c
- JSON - json
- PLAIN - plain
- W3C - w3c
- "json" - json
- "plain" - plain
- "w3c" - w3c
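A log configuration that writes JSON records to S3 might be sketched as follows; the bucket name and account ID are placeholders, and the level field is an assumption based on the pipe log configuration type, which is not expanded in this section.

import * as aws_native from "@pulumi/aws-native";

// Sketch: deliver pipe execution logs to S3 in json format.
const logConfiguration: aws_native.types.input.pipes.PipeLogConfigurationArgs = {
    s3LogDestination: {
        bucketName: "example-pipe-logs", // placeholder bucket
        bucketOwner: "123456789123",     // placeholder account ID
        outputFormat: "json",            // currently the only supported format
        prefix: "pipes/",
    },
    level: "INFO",                       // assumed log-level field
};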
PipeSageMakerPipelineParameter, PipeSageMakerPipelineParameterArgs
PipeSelfManagedKafkaAccessConfigurationCredentials0Properties, PipeSelfManagedKafkaAccessConfigurationCredentials0PropertiesArgs
- BasicAuth string - Optional Secrets Manager ARN that stores the database credentials.
- BasicAuth string - Optional Secrets Manager ARN that stores the database credentials.
- basicAuth String - Optional Secrets Manager ARN that stores the database credentials.
- basicAuth string - Optional Secrets Manager ARN that stores the database credentials.
- basic_auth str - Optional Secrets Manager ARN that stores the database credentials.
- basicAuth String - Optional Secrets Manager ARN that stores the database credentials.
PipeSelfManagedKafkaAccessConfigurationCredentials1Properties, PipeSelfManagedKafkaAccessConfigurationCredentials1PropertiesArgs
- SaslScram512Auth string - Optional Secrets Manager ARN that stores the database credentials.
- SaslScram512Auth string - Optional Secrets Manager ARN that stores the database credentials.
- saslScram512Auth String - Optional Secrets Manager ARN that stores the database credentials.
- saslScram512Auth string - Optional Secrets Manager ARN that stores the database credentials.
- sasl_scram512_auth str - Optional Secrets Manager ARN that stores the database credentials.
- saslScram512Auth String - Optional Secrets Manager ARN that stores the database credentials.
PipeSelfManagedKafkaAccessConfigurationCredentials2Properties, PipeSelfManagedKafkaAccessConfigurationCredentials2PropertiesArgs
- SaslScram256Auth string - Optional Secrets Manager ARN that stores the database credentials.
- SaslScram256Auth string - Optional Secrets Manager ARN that stores the database credentials.
- saslScram256Auth String - Optional Secrets Manager ARN that stores the database credentials.
- saslScram256Auth string - Optional Secrets Manager ARN that stores the database credentials.
- sasl_scram256_auth str - Optional Secrets Manager ARN that stores the database credentials.
- saslScram256Auth String - Optional Secrets Manager ARN that stores the database credentials.
PipeSelfManagedKafkaAccessConfigurationCredentials3Properties, PipeSelfManagedKafkaAccessConfigurationCredentials3PropertiesArgs
- ClientCertificateTlsAuth string - Optional Secrets Manager ARN that stores the database credentials.
- ClientCertificateTlsAuth string - Optional Secrets Manager ARN that stores the database credentials.
- clientCertificateTlsAuth String - Optional Secrets Manager ARN that stores the database credentials.
- clientCertificateTlsAuth string - Optional Secrets Manager ARN that stores the database credentials.
- client_certificate_tls_auth str - Optional Secrets Manager ARN that stores the database credentials.
- clientCertificateTlsAuth String - Optional Secrets Manager ARN that stores the database credentials.
PipeSelfManagedKafkaAccessConfigurationVpc, PipeSelfManagedKafkaAccessConfigurationVpcArgs
- SecurityGroup List<string> - List of security group IDs.
- Subnets List<string> - List of subnet IDs.
- SecurityGroup []string - List of security group IDs.
- Subnets []string - List of subnet IDs.
- securityGroup List<String> - List of security group IDs.
- subnets List<String> - List of subnet IDs.
- securityGroup string[] - List of security group IDs.
- subnets string[] - List of subnet IDs.
- security_group Sequence[str] - List of security group IDs.
- subnets Sequence[str] - List of subnet IDs.
- securityGroup List<String> - List of security group IDs.
- subnets List<String> - List of subnet IDs.
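For a self-managed Kafka source reachable only inside a VPC, the access configuration might be sketched as below; IDs are placeholders. Note that the property is securityGroup (singular), unlike most other VPC structures.

import * as aws_native from "@pulumi/aws-native";

// Sketch: VPC access settings for a self-managed Kafka source.
const kafkaVpc: aws_native.types.input.pipes.PipeSelfManagedKafkaAccessConfigurationVpcArgs = {
    securityGroup: ["sg-0123456789abcdef0"],
    subnets: ["subnet-0123456789abcdef0", "subnet-0fedcba9876543210"],
};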
PipeSelfManagedKafkaStartPosition, PipeSelfManagedKafkaStartPositionArgs
- TrimHorizon - TRIM_HORIZON
- Latest - LATEST
- PipeSelfManagedKafkaStartPositionTrimHorizon - TRIM_HORIZON
- PipeSelfManagedKafkaStartPositionLatest - LATEST
- TrimHorizon - TRIM_HORIZON
- Latest - LATEST
- TrimHorizon - TRIM_HORIZON
- Latest - LATEST
- TRIM_HORIZON - TRIM_HORIZON
- LATEST - LATEST
- "TRIM_HORIZON" - TRIM_HORIZON
- "LATEST" - LATEST
PipeSingleMeasureMapping, PipeSingleMeasureMappingArgs
- MeasureName string - Target measure name for the measurement attribute in the Timestream table.
- MeasureValue string - Dynamic path of the source field to map to the measure in the record.
- MeasureValueType Pulumi.AwsNative.Pipes.PipeMeasureValueType - Data type of the source field.
- MeasureName string - Target measure name for the measurement attribute in the Timestream table.
- MeasureValue string - Dynamic path of the source field to map to the measure in the record.
- MeasureValueType PipeMeasureValueType - Data type of the source field.
- measureName String - Target measure name for the measurement attribute in the Timestream table.
- measureValue String - Dynamic path of the source field to map to the measure in the record.
- measureValueType PipeMeasureValueType - Data type of the source field.
- measureName string - Target measure name for the measurement attribute in the Timestream table.
- measureValue string - Dynamic path of the source field to map to the measure in the record.
- measureValueType PipeMeasureValueType - Data type of the source field.
- measure_name str - Target measure name for the measurement attribute in the Timestream table.
- measure_value str - Dynamic path of the source field to map to the measure in the record.
- measure_value_type PipeMeasureValueType - Data type of the source field.
- measureName String - Target measure name for the measurement attribute in the Timestream table.
- measureValue String - Dynamic path of the source field to map to the measure in the record.
- measureValueType "DOUBLE" | "BIGINT" | "VARCHAR" | "BOOLEAN" | "TIMESTAMP" - Data type of the source field.
PipeSourceActiveMqBrokerParameters, PipeSourceActiveMqBrokerParametersArgs
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- QueueName string - The name of the destination queue to consume.
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- Credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- QueueName string - The name of the destination queue to consume.
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queueName String - The name of the destination queue to consume.
- batchSize Integer - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queueName string - The name of the destination queue to consume.
- batchSize number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queue_name str - The name of the destination queue to consume.
- batch_size int - The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- credentials Property Map - The credentials needed to access the resource.
- queueName String - The name of the destination queue to consume.
- batchSize Number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
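A sketch of Active MQ source parameters in TypeScript; the queue name and Secrets Manager ARN are placeholders.

import * as aws_native from "@pulumi/aws-native";

// Sketch: consume from an Active MQ queue using basic-auth credentials from Secrets Manager.
const activeMqParams: aws_native.types.input.pipes.PipeSourceActiveMqBrokerParametersArgs = {
    queueName: "orders",
    credentials: {
        basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:mq-creds-AbCdEf",
    },
    batchSize: 10,
    maximumBatchingWindowInSeconds: 5,
};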
PipeSourceDynamoDbStreamParameters, PipeSourceDynamoDbStreamParametersArgs
- StartingPosition Pulumi.AwsNative.Pipes.PipeDynamoDbStreamStartPosition - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- BatchSize int - The maximum number of records to include in each batch.
- DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1.
- StartingPosition PipeDynamoDbStreamStartPosition - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- BatchSize int - The maximum number of records to include in each batch.
- DeadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition PipeDynamoDbStreamStartPosition - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize Integer - The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Integer - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Integer - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Integer - The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition PipeDynamoDbStreamStartPosition - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize number - The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds number - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts number - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor number - The number of batches to process concurrently from each shard. The default value is 1.
- starting_position PipeDynamoDbStreamStartPosition - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batch_size int - The maximum number of records to include in each batch.
- dead_letter_config PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- maximum_record_age_in_seconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximum_retry_attempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelization_factor int - The number of batches to process concurrently from each shard. The default value is 1.
- startingPosition "TRIM_HORIZON" | "LATEST" - (Streams only) The position in a stream from which to start reading. Valid values: TRIM_HORIZON | LATEST.
- batchSize Number - The maximum number of records to include in each batch.
- deadLetterConfig Property Map - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Number - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Number - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure "AUTOMATIC_BISECT" - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Number - The number of batches to process concurrently from each shard. The default value is 1.
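These retry and failure-handling settings combine naturally; a sketch with placeholder ARNs (the dead-letter config's arn field is assumed from the PipeDeadLetterConfig type, which is not expanded in this section):

import * as aws_native from "@pulumi/aws-native";

// Sketch: DynamoDB stream source with bounded retries and a dead-letter queue.
const dynamoDbParams: aws_native.types.input.pipes.PipeSourceDynamoDbStreamParametersArgs = {
    startingPosition: "TRIM_HORIZON",
    batchSize: 100,
    maximumRetryAttempts: 3,                       // stop retrying a record after three attempts
    onPartialBatchItemFailure: "AUTOMATIC_BISECT", // bisect the batch on partial failures
    deadLetterConfig: {
        arn: "arn:aws:sqs:us-east-1:123456789123:pipe-dlq", // assumed field name; placeholder ARN
    },
    parallelizationFactor: 2,
};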
PipeSourceKinesisStreamParameters, PipeSourceKinesisStreamParametersArgs
- StartingPosition Pulumi.AwsNative.Pipes.PipeKinesisStreamStartPosition - The position in a stream from which to start reading.
- BatchSize int - The maximum number of records to include in each batch.
- DeadLetterConfig Pulumi.AwsNative.Pipes.Inputs.PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure Pulumi.AwsNative.Pipes.PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1.
- StartingPositionTimestamp string - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- StartingPosition PipeKinesisStreamStartPosition - The position in a stream from which to start reading.
- BatchSize int - The maximum number of records to include in each batch.
- DeadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- MaximumRecordAgeInSeconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- MaximumRetryAttempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- OnPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- ParallelizationFactor int - The number of batches to process concurrently from each shard. The default value is 1.
- StartingPositionTimestamp string - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition PipeKinesisStreamStartPosition - The position in a stream from which to start reading.
- batchSize Integer - The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Integer - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Integer - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Integer - The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp String - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition PipeKinesisStreamStartPosition - The position in a stream from which to start reading.
- batchSize number - The maximum number of records to include in each batch.
- deadLetterConfig PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds number - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts number - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor number - The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp string - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- starting_position PipeKinesisStreamStartPosition - The position in a stream from which to start reading.
- batch_size int - The maximum number of records to include in each batch.
- dead_letter_config PipeDeadLetterConfig - Define the target queue to send dead-letter queue events to.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- maximum_record_age_in_seconds int - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximum_retry_attempts int - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- on_partial_batch_item_failure PipeOnPartialBatchItemFailureStreams - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelization_factor int - The number of batches to process concurrently from each shard. The default value is 1.
- starting_position_timestamp str - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
- startingPosition "TRIM_HORIZON" | "LATEST" | "AT_TIMESTAMP" - The position in a stream from which to start reading.
- batchSize Number - The maximum number of records to include in each batch.
- deadLetterConfig Property Map - Define the target queue to send dead-letter queue events to.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
- maximumRecordAgeInSeconds Number - Discard records older than the specified age. The default value is -1, which sets the maximum age to infinite. When the value is set to infinite, EventBridge never discards old records.
- maximumRetryAttempts Number - Discard records after the specified number of retries. The default value is -1, which sets the maximum number of retries to infinite. When MaximumRetryAttempts is infinite, EventBridge retries failed records until the record expires in the event source.
- onPartialBatchItemFailure "AUTOMATIC_BISECT" - Define how to handle item process failures. AUTOMATIC_BISECT halves each batch and retries each half until all the records are processed or there is one failed message left in the batch.
- parallelizationFactor Number - The number of batches to process concurrently from each shard. The default value is 1.
- startingPositionTimestamp String - With StartingPosition set to AT_TIMESTAMP, the time from which to start reading, in Unix time seconds.
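A sketch of starting a Kinesis source at a fixed point in time; the timestamp value is illustrative and is passed as a string of Unix seconds per the description above (verify the exact format your provider version expects).

import * as aws_native from "@pulumi/aws-native";

// Sketch: read a Kinesis stream starting at a specific timestamp.
const kinesisParams: aws_native.types.input.pipes.PipeSourceKinesisStreamParametersArgs = {
    startingPosition: "AT_TIMESTAMP",
    startingPositionTimestamp: "1700000000", // Unix seconds, as a string; illustrative value
    batchSize: 50,
    maximumBatchingWindowInSeconds: 10,
};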
PipeSourceManagedStreamingKafkaParameters, PipeSourceManagedStreamingKafkaParametersArgs
- TopicName string - The name of the topic that the pipe will read from.
- BatchSize int - The maximum number of records to include in each batch.
- ConsumerGroupId string - The name of the destination queue to consume.
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeMskAccessCredentials1Properties - The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- StartingPosition Pulumi.AwsNative.Pipes.PipeMskStartPosition - The position in a stream from which to start reading.
- TopicName string - The name of the topic that the pipe will read from.
- BatchSize int - The maximum number of records to include in each batch.
- ConsumerGroupId string - The name of the destination queue to consume.
- Credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties - The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- StartingPosition PipeMskStartPosition - The position in a stream from which to start reading.
- topicName String - The name of the topic that the pipe will read from.
- batchSize Integer - The maximum number of records to include in each batch.
- consumerGroupId String - The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- startingPosition PipeMskStartPosition - The position in a stream from which to start reading.
- topicName string - The name of the topic that the pipe will read from.
- batchSize number - The maximum number of records to include in each batch.
- consumerGroupId string - The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- startingPosition PipeMskStartPosition - The position in a stream from which to start reading.
- topic_name str - The name of the topic that the pipe will read from.
- batch_size int - The maximum number of records to include in each batch.
- consumer_group_id str - The name of the destination queue to consume.
- credentials PipeMskAccessCredentials0Properties | PipeMskAccessCredentials1Properties - The credentials needed to access the resource.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- starting_position PipeMskStartPosition - The position in a stream from which to start reading.
- topicName String - The name of the topic that the pipe will read from.
- batchSize Number - The maximum number of records to include in each batch.
- consumerGroupId String - The name of the destination queue to consume.
- credentials Property Map | Property Map - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
- startingPosition "TRIM_HORIZON" | "LATEST" - The position in a stream from which to start reading.
PipeSourceParameters, PipeSourceParametersArgs
- ActiveMqBrokerParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceActiveMqBrokerParameters - The parameters for using an Active MQ broker as a source.
- DynamoDbStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceDynamoDbStreamParameters - The parameters for using a DynamoDB stream as a source.
- FilterCriteria Pulumi.AwsNative.Pipes.Inputs.PipeFilterCriteria - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- KinesisStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceKinesisStreamParameters - The parameters for using a Kinesis stream as a source.
- ManagedStreamingKafkaParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source.
- RabbitMqBrokerParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceRabbitMqBrokerParameters - The parameters for using a Rabbit MQ broker as a source.
- SelfManagedKafkaParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- SqsQueueParameters Pulumi.AwsNative.Pipes.Inputs.PipeSourceSqsQueueParameters - The parameters for using an Amazon SQS stream as a source.
- ActiveMqBrokerParameters PipeSourceActiveMqBrokerParameters - The parameters for using an Active MQ broker as a source.
- DynamoDbStreamParameters PipeSourceDynamoDbStreamParameters - The parameters for using a DynamoDB stream as a source.
- FilterCriteria PipeFilterCriteria - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- KinesisStreamParameters PipeSourceKinesisStreamParameters - The parameters for using a Kinesis stream as a source.
- ManagedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source.
- RabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters - The parameters for using a Rabbit MQ broker as a source.
- SelfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- SqsQueueParameters PipeSourceSqsQueueParameters - The parameters for using an Amazon SQS stream as a source.
- activeMqBrokerParameters PipeSourceActiveMqBrokerParameters - The parameters for using an Active MQ broker as a source.
- dynamoDbStreamParameters PipeSourceDynamoDbStreamParameters - The parameters for using a DynamoDB stream as a source.
- filterCriteria PipeFilterCriteria - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesisStreamParameters PipeSourceKinesisStreamParameters - The parameters for using a Kinesis stream as a source.
- managedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source.
- rabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters - The parameters for using a Rabbit MQ broker as a source.
- selfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqsQueueParameters PipeSourceSqsQueueParameters - The parameters for using an Amazon SQS stream as a source.
- activeMqBrokerParameters PipeSourceActiveMqBrokerParameters - The parameters for using an Active MQ broker as a source.
- dynamoDbStreamParameters PipeSourceDynamoDbStreamParameters - The parameters for using a DynamoDB stream as a source.
- filterCriteria PipeFilterCriteria - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesisStreamParameters PipeSourceKinesisStreamParameters - The parameters for using a Kinesis stream as a source.
- managedStreamingKafkaParameters PipeSourceManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source.
- rabbitMqBrokerParameters PipeSourceRabbitMqBrokerParameters - The parameters for using a Rabbit MQ broker as a source.
- selfManagedKafkaParameters PipeSourceSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqsQueueParameters PipeSourceSqsQueueParameters - The parameters for using an Amazon SQS stream as a source.
- active_mq_broker_parameters PipeSourceActiveMqBrokerParameters - The parameters for using an Active MQ broker as a source.
- dynamo_db_stream_parameters PipeSourceDynamoDbStreamParameters - The parameters for using a DynamoDB stream as a source.
- filter_criteria PipeFilterCriteria - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesis_stream_parameters PipeSourceKinesisStreamParameters - The parameters for using a Kinesis stream as a source.
- managed_streaming_kafka_parameters PipeSourceManagedStreamingKafkaParameters - The parameters for using an MSK stream as a source.
- rabbit_mq_broker_parameters PipeSourceRabbitMqBrokerParameters - The parameters for using a Rabbit MQ broker as a source.
- self_managed_kafka_parameters PipeSourceSelfManagedKafkaParameters - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqs_queue_parameters PipeSourceSqsQueueParameters - The parameters for using an Amazon SQS stream as a source.
- activeMqBrokerParameters Property Map - The parameters for using an Active MQ broker as a source.
- dynamoDbStreamParameters Property Map - The parameters for using a DynamoDB stream as a source.
- filterCriteria Property Map - The collection of event patterns used to filter events. To remove a filter, specify a FilterCriteria object with an empty array of Filter objects. For more information, see Events and Event Patterns in the Amazon EventBridge User Guide.
- kinesisStreamParameters Property Map - The parameters for using a Kinesis stream as a source.
- managedStreamingKafkaParameters Property Map - The parameters for using an MSK stream as a source.
- rabbitMqBrokerParameters Property Map - The parameters for using a Rabbit MQ broker as a source.
- selfManagedKafkaParameters Property Map - The parameters for using a self-managed Apache Kafka stream as a source. A self-managed cluster refers to any Apache Kafka cluster not hosted by AWS. This includes both clusters you manage yourself, as well as those hosted by a third-party provider, such as Confluent Cloud, CloudKarafka, or Redpanda. For more information, see Apache Kafka streams as a source in the Amazon EventBridge User Guide.
- sqsQueueParameters Property Map - The parameters for using an Amazon SQS stream as a source.
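Exactly one source-specific parameter block is set, and filterCriteria can accompany it; a sketch follows (the filters and pattern field names are assumed from the PipeFilterCriteria type, which is not expanded in this section, and the pattern fields are hypothetical):

import * as aws_native from "@pulumi/aws-native";

// Sketch: Kinesis source plus an event-pattern filter.
const sourceParameters: aws_native.types.input.pipes.PipeSourceParametersArgs = {
    kinesisStreamParameters: {
        startingPosition: "TRIM_HORIZON",
    },
    filterCriteria: {
        filters: [
            // EventBridge event-pattern syntax, serialized as a JSON string.
            { pattern: JSON.stringify({ eventType: ["ORDER_CREATED"] }) },
        ],
    },
};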
PipeSourceRabbitMqBrokerParameters, PipeSourceRabbitMqBrokerParametersArgs
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- QueueName string - The name of the destination queue to consume.
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- VirtualHost string - The name of the virtual host associated with the source broker.
- Credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- QueueName string - The name of the destination queue to consume.
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- VirtualHost string - The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queueName String - The name of the destination queue to consume.
- batchSize Integer - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- virtualHost String - The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queueName string - The name of the destination queue to consume.
- batchSize number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- virtualHost string - The name of the virtual host associated with the source broker.
- credentials PipeMqBrokerAccessCredentialsProperties - The credentials needed to access the resource.
- queue_name str - The name of the destination queue to consume.
- batch_size int - The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- virtual_host str - The name of the virtual host associated with the source broker.
- credentials Property Map - The credentials needed to access the resource.
- queueName String - The name of the destination queue to consume.
- batchSize Number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
- virtualHost String - The name of the virtual host associated with the source broker.
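To see how these properties fit together, here is a minimal TypeScript sketch of a pipe that reads from a Rabbit MQ broker; every ARN and name is a hypothetical placeholder, and the credentials shape assumes the basic-auth variant of PipeMqBrokerAccessCredentialsProperties.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: all ARNs and names below are hypothetical placeholders.
const rabbitPipe = new aws_native.pipes.Pipe("rabbitPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:mq:us-east-1:123456789123:broker:exampleBroker:b-0000",
    target: "arn:aws:sqs:us-east-1:123456789123:exampleTargetQueue",
    sourceParameters: {
        rabbitMqBrokerParameters: {
            // Assumed basic-auth variant: a Secrets Manager ARN holding the broker credentials.
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:exampleBrokerCreds",
            },
            queueName: "orders",                // destination queue to consume
            virtualHost: "/",                   // virtual host on the source broker
            batchSize: 10,                      // max records per batch
            maximumBatchingWindowInSeconds: 30, // max time to wait for events
        },
    },
});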
PipeSourceSelfManagedKafkaParameters, PipeSourceSelfManagedKafkaParametersArgs
- TopicName string - The name of the topic that the pipe will read from.
- AdditionalBootstrapServers List<string> - An array of server URLs.
- BatchSize int - The maximum number of records to include in each batch.
- ConsumerGroupId string - The name of the destination queue to consume.
- Credentials Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationCredentials3Properties - The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- ServerRootCaCertificate string - Optional Secrets Manager ARN which stores the database credentials.
- StartingPosition Pulumi.AwsNative.Pipes.PipeSelfManagedKafkaStartPosition - The position in a stream from which to start reading.
- Vpc Pulumi.AwsNative.Pipes.Inputs.PipeSelfManagedKafkaAccessConfigurationVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- TopicName string - The name of the topic that the pipe will read from.
- AdditionalBootstrapServers []string - An array of server URLs.
- BatchSize int - The maximum number of records to include in each batch.
- ConsumerGroupId string - The name of the destination queue to consume.
- Credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties - The credentials needed to access the resource.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- ServerRootCaCertificate string - Optional Secrets Manager ARN which stores the database credentials.
- StartingPosition PipeSelfManagedKafkaStartPosition - The position in a stream from which to start reading.
- Vpc PipeSelfManagedKafkaAccessConfigurationVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName String - The name of the topic that the pipe will read from.
- additionalBootstrapServers List<String> - An array of server URLs.
- batchSize Integer - The maximum number of records to include in each batch.
- consumerGroupId String - The name of the destination queue to consume.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- serverRootCaCertificate String - Optional Secrets Manager ARN which stores the database credentials.
- startingPosition PipeSelfManagedKafkaStartPosition - The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName string - The name of the topic that the pipe will read from.
- additionalBootstrapServers string[] - An array of server URLs.
- batchSize number - The maximum number of records to include in each batch.
- consumerGroupId string - The name of the destination queue to consume.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- serverRootCaCertificate string - Optional Secrets Manager ARN which stores the database credentials.
- startingPosition PipeSelfManagedKafkaStartPosition - The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topic_name str - The name of the topic that the pipe will read from.
- additional_bootstrap_servers Sequence[str] - An array of server URLs.
- batch_size int - The maximum number of records to include in each batch.
- consumer_group_id str - The name of the destination queue to consume.
- credentials PipeSelfManagedKafkaAccessConfigurationCredentials0Properties | PipeSelfManagedKafkaAccessConfigurationCredentials1Properties | PipeSelfManagedKafkaAccessConfigurationCredentials2Properties | PipeSelfManagedKafkaAccessConfigurationCredentials3Properties - The credentials needed to access the resource.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- server_root_ca_certificate str - Optional Secrets Manager ARN which stores the database credentials.
- starting_position PipeSelfManagedKafkaStartPosition - The position in a stream from which to start reading.
- vpc PipeSelfManagedKafkaAccessConfigurationVpc - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
- topicName String - The name of the topic that the pipe will read from.
- additionalBootstrapServers List<String> - An array of server URLs.
- batchSize Number - The maximum number of records to include in each batch.
- consumerGroupId String - The name of the destination queue to consume.
- credentials Property Map | Property Map | Property Map | Property Map - The credentials needed to access the resource.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
- serverRootCaCertificate String - Optional Secrets Manager ARN which stores the database credentials.
- startingPosition "TRIM_HORIZON" | "LATEST" - The position in a stream from which to start reading.
- vpc Property Map - This structure specifies the VPC subnets and security groups for the stream, and whether a public IP address is to be used.
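A minimal TypeScript sketch of a self-managed Kafka source, under stated assumptions: the bootstrap-server source address, all ARNs, and the mapping of Credentials0Properties to the basic-auth variant of the credentials union are illustrative guesses, not values from this reference.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs, hosts, and IDs below are hypothetical placeholders.
const kafkaPipe = new aws_native.pipes.Pipe("kafkaPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    // Self-managed Kafka sources are assumed to be addressed by bootstrap server, not ARN.
    source: "smk://broker-1.example.com:9092",
    target: "arn:aws:sqs:us-east-1:123456789123:exampleTargetQueue",
    sourceParameters: {
        selfManagedKafkaParameters: {
            topicName: "orders",
            additionalBootstrapServers: ["broker-2.example.com:9092"],
            consumerGroupId: "pipe-consumer-group",
            startingPosition: aws_native.pipes.PipeSelfManagedKafkaStartPosition.TrimHorizon,
            // Assumed basic-auth member of the credentials union (Secrets Manager ARN).
            credentials: {
                basicAuth: "arn:aws:secretsmanager:us-east-1:123456789123:secret:exampleKafkaCreds",
            },
            batchSize: 100,
            maximumBatchingWindowInSeconds: 10,
        },
    },
});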
PipeSourceSqsQueueParameters, PipeSourceSqsQueueParametersArgs
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- BatchSize int - The maximum number of records to include in each batch.
- MaximumBatchingWindowInSeconds int - The maximum length of time to wait for events.
- batchSize Integer - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Integer - The maximum length of time to wait for events.
- batchSize number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds number - The maximum length of time to wait for events.
- batch_size int - The maximum number of records to include in each batch.
- maximum_batching_window_in_seconds int - The maximum length of time to wait for events.
- batchSize Number - The maximum number of records to include in each batch.
- maximumBatchingWindowInSeconds Number - The maximum length of time to wait for events.
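A short TypeScript sketch of the two SQS batching knobs; the ARNs are hypothetical placeholders.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs below are hypothetical placeholders.
const sqsPipe = new aws_native.pipes.Pipe("sqsPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:sqs:us-east-1:123456789123:exampleTargetQueue",
    sourceParameters: {
        sqsQueueParameters: {
            batchSize: 10,                      // max records per batch
            maximumBatchingWindowInSeconds: 20, // wait up to 20s to fill a batch
        },
    },
});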
PipeState, PipeStateArgs
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed - CREATE_FAILED
- UpdateFailed - UPDATE_FAILED
- StartFailed - START_FAILED
- StopFailed - STOP_FAILED
- DeleteFailed - DELETE_FAILED
- CreateRollbackFailed - CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed - DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed - UPDATE_ROLLBACK_FAILED
- PipeStateRunning - RUNNING
- PipeStateStopped - STOPPED
- PipeStateCreating - CREATING
- PipeStateUpdating - UPDATING
- PipeStateDeleting - DELETING
- PipeStateStarting - STARTING
- PipeStateStopping - STOPPING
- PipeStateCreateFailed - CREATE_FAILED
- PipeStateUpdateFailed - UPDATE_FAILED
- PipeStateStartFailed - START_FAILED
- PipeStateStopFailed - STOP_FAILED
- PipeStateDeleteFailed - DELETE_FAILED
- PipeStateCreateRollbackFailed - CREATE_ROLLBACK_FAILED
- PipeStateDeleteRollbackFailed - DELETE_ROLLBACK_FAILED
- PipeStateUpdateRollbackFailed - UPDATE_ROLLBACK_FAILED
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed - CREATE_FAILED
- UpdateFailed - UPDATE_FAILED
- StartFailed - START_FAILED
- StopFailed - STOP_FAILED
- DeleteFailed - DELETE_FAILED
- CreateRollbackFailed - CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed - DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed - UPDATE_ROLLBACK_FAILED
- Running
- RUNNING
- Stopped
- STOPPED
- Creating
- CREATING
- Updating
- UPDATING
- Deleting
- DELETING
- Starting
- STARTING
- Stopping
- STOPPING
- CreateFailed - CREATE_FAILED
- UpdateFailed - UPDATE_FAILED
- StartFailed - START_FAILED
- StopFailed - STOP_FAILED
- DeleteFailed - DELETE_FAILED
- CreateRollbackFailed - CREATE_ROLLBACK_FAILED
- DeleteRollbackFailed - DELETE_ROLLBACK_FAILED
- UpdateRollbackFailed - UPDATE_ROLLBACK_FAILED
- RUNNING
- RUNNING
- STOPPED
- STOPPED
- CREATING
- CREATING
- UPDATING
- UPDATING
- DELETING
- DELETING
- STARTING
- STARTING
- STOPPING
- STOPPING
- CREATE_FAILED
- CREATE_FAILED
- UPDATE_FAILED
- UPDATE_FAILED
- START_FAILED
- START_FAILED
- STOP_FAILED
- STOP_FAILED
- DELETE_FAILED
- DELETE_FAILED
- CREATE_ROLLBACK_FAILED
- CREATE_ROLLBACK_FAILED
- DELETE_ROLLBACK_FAILED
- DELETE_ROLLBACK_FAILED
- UPDATE_ROLLBACK_FAILED
- UPDATE_ROLLBACK_FAILED
- "RUNNING"
- RUNNING
- "STOPPED"
- STOPPED
- "CREATING"
- CREATING
- "UPDATING"
- UPDATING
- "DELETING"
- DELETING
- "STARTING"
- STARTING
- "STOPPING"
- STOPPING
- "CREATE_FAILED"
- CREATE_FAILED
- "UPDATE_FAILED"
- UPDATE_FAILED
- "START_FAILED"
- START_FAILED
- "STOP_FAILED"
- STOP_FAILED
- "DELETE_FAILED"
- DELETE_FAILED
- "CREATE_ROLLBACK_FAILED"
- CREATE_ROLLBACK_FAILED
- "DELETE_ROLLBACK_FAILED"
- DELETE_ROLLBACK_FAILED
- "UPDATE_ROLLBACK_FAILED"
- UPDATE_ROLLBACK_FAILED
PipeTag, PipeTagArgs
PipeTargetBatchJobParameters, PipeTargetBatchJobParametersArgs
- JobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- ArrayProperties Pulumi.AwsNative.Pipes.Inputs.PipeBatchArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- ContainerOverrides Pulumi.AwsNative.Pipes.Inputs.PipeBatchContainerOverrides - The overrides that are sent to a container.
- DependsOn List<Pulumi.AwsNative.Pipes.Inputs.PipeBatchJobDependency> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- Parameters Dictionary<string, string> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy Pulumi.AwsNative.Pipes.Inputs.PipeBatchRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- JobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- JobName string - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- ArrayProperties PipeBatchArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- ContainerOverrides PipeBatchContainerOverrides - The overrides that are sent to a container.
- DependsOn []PipeBatchJobDependency - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- Parameters map[string]string - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- RetryStrategy PipeBatchRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition String - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties PipeBatchArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides PipeBatchContainerOverrides - The overrides that are sent to a container.
- dependsOn List<PipeBatchJobDependency> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Map<String,String> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeBatchRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition string - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName string - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties PipeBatchArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides PipeBatchContainerOverrides - The overrides that are sent to a container.
- dependsOn PipeBatchJobDependency[] - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters {[key: string]: string} - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy PipeBatchRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- job_definition str - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- job_name str - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- array_properties PipeBatchArrayProperties - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- container_overrides PipeBatchContainerOverrides - The overrides that are sent to a container.
- depends_on Sequence[PipeBatchJobDependency] - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Mapping[str, str] - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retry_strategy PipeBatchRetryStrategy - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
- jobDefinition String - The job definition used by this job. This value can be one of name, name:revision, or the Amazon Resource Name (ARN) for the job definition. If name is specified without a revision then the latest active revision is used.
- jobName String - The name of the job. It can be up to 128 letters long. The first character must be alphanumeric, and it can contain uppercase and lowercase letters, numbers, hyphens (-), and underscores (_).
- arrayProperties Property Map - The array properties for the submitted job, such as the size of the array. The array size can be between 2 and 10,000. If you specify array properties for a job, it becomes an array job. This parameter is used only if the target is an AWS Batch job.
- containerOverrides Property Map - The overrides that are sent to a container.
- dependsOn List<Property Map> - A list of dependencies for the job. A job can depend upon a maximum of 20 jobs. You can specify a SEQUENTIAL type dependency without specifying a job ID for array jobs so that each child array job completes sequentially, starting at index 0. You can also specify an N_TO_N type dependency with a job ID for array jobs. In that case, each index child of this job must wait for the corresponding index child of each dependency to complete before it can begin.
- parameters Map<String> - Additional parameters passed to the job that replace parameter substitution placeholders that are set in the job definition. Parameters are specified as a key and value pair mapping. Parameters included here override any corresponding parameter defaults from the job definition.
- retryStrategy Property Map - The retry strategy to use for failed jobs. When a retry strategy is specified here, it overrides the retry strategy defined in the job definition.
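A minimal TypeScript sketch of a pipe submitting AWS Batch jobs; the ARNs and job names are hypothetical, and the retry-strategy shape (an attempts count) is an assumption about PipeBatchRetryStrategy.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs and names below are hypothetical placeholders.
const batchPipe = new aws_native.pipes.Pipe("batchPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:batch:us-east-1:123456789123:job-queue/exampleJobQueue",
    targetParameters: {
        batchJobParameters: {
            jobDefinition: "exampleJobDefinition", // name, name:revision, or full ARN
            jobName: "pipe-submitted-job",
            retryStrategy: { attempts: 2 },        // assumed shape; overrides the job definition's retry strategy
            parameters: { inputKey: "$.body" },    // fills substitution placeholders in the job definition
        },
    },
});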
PipeTargetCloudWatchLogsParameters, PipeTargetCloudWatchLogsParametersArgs
- LogStreamName string - The name of the log stream.
- Timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
- LogStreamName string - The name of the log stream.
- Timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
- logStreamName String - The name of the log stream.
- timestamp String - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
- logStreamName string - The name of the log stream.
- timestamp string - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
- log_stream_name str - The name of the log stream.
- timestamp str - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
- logStreamName String - The name of the log stream.
- timestamp String - The time the event occurred, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
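A minimal TypeScript sketch of a CloudWatch Logs target; the ARNs and stream name are hypothetical, and using a JSONPath expression for timestamp assumes the source message carries an epoch-millis field.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs and names below are hypothetical placeholders.
const logsPipe = new aws_native.pipes.Pipe("logsPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:logs:us-east-1:123456789123:log-group:/example/pipe-output",
    targetParameters: {
        cloudWatchLogsParameters: {
            logStreamName: "pipe-events",
            // Assumed JSONPath into the event payload pointing at an epoch-millis value.
            timestamp: "$.body.timestamp",
        },
    },
});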
PipeTargetEcsTaskParameters, PipeTargetEcsTaskParametersArgs
- TaskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipeCapacityProviderStrategyItem> - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- EnableEcsManagedTags bool - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- EnableExecuteCommand bool - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- Group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType Pulumi.AwsNative.Pipes.PipeLaunchType - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- NetworkConfiguration Pulumi.AwsNative.Pipes.Inputs.PipeNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- Overrides Pulumi.AwsNative.Pipes.Inputs.PipeEcsTaskOverride - The overrides that are associated with a task.
- PlacementConstraints List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementConstraint> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- PlacementStrategy List<Pulumi.AwsNative.Pipes.Inputs.PipePlacementStrategy> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- PlatformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- PropagateTags Pulumi.AwsNative.Pipes.PipePropagateTags - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- ReferenceId string - The reference ID to use for the task.
- Tags List<Pulumi.AwsNative.Pipes.Inputs.PipeTag> - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- TaskCount int - The number of tasks to create based on TaskDefinition. The default is 1.
- TaskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- CapacityProviderStrategy []PipeCapacityProviderStrategyItem - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- EnableEcsManagedTags bool - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- EnableExecuteCommand bool - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- Group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- LaunchType PipeLaunchType - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- NetworkConfiguration PipeNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- Overrides PipeEcsTaskOverride - The overrides that are associated with a task.
- PlacementConstraints []PipePlacementConstraint - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- PlacementStrategy []PipePlacementStrategy - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- PlatformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- PropagateTags PipePropagateTags - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- ReferenceId string - The reference ID to use for the task.
- Tags []PipeTag - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- TaskCount int - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy List<PipeCapacityProviderStrategyItem> - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags Boolean - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand Boolean - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group String - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType PipeLaunchType - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration PipeNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride - The overrides that are associated with a task.
- placementConstraints List<PipePlacementConstraint> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy List<PipePlacementStrategy> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion String - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags PipePropagateTags - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId String - The reference ID to use for the task.
- tags List<PipeTag> - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount Integer - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn string - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy PipeCapacityProviderStrategyItem[] - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags boolean - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand boolean - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group string - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType PipeLaunchType - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration PipeNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride - The overrides that are associated with a task.
- placementConstraints PipePlacementConstraint[] - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy PipePlacementStrategy[] - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion string - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags PipePropagateTags - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId string - The reference ID to use for the task.
- tags PipeTag[] - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount number - The number of tasks to create based on TaskDefinition. The default is 1.
- task_definition_arn str - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacity_provider_strategy Sequence[PipeCapacityProviderStrategyItem] - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enable_ecs_managed_tags bool - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enable_execute_command bool - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group str - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launch_type PipeLaunchType - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- network_configuration PipeNetworkConfiguration - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides PipeEcsTaskOverride - The overrides that are associated with a task.
- placement_constraints Sequence[PipePlacementConstraint] - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placement_strategy Sequence[PipePlacementStrategy] - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platform_version str - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagate_tags PipePropagateTags - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- reference_id str - The reference ID to use for the task.
- tags Sequence[PipeTag] - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- task_count int - The number of tasks to create based on TaskDefinition. The default is 1.
- taskDefinitionArn String - The ARN of the task definition to use if the event target is an Amazon ECS task.
- capacityProviderStrategy List<Property Map> - The capacity provider strategy to use for the task.
If a capacityProviderStrategy is specified, the launchType parameter must be omitted. If no capacityProviderStrategy or launchType is specified, the defaultCapacityProviderStrategy for the cluster is used.
- enableEcsManagedTags Boolean - Specifies whether to enable Amazon ECS managed tags for the task. For more information, see Tagging Your Amazon ECS Resources in the Amazon Elastic Container Service Developer Guide.
- enableExecuteCommand Boolean - Whether or not to enable the execute command functionality for the containers in this task. If true, this enables execute command functionality on all containers in the task.
- group String - Specifies an Amazon ECS task group for the task. The maximum length is 255 characters.
- launchType "EC2" | "FARGATE" | "EXTERNAL" - Specifies the launch type on which your task is running. The launch type that you specify here must match one of the launch type (compatibilities) of the target task. The FARGATE value is supported only in the Regions where AWS Fargate with Amazon ECS is supported. For more information, see AWS Fargate on Amazon ECS in the Amazon Elastic Container Service Developer Guide.
- networkConfiguration Property Map - Use this structure if the Amazon ECS task uses the awsvpc network mode. This structure specifies the VPC subnets and security groups associated with the task, and whether a public IP address is to be used. This structure is required if LaunchType is FARGATE because the awsvpc mode is required for Fargate tasks.
If you specify NetworkConfiguration when the target ECS task does not use the awsvpc network mode, the task fails.
- overrides Property Map - The overrides that are associated with a task.
- placementConstraints List<Property Map> - An array of placement constraint objects to use for the task. You can specify up to 10 constraints per task (including constraints in the task definition and those specified at runtime).
- placementStrategy List<Property Map> - The placement strategy objects to use for the task. You can specify a maximum of five strategy rules per task.
- platformVersion String - Specifies the platform version for the task. Specify only the numeric portion of the platform version, such as 1.1.0.
This structure is used only if LaunchType is FARGATE. For more information about valid platform versions, see AWS Fargate Platform Versions in the Amazon Elastic Container Service Developer Guide.
- propagateTags "TASK_DEFINITION" - Specifies whether to propagate the tags from the task definition to the task. If no value is specified, the tags are not propagated. Tags can only be propagated to the task during task creation. To add tags to a task after task creation, use the TagResource API action.
- referenceId String - The reference ID to use for the task.
- tags List<Property Map> - The metadata that you apply to the task to help you categorize and organize them. Each tag consists of a key and an optional value, both of which you define. To learn more, see RunTask in the Amazon ECS API Reference.
- taskCount Number - The number of tasks to create based on TaskDefinition. The default is 1.
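A minimal TypeScript sketch of a Fargate ECS task target; the ARNs, subnet, and security group are hypothetical, and the awsvpcConfiguration shape inside PipeNetworkConfiguration is assumed to mirror ECS's awsvpc networking structure.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs, subnets, and security groups below are hypothetical placeholders.
const ecsPipe = new aws_native.pipes.Pipe("ecsPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:ecs:us-east-1:123456789123:cluster/exampleCluster",
    targetParameters: {
        ecsTaskParameters: {
            taskDefinitionArn: "arn:aws:ecs:us-east-1:123456789123:task-definition/exampleTask:1",
            launchType: aws_native.pipes.PipeLaunchType.Fargate,
            taskCount: 1,
            // awsvpc networking is required when LaunchType is FARGATE.
            networkConfiguration: {
                awsvpcConfiguration: {
                    subnets: ["subnet-0123456789abcdef0"],
                    securityGroups: ["sg-0123456789abcdef0"],
                    assignPublicIp: aws_native.pipes.PipeAssignPublicIp.Disabled,
                },
            },
        },
    },
});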
PipeTargetEventBridgeEventBusParameters, PipeTargetEventBridgeEventBusParametersArgs
- DetailType string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources List<string> - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string - The source of the event.
- Time string - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- DetailType string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- EndpointId string - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- Resources []string - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- Source string - The source of the event.
- Time string - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String> - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String - The source of the event.
- time String - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType string - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId string - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources string[] - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source string - The source of the event.
- time string - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detail_type str - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpoint_id str - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources Sequence[str] - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source str - The source of the event.
- time str - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
- detailType String - A free-form string, with a maximum of 128 characters, used to decide what fields to expect in the event detail.
- endpointId String - The URL subdomain of the endpoint. For example, if the URL for the endpoint is https://abcde.veo.endpoints.event.amazonaws.com, then the EndpointId is abcde.veo.
- resources List<String> - AWS resources, identified by Amazon Resource Name (ARN), which the event primarily concerns. Any number, including zero, may be present.
- source String - The source of the event.
- time String - The timestamp of the event, per RFC3339. If no timestamp is provided, the timestamp of the PutEvents call is used.
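A minimal TypeScript sketch of an EventBridge event bus target; the ARNs, source string, and detail type are hypothetical, and using a JSONPath for time assumes the source message carries an RFC3339 field.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs and names below are hypothetical placeholders.
const busPipe = new aws_native.pipes.Pipe("busPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:events:us-east-1:123456789123:event-bus/exampleBus",
    targetParameters: {
        eventBridgeEventBusParameters: {
            detailType: "order.received",
            source: "com.example.pipes",
            // Assumed JSONPath into the event pointing at an RFC3339 timestamp.
            time: "$.body.occurredAt",
        },
    },
});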
PipeTargetHttpParameters, PipeTargetHttpParametersArgs
- HeaderParameters Dictionary<string, string> - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues List<string> - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters Dictionary<string, string> - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- HeaderParameters map[string]string - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- PathParameterValues []string - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- QueryStringParameters map[string]string - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String,String> - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String> - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String,String> - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters {[key: string]: string} - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues string[] - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters {[key: string]: string} - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- header_parameters Mapping[str, str] - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- path_parameter_values Sequence[str] - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- query_string_parameters Mapping[str, str] - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- headerParameters Map<String> - The headers that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
- pathParameterValues List<String> - The path parameter values used to populate API Gateway REST API or EventBridge ApiDestination path wildcards ("*").
- queryStringParameters Map<String> - The query string keys/values that need to be sent as part of the request invoking the API Gateway REST API or EventBridge ApiDestination.
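A minimal TypeScript sketch of an API Gateway HTTP target; the ARNs, header values, and path parameter are hypothetical placeholders.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs and values below are hypothetical placeholders.
const httpPipe = new aws_native.pipes.Pipe("httpPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:execute-api:us-east-1:123456789123:abc123/prod/POST/orders/*",
    targetParameters: {
        httpParameters: {
            headerParameters: { "x-api-key": "example-key" },
            pathParameterValues: ["42"], // fills the "*" wildcard in the target path
            queryStringParameters: { verbose: "true" },
        },
    },
});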
PipeTargetInvocationType, PipeTargetInvocationTypeArgs
- RequestResponse - REQUEST_RESPONSE
- FireAndForget - FIRE_AND_FORGET
- PipeTargetInvocationTypeRequestResponse - REQUEST_RESPONSE
- PipeTargetInvocationTypeFireAndForget - FIRE_AND_FORGET
- RequestResponse - REQUEST_RESPONSE
- FireAndForget - FIRE_AND_FORGET
- RequestResponse - REQUEST_RESPONSE
- FireAndForget - FIRE_AND_FORGET
- REQUEST_RESPONSE
- REQUEST_RESPONSE
- FIRE_AND_FORGET
- FIRE_AND_FORGET
- "REQUEST_RESPONSE"
- REQUEST_RESPONSE
- "FIRE_AND_FORGET"
- FIRE_AND_FORGET
PipeTargetKinesisStreamParameters, PipeTargetKinesisStreamParametersArgs
- PartitionKey string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- PartitionKey string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey string - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partition_key str - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
- partitionKey String - Determines which shard in the stream the data record is assigned to. Partition keys are Unicode strings with a maximum length limit of 256 characters for each key. Amazon Kinesis Data Streams uses the partition key as input to a hash function that maps the partition key and associated data to a specific shard. Specifically, an MD5 hash function is used to map partition keys to 128-bit integer values and to map associated data records to shards. As a result of this hashing mechanism, all data records with the same partition key map to the same shard within the stream.
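A minimal TypeScript sketch of a Kinesis target; the ARNs are hypothetical, and using a JSONPath as the partition key assumes the source message carries a suitable field.
import * as aws_native from "@pulumi/aws-native";
// Minimal sketch: ARNs below are hypothetical placeholders.
const kinesisPipe = new aws_native.pipes.Pipe("kinesisPipe", {
    roleArn: "arn:aws:iam::123456789123:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:123456789123:exampleSourceQueue",
    target: "arn:aws:kinesis:us-east-1:123456789123:stream/exampleStream",
    targetParameters: {
        kinesisStreamParameters: {
            // Assumed JSONPath into the event; records with the same key land on the same shard.
            partitionKey: "$.body.customerId",
        },
    },
});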
PipeTargetLambdaFunctionParameters, PipeTargetLambdaFunctionParametersArgs
- InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- InvocationType PipeTargetInvocationType - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocation_type PipeTargetInvocationType - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET" - Specify whether to invoke the function synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. This corresponds to the RequestResponse option in the InvocationType parameter for the Lambda Invoke API.
  - FIRE_AND_FORGET - Invoke asynchronously. This corresponds to the Event option in the InvocationType parameter for the Lambda Invoke API.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
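For example, the following TypeScript sketch (with placeholder ARNs) sets the Lambda target's invocation type to FIRE_AND_FORGET, which maps to the Event option of the Lambda Invoke API as described above.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch with hypothetical ARNs.
const lambdaTargetPipe = new aws_native.pipes.Pipe("lambdaTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:lambda:us-east-1:111111111111:function:example-fn",
    targetParameters: {
        lambdaFunctionParameters: {
            // Asynchronous invocation; corresponds to Lambda's "Event" InvocationType.
            invocationType: "FIRE_AND_FORGET",
        },
    },
});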
PipeTargetParameters, PipeTargetParametersArgs
- BatchJobParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetBatchJobParameters - The parameters for using an AWS Batch job as a target.
- CloudWatchLogsParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetCloudWatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target.
- EcsTaskParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEcsTaskParameters - The parameters for using an Amazon ECS task as a target.
- EventBridgeEventBusParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetEventBridgeEventBusParameters - The parameters for using an EventBridge event bus as a target.
- HttpParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetHttpParameters - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- InputTemplate string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- KinesisStreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetKinesisStreamParameters - The parameters for using a Kinesis stream as a target.
- LambdaFunctionParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetLambdaFunctionParameters - The parameters for using a Lambda function as a target.
- RedshiftDataParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetRedshiftDataParameters - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- SageMakerPipelineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSageMakerPipelineParameters - The parameters for using a SageMaker pipeline as a target.
- SqsQueueParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetSqsQueueParameters - The parameters for using an Amazon SQS queue as a target.
- StepFunctionStateMachineParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetStateMachineParameters - The parameters for using a Step Functions state machine as a target.
- TimestreamParameters Pulumi.AwsNative.Pipes.Inputs.PipeTargetTimestreamParameters - The parameters for using a Timestream for LiveAnalytics table as a target.
- BatchJobParameters PipeTargetBatchJobParameters - The parameters for using an AWS Batch job as a target.
- CloudWatchLogsParameters PipeTargetCloudWatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target.
- EcsTaskParameters PipeTargetEcsTaskParameters - The parameters for using an Amazon ECS task as a target.
- EventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters - The parameters for using an EventBridge event bus as a target.
- HttpParameters PipeTargetHttpParameters - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- InputTemplate string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- KinesisStreamParameters PipeTargetKinesisStreamParameters - The parameters for using a Kinesis stream as a target.
- LambdaFunctionParameters PipeTargetLambdaFunctionParameters - The parameters for using a Lambda function as a target.
- RedshiftDataParameters PipeTargetRedshiftDataParameters - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- SageMakerPipelineParameters PipeTargetSageMakerPipelineParameters - The parameters for using a SageMaker pipeline as a target.
- SqsQueueParameters PipeTargetSqsQueueParameters - The parameters for using an Amazon SQS queue as a target.
- StepFunctionStateMachineParameters PipeTargetStateMachineParameters - The parameters for using a Step Functions state machine as a target.
- TimestreamParameters PipeTargetTimestreamParameters - The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters PipeTargetBatchJobParameters - The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters PipeTargetEcsTaskParameters - The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters - The parameters for using an EventBridge event bus as a target.
- httpParameters PipeTargetHttpParameters - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- inputTemplate String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters PipeTargetKinesisStreamParameters - The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters PipeTargetLambdaFunctionParameters - The parameters for using a Lambda function as a target.
- redshiftDataParameters PipeTargetRedshiftDataParameters - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters - The parameters for using a SageMaker pipeline as a target.
- sqsQueueParameters PipeTargetSqsQueueParameters - The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters PipeTargetStateMachineParameters - The parameters for using a Step Functions state machine as a target.
- timestreamParameters PipeTargetTimestreamParameters - The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters PipeTargetBatchJobParameters - The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters PipeTargetCloudWatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters PipeTargetEcsTaskParameters - The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters PipeTargetEventBridgeEventBusParameters - The parameters for using an EventBridge event bus as a target.
- httpParameters PipeTargetHttpParameters - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- inputTemplate string - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters PipeTargetKinesisStreamParameters - The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters PipeTargetLambdaFunctionParameters - The parameters for using a Lambda function as a target.
- redshiftDataParameters PipeTargetRedshiftDataParameters - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters PipeTargetSageMakerPipelineParameters - The parameters for using a SageMaker pipeline as a target.
- sqsQueueParameters PipeTargetSqsQueueParameters - The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters PipeTargetStateMachineParameters - The parameters for using a Step Functions state machine as a target.
- timestreamParameters PipeTargetTimestreamParameters - The parameters for using a Timestream for LiveAnalytics table as a target.
- batch_job_parameters PipeTargetBatchJobParameters - The parameters for using an AWS Batch job as a target.
- cloud_watch_logs_parameters PipeTargetCloudWatchLogsParameters - The parameters for using a CloudWatch Logs log stream as a target.
- ecs_task_parameters PipeTargetEcsTaskParameters - The parameters for using an Amazon ECS task as a target.
- event_bridge_event_bus_parameters PipeTargetEventBridgeEventBusParameters - The parameters for using an EventBridge event bus as a target.
- http_parameters PipeTargetHttpParameters - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- input_template str - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesis_stream_parameters PipeTargetKinesisStreamParameters - The parameters for using a Kinesis stream as a target.
- lambda_function_parameters PipeTargetLambdaFunctionParameters - The parameters for using a Lambda function as a target.
- redshift_data_parameters PipeTargetRedshiftDataParameters - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sage_maker_pipeline_parameters PipeTargetSageMakerPipelineParameters - The parameters for using a SageMaker pipeline as a target.
- sqs_queue_parameters PipeTargetSqsQueueParameters - The parameters for using an Amazon SQS queue as a target.
- step_function_state_machine_parameters PipeTargetStateMachineParameters - The parameters for using a Step Functions state machine as a target.
- timestream_parameters PipeTargetTimestreamParameters - The parameters for using a Timestream for LiveAnalytics table as a target.
- batchJobParameters Property Map - The parameters for using an AWS Batch job as a target.
- cloudWatchLogsParameters Property Map - The parameters for using a CloudWatch Logs log stream as a target.
- ecsTaskParameters Property Map - The parameters for using an Amazon ECS task as a target.
- eventBridgeEventBusParameters Property Map - The parameters for using an EventBridge event bus as a target.
- httpParameters Property Map - These are custom parameters to be used when the target is an API Gateway REST API or an EventBridge API destination.
- inputTemplate String - Valid JSON text passed to the target. In this case, nothing from the event itself is passed to the target. For more information, see The JavaScript Object Notation (JSON) Data Interchange Format. To remove an input template, specify an empty string.
- kinesisStreamParameters Property Map - The parameters for using a Kinesis stream as a target.
- lambdaFunctionParameters Property Map - The parameters for using a Lambda function as a target.
- redshiftDataParameters Property Map - These are custom parameters to be used when the target is an Amazon Redshift cluster to invoke the Amazon Redshift Data API BatchExecuteStatement.
- sageMakerPipelineParameters Property Map - The parameters for using a SageMaker pipeline as a target.
- sqsQueueParameters Property Map - The parameters for using an Amazon SQS queue as a target.
- stepFunctionStateMachineParameters Property Map - The parameters for using a Step Functions state machine as a target.
- timestreamParameters Property Map - The parameters for using a Timestream for LiveAnalytics table as a target.
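To make the shape of these target parameters concrete, here is a hedged TypeScript sketch that passes a static input template to the target; the ARNs and the $.body.orderId placeholder path are illustrative assumptions, not values from this reference.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch: deliver only a templated JSON document to the target,
// splicing in one field from the source event via the <$.path> placeholder.
// All ARNs and paths are hypothetical.
const templatedPipe = new aws_native.pipes.Pipe("templatedPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:lambda:us-east-1:111111111111:function:example-fn",
    targetParameters: {
        // Only this JSON (not the raw event) is passed to the target.
        inputTemplate: `{"orderId": "<$.body.orderId>", "status": "received"}`,
    },
});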
PipeTargetRedshiftDataParameters, PipeTargetRedshiftDataParametersArgs
- Database string - Redshift database.
- Sqls List<string> - A list of SQL statements.
- DbUser string - Database user name.
- SecretManagerArn string - Optional Secrets Manager ARN which stores the database credentials.
- StatementName string - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- WithEvent bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- Database string - Redshift database.
- Sqls []string - A list of SQL statements.
- DbUser string - Database user name.
- SecretManagerArn string - Optional Secrets Manager ARN which stores the database credentials.
- StatementName string - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- WithEvent bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String - Redshift database.
- sqls List<String> - A list of SQL statements.
- dbUser String - Database user name.
- secretManagerArn String - Optional Secrets Manager ARN which stores the database credentials.
- statementName String - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent Boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database string - Redshift database.
- sqls string[] - A list of SQL statements.
- dbUser string - Database user name.
- secretManagerArn string - Optional Secrets Manager ARN which stores the database credentials.
- statementName string - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database str - Redshift database.
- sqls Sequence[str] - A list of SQL statements.
- db_user str - Database user name.
- secret_manager_arn str - Optional Secrets Manager ARN which stores the database credentials.
- statement_name str - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- with_event bool - Indicates whether to send an event back to EventBridge after the SQL statement runs.
- database String - Redshift database.
- sqls List<String> - A list of SQL statements.
- dbUser String - Database user name.
- secretManagerArn String - Optional Secrets Manager ARN which stores the database credentials.
- statementName String - A name for the Redshift Data API statement, which can be used as a filter in ListStatements.
- withEvent Boolean - Indicates whether to send an event back to EventBridge after the SQL statement runs.
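As a hedged example of these fields working together, the sketch below runs one SQL statement against a Redshift cluster target; the cluster ARN, database, user, table, and SQL text are all placeholders.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch with hypothetical identifiers.
const redshiftTargetPipe = new aws_native.pipes.Pipe("redshiftTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:redshift:us-east-1:111111111111:cluster:example-cluster",
    targetParameters: {
        redshiftDataParameters: {
            database: "dev",
            dbUser: "awsuser", // alternatively, supply secretManagerArn for credentials
            sqls: ["INSERT INTO pipe_events (payload) VALUES ('placeholder')"],
            statementName: "pipe-insert", // filterable later via ListStatements
            withEvent: true, // emit an EventBridge event after the SQL runs
        },
    },
});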
PipeTargetSageMakerPipelineParameters, PipeTargetSageMakerPipelineParametersArgs
- PipelineParameterList List<Pulumi.AwsNative.Pipes.Inputs.PipeSageMakerPipelineParameter> - List of Parameter names and values for SageMaker Model Building Pipeline execution.
- PipelineParameterList []PipeSageMakerPipelineParameter - List of Parameter names and values for SageMaker Model Building Pipeline execution.
- pipelineParameterList List<PipeSageMakerPipelineParameter> - List of Parameter names and values for SageMaker Model Building Pipeline execution.
- pipelineParameterList PipeSageMakerPipelineParameter[] - List of Parameter names and values for SageMaker Model Building Pipeline execution.
- pipeline_parameter_list Sequence[PipeSageMakerPipelineParameter] - List of Parameter names and values for SageMaker Model Building Pipeline execution.
- pipelineParameterList List<Property Map> - List of Parameter names and values for SageMaker Model Building Pipeline execution.
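A minimal TypeScript sketch of starting a SageMaker Model Building Pipeline with one execution parameter follows; the pipeline ARN and the name/value entries are assumptions based on the SageMaker pipeline parameter shape, not values from this reference.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch with hypothetical identifiers.
const sagemakerTargetPipe = new aws_native.pipes.Pipe("sagemakerTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:sagemaker:us-east-1:111111111111:pipeline/example-pipeline",
    targetParameters: {
        sageMakerPipelineParameters: {
            // Each entry is a name/value pair passed to the pipeline execution.
            pipelineParameterList: [
                { name: "InputDataUrl", value: "s3://example-bucket/input/" },
            ],
        },
    },
});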
PipeTargetSqsQueueParameters, PipeTargetSqsQueueParametersArgs
- MessageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string - The FIFO message group ID to use as the target.
- MessageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- MessageGroupId string - The FIFO message group ID to use as the target.
- messageDeduplicationId String - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String - The FIFO message group ID to use as the target.
- messageDeduplicationId string - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId string - The FIFO message group ID to use as the target.
- message_deduplication_id str - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- message_group_id str - The FIFO message group ID to use as the target.
- messageDeduplicationId String - This parameter applies only to FIFO (first-in-first-out) queues. The token used for deduplication of sent messages.
- messageGroupId String - The FIFO message group ID to use as the target.
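Both fields apply only when the target is a FIFO queue, as in this hedged sketch; the queue ARNs and the $.messageId deduplication path are placeholder assumptions.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch: deliver to a FIFO queue target (hypothetical ARNs).
const fifoTargetPipe = new aws_native.pipes.Pipe("fifoTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:sqs:us-east-1:111111111111:example-target-queue.fifo",
    targetParameters: {
        sqsQueueParameters: {
            messageGroupId: "orders", // messages in a group are delivered in order
            // Assumed dynamic path so each source message dedupes on its own ID.
            messageDeduplicationId: "$.messageId",
        },
    },
});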
PipeTargetStateMachineParameters, PipeTargetStateMachineParametersArgs
- InvocationType Pulumi.AwsNative.Pipes.PipeTargetInvocationType - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- InvocationType PipeTargetInvocationType - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType PipeTargetInvocationType - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocation_type PipeTargetInvocationType - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
- invocationType "REQUEST_RESPONSE" | "FIRE_AND_FORGET" - Specify whether to invoke the Step Functions state machine synchronously or asynchronously.
  - REQUEST_RESPONSE (default) - Invoke synchronously. For more information, see StartSyncExecution in the AWS Step Functions API Reference. REQUEST_RESPONSE is not supported for STANDARD state machine workflows.
  - FIRE_AND_FORGET - Invoke asynchronously. For more information, see StartExecution in the AWS Step Functions API Reference.
  For more information, see Invocation types in the Amazon EventBridge User Guide.
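For instance, a STANDARD state machine target must use FIRE_AND_FORGET, as in this placeholder TypeScript sketch:

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch with hypothetical ARNs.
const sfnTargetPipe = new aws_native.pipes.Pipe("sfnTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:sqs:us-east-1:111111111111:example-source-queue",
    target: "arn:aws:states:us-east-1:111111111111:stateMachine:ExampleStandardMachine",
    targetParameters: {
        stepFunctionStateMachineParameters: {
            // REQUEST_RESPONSE is not supported for STANDARD workflows,
            // so this starts the execution asynchronously (StartExecution).
            invocationType: "FIRE_AND_FORGET",
        },
    },
});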
PipeTargetTimestreamParameters, PipeTargetTimestreamParametersArgs
- DimensionMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeDimensionMapping> - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- TimeValue string - Dynamic path to the source data field that represents the time value for your data.
- VersionValue string - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- EpochTimeUnit Pulumi.AwsNative.Pipes.PipeEpochTimeUnit - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- MultiMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeMultiMeasureMapping> - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- SingleMeasureMappings List<Pulumi.AwsNative.Pipes.Inputs.PipeSingleMeasureMapping> - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- TimeFieldType Pulumi.AwsNative.Pipes.PipeTimeFieldType - The type of time value used. The default is EPOCH.
- TimestampFormat string - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- DimensionMappings []PipeDimensionMapping - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- TimeValue string - Dynamic path to the source data field that represents the time value for your data.
- VersionValue string - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- EpochTimeUnit PipeEpochTimeUnit - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- MultiMeasureMappings []PipeMultiMeasureMapping - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- SingleMeasureMappings []PipeSingleMeasureMapping - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- TimeFieldType PipeTimeFieldType - The type of time value used. The default is EPOCH.
- TimestampFormat string - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings List<PipeDimensionMapping> - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue String - Dynamic path to the source data field that represents the time value for your data.
- versionValue String - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- epochTimeUnit PipeEpochTimeUnit - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings List<PipeMultiMeasureMapping> - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings List<PipeSingleMeasureMapping> - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType PipeTimeFieldType - The type of time value used. The default is EPOCH.
- timestampFormat String - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings PipeDimensionMapping[] - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue string - Dynamic path to the source data field that represents the time value for your data.
- versionValue string - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- epochTimeUnit PipeEpochTimeUnit - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings PipeMultiMeasureMapping[] - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings PipeSingleMeasureMapping[] - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType PipeTimeFieldType - The type of time value used. The default is EPOCH.
- timestampFormat string - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimension_mappings Sequence[PipeDimensionMapping] - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- time_value str - Dynamic path to the source data field that represents the time value for your data.
- version_value str - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- epoch_time_unit PipeEpochTimeUnit - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multi_measure_mappings Sequence[PipeMultiMeasureMapping] - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- single_measure_mappings Sequence[PipeSingleMeasureMapping] - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- time_field_type PipeTimeFieldType - The type of time value used. The default is EPOCH.
- timestamp_format str - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
- dimensionMappings List<Property Map> - Map source data to dimensions in the target Timestream for LiveAnalytics table. For more information, see Amazon Timestream for LiveAnalytics concepts.
- timeValue String - Dynamic path to the source data field that represents the time value for your data.
- versionValue String - 64-bit version value, or source data field that represents the version value for your data. Write requests for duplicate data with a higher version number will update the existing measure value and version; when the measure value is the same, the version is still updated. Default value is 1. Version must be 1 or greater, or you will receive a ValidationException error. Timestream for LiveAnalytics does not support updating partial measure values in a record.
- epochTimeUnit "MILLISECONDS" | "SECONDS" | "MICROSECONDS" | "NANOSECONDS" - The granularity of the time units used. Default is MILLISECONDS. Required if TimeFieldType is specified as EPOCH.
- multiMeasureMappings List<Property Map> - Maps multiple measures from the source event to the same record in the specified Timestream for LiveAnalytics table.
- singleMeasureMappings List<Property Map> - Mappings of single source data fields to individual records in the specified Timestream for LiveAnalytics table.
- timeFieldType "EPOCH" | "TIMESTAMP_FORMAT" - The type of time value used. The default is EPOCH.
- timestampFormat String - How to format the timestamps. For example, yyyy-MM-dd'T'HH:mm:ss'Z'. Required if TimeFieldType is specified as TIMESTAMP_FORMAT.
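Tying these fields together, here is a hedged TypeScript sketch that writes one dimension and one measure per event to a Timestream for LiveAnalytics table; the table ARN, the $.body.* paths, and the mapping field names (dimensionName, measureName, and so on) are assumptions based on the AWS dimension- and measure-mapping shapes, not values from this reference.

import * as aws_native from "@pulumi/aws-native";

// Minimal sketch with hypothetical identifiers and source paths.
const timestreamTargetPipe = new aws_native.pipes.Pipe("timestreamTargetPipe", {
    roleArn: "arn:aws:iam::111111111111:role/example-pipe-role",
    source: "arn:aws:kinesis:us-east-1:111111111111:stream/example-stream",
    target: "arn:aws:timestream:us-east-1:111111111111:database/exampleDb/table/exampleTable",
    targetParameters: {
        timestreamParameters: {
            dimensionMappings: [{
                dimensionName: "deviceId",
                dimensionValue: "$.body.deviceId",
                dimensionValueType: "VARCHAR",
            }],
            timeValue: "$.body.eventTime",
            timeFieldType: "TIMESTAMP_FORMAT",
            // Required because TimeFieldType is TIMESTAMP_FORMAT.
            timestampFormat: "yyyy-MM-dd'T'HH:mm:ss'Z'",
            versionValue: "1", // writes with a higher version overwrite earlier measures
            singleMeasureMappings: [{
                measureName: "temperature",
                measureValue: "$.body.temperature",
                measureValueType: "DOUBLE",
            }],
        },
    },
});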
PipeTimeFieldType, PipeTimeFieldTypeArgs
- Epoch - EPOCH
- TimestampFormat - TIMESTAMP_FORMAT
- PipeTimeFieldTypeEpoch - EPOCH
- PipeTimeFieldTypeTimestampFormat - TIMESTAMP_FORMAT
- Epoch - EPOCH
- TimestampFormat - TIMESTAMP_FORMAT
- Epoch - EPOCH
- TimestampFormat - TIMESTAMP_FORMAT
- EPOCH - EPOCH
- TIMESTAMP_FORMAT - TIMESTAMP_FORMAT
- "EPOCH" - EPOCH
- "TIMESTAMP_FORMAT" - TIMESTAMP_FORMAT
Package Details
- Repository: AWS Native pulumi/pulumi-aws-native
- License: Apache-2.0