
mongodbatlas.StreamProcessor

MongoDB Atlas v3.20.4 published on Wednesday, Oct 30, 2024 by Pulumi

    Example Usage

    The following example is shown in Java, with the equivalent Pulumi YAML program after it.
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.mongodbatlas.StreamInstance;
    import com.pulumi.mongodbatlas.StreamInstanceArgs;
    import com.pulumi.mongodbatlas.inputs.StreamInstanceDataProcessRegionArgs;
    import com.pulumi.mongodbatlas.StreamConnection;
    import com.pulumi.mongodbatlas.StreamConnectionArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionDbRoleToExecuteArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionAuthenticationArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionSecurityArgs;
    import com.pulumi.mongodbatlas.StreamProcessor;
    import com.pulumi.mongodbatlas.StreamProcessorArgs;
    import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsArgs;
    import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsDlqArgs;
    import com.pulumi.mongodbatlas.MongodbatlasFunctions;
    import com.pulumi.mongodbatlas.inputs.GetStreamProcessorsArgs;
    import com.pulumi.mongodbatlas.inputs.GetStreamProcessorArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            // Placeholder configuration values; set these with `pulumi config set`.
            final var config = ctx.config();
            final var projectId = config.require("projectId");
            final var clusterName = config.require("clusterName");
            final var kafkaUsername = config.require("kafkaUsername");
            final var kafkaPassword = config.require("kafkaPassword");

            var example = new StreamInstance("example", StreamInstanceArgs.builder()
                .projectId(projectId)
                .instanceName("InstanceName")
                .dataProcessRegion(StreamInstanceDataProcessRegionArgs.builder()
                    .region("VIRGINIA_USA")
                    .cloudProvider("AWS")
                    .build())
                .build());
    
            var example_sample = new StreamConnection("example-sample", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("sample_stream_solar")
                .type("Sample")
                .build());
    
            var example_cluster = new StreamConnection("example-cluster", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("ClusterConnection")
                .type("Cluster")
                .clusterName(clusterName)
                .dbRoleToExecute(StreamConnectionDbRoleToExecuteArgs.builder()
                    .role("atlasAdmin")
                    .type("BUILT_IN")
                    .build())
                .build());
    
            var example_kafka = new StreamConnection("example-kafka", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("KafkaPlaintextConnection")
                .type("Kafka")
                .authentication(StreamConnectionAuthenticationArgs.builder()
                    .mechanism("PLAIN")
                    .username(kafkaUsername)
                    .password(kafkaPassword)
                    .build())
                .bootstrapServers("localhost:9092,localhost:9092")
                .config(Map.of("auto.offset.reset", "earliest"))
                .security(StreamConnectionSecurityArgs.builder()
                    .protocol("PLAINTEXT")
                    .build())
                .build());
    
            var stream_processor_sample_example = new StreamProcessor("stream-processor-sample-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("sampleProcessorName")
                .pipeline(Output.tuple(example_sample.connectionName(), example_cluster.connectionName())
                    .applyValue(values -> serializeJson(
                        jsonArray(
                            jsonObject(
                                jsonProperty("$source", jsonObject(
                                    jsonProperty("connectionName", values.t1)
                                ))
                            ), 
                            jsonObject(
                                jsonProperty("$emit", jsonObject(
                                    jsonProperty("connectionName", values.t2),
                                    jsonProperty("db", "sample"),
                                    jsonProperty("coll", "solar"),
                                    jsonProperty("timeseries", jsonObject(
                                        jsonProperty("timeField", "_ts")
                                    ))
                                ))
                            )
                        ))))
                .state("STARTED")
                .build());
    
            var stream_processor_cluster_to_kafka_example = new StreamProcessor("stream-processor-cluster-to-kafka-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("clusterProcessorName")
                .pipeline(Output.tuple(example_cluster.connectionName(), example_kafka.connectionName())
                    .applyValue(values -> serializeJson(
                        jsonArray(
                            jsonObject(
                                jsonProperty("$source", jsonObject(
                                    jsonProperty("connectionName", values.t1)
                                ))
                            ), 
                            jsonObject(
                                jsonProperty("$emit", jsonObject(
                                    jsonProperty("connectionName", values.t2),
                                    jsonProperty("topic", "topic_from_cluster")
                                ))
                            )
                        ))))
                .state("CREATED")
                .build());
    
            var stream_processor_kafka_to_cluster_example = new StreamProcessor("stream-processor-kafka-to-cluster-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("kafkaProcessorName")
                .pipeline(Output.tuple(example_kafka.connectionName(), example_cluster.connectionName())
                    .applyValue(values -> serializeJson(
                        jsonArray(
                            jsonObject(
                                jsonProperty("$source", jsonObject(
                                    jsonProperty("connectionName", values.t1),
                                    jsonProperty("topic", "topic_source")
                                ))
                            ), 
                            jsonObject(
                                jsonProperty("$emit", jsonObject(
                                    jsonProperty("connectionName", values.t2),
                                    jsonProperty("db", "kafka"),
                                    jsonProperty("coll", "topic_source"),
                                    jsonProperty("timeseries", jsonObject(
                                        jsonProperty("timeField", "ts")
                                    ))
                                ))
                            )
                        ))))
                .state("CREATED")
                .options(StreamProcessorOptionsArgs.builder()
                    .dlq(StreamProcessorOptionsDlqArgs.builder()
                        .coll("exampleColumn")
                        .connectionName(example_cluster.connectionName())
                        .db("exampleDb")
                        .build())
                    .build())
                .build());
    
            final var exampleStreamProcessors = MongodbatlasFunctions.getStreamProcessors(GetStreamProcessorsArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .build());

            final var exampleStreamProcessor = MongodbatlasFunctions.getStreamProcessor(GetStreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName(stream_processor_sample_example.processorName())
                .build());

            ctx.export("streamProcessorsState", exampleStreamProcessor.applyValue(result -> result.state()));
            ctx.export("streamProcessorsResults", exampleStreamProcessors.applyValue(result -> result.results()));
        }
    }
    
    resources:
      example:
        type: mongodbatlas:StreamInstance
        properties:
          projectId: ${projectId}
          instanceName: InstanceName
          dataProcessRegion:
            region: VIRGINIA_USA
            cloudProvider: AWS
      example-sample:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: sample_stream_solar
          type: Sample
      example-cluster:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: ClusterConnection
          type: Cluster
          clusterName: ${clusterName}
          dbRoleToExecute:
            role: atlasAdmin
            type: BUILT_IN
      example-kafka:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: KafkaPlaintextConnection
          type: Kafka
          authentication:
            mechanism: PLAIN
            username: ${kafkaUsername}
            password: ${kafkaPassword}
          bootstrapServers: localhost:9092,localhost:9092
          config:
            auto.offset.reset: earliest
          security:
            protocol: PLAINTEXT
      stream-processor-sample-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: sampleProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-sample"].connectionName}
              - $emit:
                  connectionName: ${["example-cluster"].connectionName}
                  db: sample
                  coll: solar
                  timeseries:
                    timeField: _ts
          state: STARTED
      stream-processor-cluster-to-kafka-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: clusterProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-cluster"].connectionName}
              - $emit:
                  connectionName: ${["example-kafka"].connectionName}
                  topic: topic_from_cluster
          state: CREATED
      stream-processor-kafka-to-cluster-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: kafkaProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-kafka"].connectionName}
                  topic: topic_source
              - $emit:
                  connectionName: ${["example-cluster"].connectionName}
                  db: kafka
                  coll: topic_source
                  timeseries:
                    timeField: ts
          state: CREATED
          options:
            dlq:
              coll: exampleColumn
              connectionName: ${["example-cluster"].connectionName}
              db: exampleDb
    variables:
      example-stream-processors:
        fn::invoke:
          Function: mongodbatlas:getStreamProcessors
          Arguments:
            projectId: ${projectId}
            instanceName: ${example.instanceName}
      example-stream-processor:
        fn::invoke:
          Function: mongodbatlas:getStreamProcessor
          Arguments:
            projectId: ${projectId}
            instanceName: ${example.instanceName}
            processorName: ${["stream-processor-sample-example"].processorName}
    outputs:
      # example making use of data sources
      streamProcessorsState: ${["example-stream-processor"].state}
      streamProcessorsResults: ${["example-stream-processors"].results}
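
    The other language tabs for this example are not rendered above; as a rough equivalent, here is a minimal Python sketch of the first processor (sample connection into an Atlas cluster). It mirrors the Java and YAML examples and uses pulumi.Output.json_dumps to serialize output values into the pipeline string; treat it as an illustration, not generated reference code.

    import pulumi
    import pulumi_mongodbatlas as mongodbatlas

    config = pulumi.Config()
    project_id = config.require("projectId")

    # Stream instance and sample connection, as in the examples above.
    example = mongodbatlas.StreamInstance(
        "example",
        project_id=project_id,
        instance_name="InstanceName",
        data_process_region={"region": "VIRGINIA_USA", "cloud_provider": "AWS"},
    )

    example_sample = mongodbatlas.StreamConnection(
        "example-sample",
        project_id=project_id,
        instance_name=example.instance_name,
        connection_name="sample_stream_solar",
        type="Sample",
    )

    # Output.json_dumps resolves any Outputs nested in the structure before
    # serializing it to a JSON string.
    pipeline = pulumi.Output.json_dumps([
        {"$source": {"connectionName": example_sample.connection_name}},
        {"$emit": {
            "connectionName": "ClusterConnection",  # or a cluster connection's output
            "db": "sample",
            "coll": "solar",
            "timeseries": {"timeField": "_ts"},
        }},
    ])

    stream_processor = mongodbatlas.StreamProcessor(
        "stream-processor-sample-example",
        project_id=project_id,
        instance_name=example.instance_name,
        processor_name="sampleProcessorName",
        pipeline=pipeline,
        state="STARTED",
    )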
    

    Create StreamProcessor Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new StreamProcessor(name: string, args: StreamProcessorArgs, opts?: CustomResourceOptions);
    @overload
    def StreamProcessor(resource_name: str,
                        args: StreamProcessorArgs,
                        opts: Optional[ResourceOptions] = None)
    
    @overload
    def StreamProcessor(resource_name: str,
                        opts: Optional[ResourceOptions] = None,
                        instance_name: Optional[str] = None,
                        pipeline: Optional[str] = None,
                        processor_name: Optional[str] = None,
                        project_id: Optional[str] = None,
                        options: Optional[StreamProcessorOptionsArgs] = None,
                        state: Optional[str] = None)
    func NewStreamProcessor(ctx *Context, name string, args StreamProcessorArgs, opts ...ResourceOption) (*StreamProcessor, error)
    public StreamProcessor(string name, StreamProcessorArgs args, CustomResourceOptions? opts = null)
    public StreamProcessor(String name, StreamProcessorArgs args)
    public StreamProcessor(String name, StreamProcessorArgs args, CustomResourceOptions options)
    
    type: mongodbatlas:StreamProcessor
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args StreamProcessorArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args StreamProcessorArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args StreamProcessorArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args StreamProcessorArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args StreamProcessorArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var streamProcessorResource = new Mongodbatlas.StreamProcessor("streamProcessorResource", new()
    {
        InstanceName = "string",
        Pipeline = "string",
        ProcessorName = "string",
        ProjectId = "string",
        Options = new Mongodbatlas.Inputs.StreamProcessorOptionsArgs
        {
            Dlq = new Mongodbatlas.Inputs.StreamProcessorOptionsDlqArgs
            {
                Coll = "string",
                ConnectionName = "string",
                Db = "string",
            },
        },
        State = "string",
    });
    
    example, err := mongodbatlas.NewStreamProcessor(ctx, "streamProcessorResource", &mongodbatlas.StreamProcessorArgs{
    	InstanceName:  pulumi.String("string"),
    	Pipeline:      pulumi.String("string"),
    	ProcessorName: pulumi.String("string"),
    	ProjectId:     pulumi.String("string"),
    	Options: &mongodbatlas.StreamProcessorOptionsArgs{
    		Dlq: &mongodbatlas.StreamProcessorOptionsDlqArgs{
    			Coll:           pulumi.String("string"),
    			ConnectionName: pulumi.String("string"),
    			Db:             pulumi.String("string"),
    		},
    	},
    	State: pulumi.String("string"),
    })
    
    var streamProcessorResource = new StreamProcessor("streamProcessorResource", StreamProcessorArgs.builder()
        .instanceName("string")
        .pipeline("string")
        .processorName("string")
        .projectId("string")
        .options(StreamProcessorOptionsArgs.builder()
            .dlq(StreamProcessorOptionsDlqArgs.builder()
                .coll("string")
                .connectionName("string")
                .db("string")
                .build())
            .build())
        .state("string")
        .build());
    
    stream_processor_resource = mongodbatlas.StreamProcessor("streamProcessorResource",
        instance_name="string",
        pipeline="string",
        processor_name="string",
        project_id="string",
        options={
            "dlq": {
                "coll": "string",
                "connection_name": "string",
                "db": "string",
            },
        },
        state="string")
    
    const streamProcessorResource = new mongodbatlas.StreamProcessor("streamProcessorResource", {
        instanceName: "string",
        pipeline: "string",
        processorName: "string",
        projectId: "string",
        options: {
            dlq: {
                coll: "string",
                connectionName: "string",
                db: "string",
            },
        },
        state: "string",
    });
    
    type: mongodbatlas:StreamProcessor
    properties:
        instanceName: string
        options:
            dlq:
                coll: string
                connectionName: string
                db: string
        pipeline: string
        processorName: string
        projectId: string
        state: string
    

    StreamProcessor Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
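
    For example, the options input for this resource can be written either way; a brief sketch (collection and database names are placeholders):

    import pulumi_mongodbatlas as mongodbatlas

    # As argument classes:
    opts_args = mongodbatlas.StreamProcessorOptionsArgs(
        dlq=mongodbatlas.StreamProcessorOptionsDlqArgs(
            coll="dlqColl",
            connection_name="ClusterConnection",
            db="dlqDb",
        )
    )

    # As an equivalent dictionary literal:
    opts_dict = {
        "dlq": {
            "coll": "dlqColl",
            "connection_name": "ClusterConnection",
            "db": "dlqDb",
        }
    }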

    The StreamProcessor resource accepts the following input properties:

    InstanceName string
    Human-readable label that identifies the stream instance.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    Options StreamProcessorOptions
    Optional configuration for the stream processor.
    State string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    InstanceName string
    Human-readable label that identifies the stream instance.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    Options StreamProcessorOptionsArgs
    Optional configuration for the stream processor.
    State string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    instanceName String
    Human-readable label that identifies the stream instance.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    options StreamProcessorOptions
    Optional configuration for the stream processor.
    state String
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    instanceName string
    Human-readable label that identifies the stream instance.
    pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName string
    Human-readable label that identifies the stream processor.
    projectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    options StreamProcessorOptions
    Optional configuration for the stream processor.
    state string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    instance_name str
    Human-readable label that identifies the stream instance.
    pipeline str
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processor_name str
    Human-readable label that identifies the stream processor.
    project_id str
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    options StreamProcessorOptionsArgs
    Optional configuration for the stream processor.
    state str
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    instanceName String
    Human-readable label that identifies the stream instance.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    options Property Map
    Optional configuration for the stream processor.
    state String
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
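
    Since state both reports and controls the processor, starting or stopping an existing processor is just a property change on a later update. A minimal Python sketch (connection and processor names are placeholders):

    import json

    import pulumi
    import pulumi_mongodbatlas as mongodbatlas

    config = pulumi.Config()

    # A static pipeline can be serialized with plain json.dumps.
    pipeline = json.dumps([
        {"$source": {"connectionName": "sample_stream_solar"}},
        {"$emit": {"connectionName": "ClusterConnection", "db": "sample", "coll": "solar"}},
    ])

    processor = mongodbatlas.StreamProcessor(
        "processor",
        project_id=config.require("projectId"),
        instance_name="InstanceName",
        processor_name="exampleProcessor",
        pipeline=pipeline,
        # Omitting `state` creates the processor in CREATED (stopped). Changing
        # this value to "STARTED" or "STOPPED" on a later `pulumi up` starts or
        # stops the processor.
        state="STARTED",
    )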

    Outputs

    All input properties are implicitly available as output properties. Additionally, the StreamProcessor resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    Id string
    The provider-assigned unique ID for this managed resource.
    Stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    id String
    The provider-assigned unique ID for this managed resource.
    stats String
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    id string
    The provider-assigned unique ID for this managed resource.
    stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    id str
    The provider-assigned unique ID for this managed resource.
    stats str
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    id String
    The provider-assigned unique ID for this managed resource.
    stats String
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
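
    Continuing the Python sketch above, the computed outputs can be exported directly:

    import pulumi

    # `id` is assigned by the provider; `stats` is a JSON string of runtime
    # statistics for the processor defined in the previous sketch.
    pulumi.export("processorId", processor.id)
    pulumi.export("processorStats", processor.stats)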

    Look up Existing StreamProcessor Resource

    Get an existing StreamProcessor resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: StreamProcessorState, opts?: CustomResourceOptions): StreamProcessor
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            instance_name: Optional[str] = None,
            options: Optional[StreamProcessorOptionsArgs] = None,
            pipeline: Optional[str] = None,
            processor_name: Optional[str] = None,
            project_id: Optional[str] = None,
            state: Optional[str] = None,
            stats: Optional[str] = None) -> StreamProcessor
    func GetStreamProcessor(ctx *Context, name string, id IDInput, state *StreamProcessorState, opts ...ResourceOption) (*StreamProcessor, error)
    public static StreamProcessor Get(string name, Input<string> id, StreamProcessorState? state, CustomResourceOptions? opts = null)
    public static StreamProcessor get(String name, Output<String> id, StreamProcessorState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    InstanceName string
    Human-readable label that identifies the stream instance.
    Options StreamProcessorOptions
    Optional configuration for the stream processor.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    State string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    Stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    InstanceName string
    Human-readable label that identifies the stream instance.
    Options StreamProcessorOptionsArgs
    Optional configuration for the stream processor.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    State string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    Stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    instanceName String
    Human-readable label that identifies the stream instance.
    options StreamProcessorOptions
    Optional configuration for the stream processor.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state String
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    stats String
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    instanceName string
    Human-readable label that identifies the stream instance.
    options StreamProcessorOptions
    Optional configuration for the stream processor.
    pipeline string
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName string
    Human-readable label that identifies the stream processor.
    projectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state string
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    stats string
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    instance_name str
    Human-readable label that identifies the stream instance.
    options StreamProcessorOptionsArgs
    Optional configuration for the stream processor.
    pipeline str
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processor_name str
    Human-readable label that identifies the stream processor.
    project_id str
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state str
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    stats str
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
    instanceName String
    Human-readable label that identifies the stream instance.
    options Property Map
    Optional configuration for the stream processor.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data. The MongoDB Atlas Docs contain more information. Using jsonencode is recommended when setting this attribute. For more details, see the Aggregation Pipelines Documentation.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state String
    The state of the stream processor. Commonly occurring states are 'CREATED', 'STARTED', 'STOPPED' and 'FAILED'. Used to start or stop the stream processor; valid values are CREATED, STARTED, or STOPPED. If no state is specified at creation, it defaults to CREATED. NOTE: when a stream processor is created, the only valid states are CREATED or STARTED; setting STARTED starts the processor automatically on creation.
    stats String
    The stats associated with the stream processor. Refer to the MongoDB Atlas Docs for more information.
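
    As a usage sketch in Python, an existing processor can be adopted by name and provider ID (the ID value below is a placeholder; use the real resource's id output):

    import pulumi
    import pulumi_mongodbatlas as mongodbatlas

    existing = mongodbatlas.StreamProcessor.get(
        "imported-processor",
        id="<provider-assigned-id>",  # placeholder, not a documented format
    )

    pulumi.export("importedState", existing.state)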

    Supporting Types

    StreamProcessorOptions, StreamProcessorOptionsArgs

    Dlq StreamProcessorOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    Dlq StreamProcessorOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq StreamProcessorOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq StreamProcessorOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq StreamProcessorOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq Property Map
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.

    StreamProcessorOptionsDlq, StreamProcessorOptionsDlqArgs

    Coll string
    Name of the collection to use for the DLQ.
    ConnectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    Db string
    Name of the database to use for the DLQ.
    Coll string
    Name of the collection to use for the DLQ.
    ConnectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    Db string
    Name of the database to use for the DLQ.
    coll String
    Name of the collection to use for the DLQ.
    connectionName String
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db String
    Name of the database to use for the DLQ.
    coll string
    Name of the collection to use for the DLQ.
    connectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db string
    Name of the database to use for the DLQ.
    coll str
    Name of the collection to use for the DLQ.
    connection_name str
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db str
    Name of the database to use for the DLQ.
    coll String
    Name of the collection to use for the DLQ.
    connectionName String
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db String
    Name of the database to use for the DLQ.

    Package Details

    Repository
    MongoDB Atlas pulumi/pulumi-mongodbatlas
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the mongodbatlas Terraform Provider.