1. Packages
  2. Mongodbatlas Provider
  3. API Docs
  4. getStreamProcessors
MongoDB Atlas v3.20.4 published on Wednesday, Oct 30, 2024 by Pulumi

mongodbatlas.getStreamProcessors

Explore with Pulumi AI

mongodbatlas logo
MongoDB Atlas v3.20.4 published on Wednesday, Oct 30, 2024 by Pulumi

    # Data Source: mongodbatlas.getStreamProcessors

    mongodbatlas.getStreamProcessors returns all stream processors in a stream instance.

    Example Usage

    S

    Coming soon!
    
    Coming soon!
    
    Coming soon!
    
    Coming soon!
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.mongodbatlas.StreamInstance;
    import com.pulumi.mongodbatlas.StreamInstanceArgs;
    import com.pulumi.mongodbatlas.inputs.StreamInstanceDataProcessRegionArgs;
    import com.pulumi.mongodbatlas.StreamConnection;
    import com.pulumi.mongodbatlas.StreamConnectionArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionDbRoleToExecuteArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionAuthenticationArgs;
    import com.pulumi.mongodbatlas.inputs.StreamConnectionSecurityArgs;
    import com.pulumi.mongodbatlas.StreamProcessor;
    import com.pulumi.mongodbatlas.StreamProcessorArgs;
    import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsArgs;
    import com.pulumi.mongodbatlas.inputs.StreamProcessorOptionsDlqArgs;
    import com.pulumi.mongodbatlas.MongodbatlasFunctions;
    import com.pulumi.mongodbatlas.inputs.GetStreamProcessorsArgs;
    import com.pulumi.mongodbatlas.inputs.GetStreamProcessorArgs;
    import static com.pulumi.codegen.internal.Serialization.*;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var example = new StreamInstance("example", StreamInstanceArgs.builder()
                .projectId(projectId)
                .instanceName("InstanceName")
                .dataProcessRegion(StreamInstanceDataProcessRegionArgs.builder()
                    .region("VIRGINIA_USA")
                    .cloud_provider("AWS")
                    .build())
                .build());
    
            var example_sample = new StreamConnection("example-sample", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("sample_stream_solar")
                .type("Sample")
                .build());
    
            var example_cluster = new StreamConnection("example-cluster", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("ClusterConnection")
                .type("Cluster")
                .clusterName(clusterName)
                .dbRoleToExecute(StreamConnectionDbRoleToExecuteArgs.builder()
                    .role("atlasAdmin")
                    .type("BUILT_IN")
                    .build())
                .build());
    
            var example_kafka = new StreamConnection("example-kafka", StreamConnectionArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .connectionName("KafkaPlaintextConnection")
                .type("Kafka")
                .authentication(StreamConnectionAuthenticationArgs.builder()
                    .mechanism("PLAIN")
                    .username(kafkaUsername)
                    .password(kafkaPassword)
                    .build())
                .bootstrapServers("localhost:9092,localhost:9092")
                .config(Map.of("auto.offset.reset", "earliest"))
                .security(StreamConnectionSecurityArgs.builder()
                    .protocol("PLAINTEXT")
                    .build())
                .build());
    
            var stream_processor_sample_example = new StreamProcessor("stream-processor-sample-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("sampleProcessorName")
                .pipeline(serializeJson(
                    jsonArray(
                        jsonObject(
                            jsonProperty("$source", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-sample().connectionName())
                            ))
                        ), 
                        jsonObject(
                            jsonProperty("$emit", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName()),
                                jsonProperty("db", "sample"),
                                jsonProperty("coll", "solar"),
                                jsonProperty("timeseries", jsonObject(
                                    jsonProperty("timeField", "_ts")
                                ))
                            ))
                        )
                    )))
                .state("STARTED")
                .build());
    
            var stream_processor_cluster_to_kafka_example = new StreamProcessor("stream-processor-cluster-to-kafka-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("clusterProcessorName")
                .pipeline(serializeJson(
                    jsonArray(
                        jsonObject(
                            jsonProperty("$source", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName())
                            ))
                        ), 
                        jsonObject(
                            jsonProperty("$emit", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-kafka().connectionName()),
                                jsonProperty("topic", "topic_from_cluster")
                            ))
                        )
                    )))
                .state("CREATED")
                .build());
    
            var stream_processor_kafka_to_cluster_example = new StreamProcessor("stream-processor-kafka-to-cluster-example", StreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName("kafkaProcessorName")
                .pipeline(serializeJson(
                    jsonArray(
                        jsonObject(
                            jsonProperty("$source", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-kafka().connectionName()),
                                jsonProperty("topic", "topic_source")
                            ))
                        ), 
                        jsonObject(
                            jsonProperty("$emit", jsonObject(
                                jsonProperty("connectionName", mongodbatlasStreamConnection.example-cluster().connectionName()),
                                jsonProperty("db", "kafka"),
                                jsonProperty("coll", "topic_source"),
                                jsonProperty("timeseries", jsonObject(
                                    jsonProperty("timeField", "ts")
                                ))
                            ))
                        )
                    )))
                .state("CREATED")
                .options(StreamProcessorOptionsArgs.builder()
                    .dlq(StreamProcessorOptionsDlqArgs.builder()
                        .coll("exampleColumn")
                        .connectionName(mongodbatlasStreamConnection.example-cluster().connectionName())
                        .db("exampleDb")
                        .build())
                    .build())
                .build());
    
            final var example-stream-processors = MongodbatlasFunctions.getStreamProcessors(GetStreamProcessorsArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .build());
    
            final var example-stream-processor = MongodbatlasFunctions.getStreamProcessor(GetStreamProcessorArgs.builder()
                .projectId(projectId)
                .instanceName(example.instanceName())
                .processorName(stream_processor_sample_example.processorName())
                .build());
    
            ctx.export("streamProcessorsState", example_stream_processor.applyValue(example_stream_processor -> example_stream_processor.state()));
            ctx.export("streamProcessorsResults", example_stream_processors.applyValue(example_stream_processors -> example_stream_processors.results()));
        }
    }
    
    resources:
      example:
        type: mongodbatlas:StreamInstance
        properties:
          projectId: ${projectId}
          instanceName: InstanceName
          dataProcessRegion:
            region: VIRGINIA_USA
          cloudProvider: AWS
      example-sample:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: sample_stream_solar
          type: Sample
      example-cluster:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: ClusterConnection
          type: Cluster
          clusterName: ${clusterName}
          dbRoleToExecute:
            role: atlasAdmin
            type: BUILT_IN
      example-kafka:
        type: mongodbatlas:StreamConnection
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          connectionName: KafkaPlaintextConnection
          type: Kafka
          authentication:
            mechanism: PLAIN
            username: ${kafkaUsername}
            password: ${kafkaPassword}
          bootstrapServers: localhost:9092,localhost:9092
          config:
            auto.offset.reset: earliest
          security:
            protocol: PLAINTEXT
      stream-processor-sample-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: sampleProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-sample"].connectionName}
              - $emit:
                  connectionName: ${["example-cluster"].connectionName}
                  db: sample
                  coll: solar
                  timeseries:
                    timeField: _ts
          state: STARTED
      stream-processor-cluster-to-kafka-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: clusterProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-cluster"].connectionName}
              - $emit:
                  connectionName: ${["example-kafka"].connectionName}
                  topic: topic_from_cluster
          state: CREATED
      stream-processor-kafka-to-cluster-example:
        type: mongodbatlas:StreamProcessor
        properties:
          projectId: ${projectId}
          instanceName: ${example.instanceName}
          processorName: kafkaProcessorName
          pipeline:
            fn::toJSON:
              - $source:
                  connectionName: ${["example-kafka"].connectionName}
                  topic: topic_source
              - $emit:
                  connectionName: ${["example-cluster"].connectionName}
                  db: kafka
                  coll: topic_source
                  timeseries:
                    timeField: ts
          state: CREATED
          options:
            dlq:
              coll: exampleColumn
              connectionName: ${["example-cluster"].connectionName}
              db: exampleDb
    variables:
      example-stream-processors:
        fn::invoke:
          Function: mongodbatlas:getStreamProcessors
          Arguments:
            projectId: ${projectId}
            instanceName: ${example.instanceName}
      example-stream-processor:
        fn::invoke:
          Function: mongodbatlas:getStreamProcessor
          Arguments:
            projectId: ${projectId}
            instanceName: ${example.instanceName}
            processorName: ${["stream-processor-sample-example"].processorName}
    outputs:
      # example making use of data sources
      streamProcessorsState: ${["example-stream-processor"].state}
      streamProcessorsResults: ${["example-stream-processors"].results}
    

    Using getStreamProcessors

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getStreamProcessors(args: GetStreamProcessorsArgs, opts?: InvokeOptions): Promise<GetStreamProcessorsResult>
    function getStreamProcessorsOutput(args: GetStreamProcessorsOutputArgs, opts?: InvokeOptions): Output<GetStreamProcessorsResult>
    def get_stream_processors(instance_name: Optional[str] = None,
                              project_id: Optional[str] = None,
                              opts: Optional[InvokeOptions] = None) -> GetStreamProcessorsResult
    def get_stream_processors_output(instance_name: Optional[pulumi.Input[str]] = None,
                              project_id: Optional[pulumi.Input[str]] = None,
                              opts: Optional[InvokeOptions] = None) -> Output[GetStreamProcessorsResult]
    func LookupStreamProcessors(ctx *Context, args *LookupStreamProcessorsArgs, opts ...InvokeOption) (*LookupStreamProcessorsResult, error)
    func LookupStreamProcessorsOutput(ctx *Context, args *LookupStreamProcessorsOutputArgs, opts ...InvokeOption) LookupStreamProcessorsResultOutput

    > Note: This function is named LookupStreamProcessors in the Go SDK.

    public static class GetStreamProcessors 
    {
        public static Task<GetStreamProcessorsResult> InvokeAsync(GetStreamProcessorsArgs args, InvokeOptions? opts = null)
        public static Output<GetStreamProcessorsResult> Invoke(GetStreamProcessorsInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetStreamProcessorsResult> getStreamProcessors(GetStreamProcessorsArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: mongodbatlas:index/getStreamProcessors:getStreamProcessors
      arguments:
        # arguments dictionary

    The following arguments are supported:

    InstanceName string
    Human-readable label that identifies the stream instance.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    InstanceName string
    Human-readable label that identifies the stream instance.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    instanceName String
    Human-readable label that identifies the stream instance.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    instanceName string
    Human-readable label that identifies the stream instance.
    projectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    instance_name str
    Human-readable label that identifies the stream instance.
    project_id str
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    instanceName String
    Human-readable label that identifies the stream instance.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.

    getStreamProcessors Result

    The following output properties are available:

    Id string
    The provider-assigned unique ID for this managed resource.
    InstanceName string
    Human-readable label that identifies the stream instance.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    Results List<GetStreamProcessorsResult>
    Id string
    The provider-assigned unique ID for this managed resource.
    InstanceName string
    Human-readable label that identifies the stream instance.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    Results []GetStreamProcessorsResult
    id String
    The provider-assigned unique ID for this managed resource.
    instanceName String
    Human-readable label that identifies the stream instance.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    results List<GetStreamProcessorsResult>
    id string
    The provider-assigned unique ID for this managed resource.
    instanceName string
    Human-readable label that identifies the stream instance.
    projectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    results GetStreamProcessorsResult[]
    id str
    The provider-assigned unique ID for this managed resource.
    instance_name str
    Human-readable label that identifies the stream instance.
    project_id str
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    results Sequence[GetStreamProcessorsResult]
    id String
    The provider-assigned unique ID for this managed resource.
    instanceName String
    Human-readable label that identifies the stream instance.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    results List<Property Map>

    Supporting Types

    GetStreamProcessorsResult

    Id string
    Unique 24-hexadecimal character string that identifies the stream processor.
    InstanceName string
    Human-readable label that identifies the stream instance.
    Options GetStreamProcessorsResultOptions
    Optional configuration for the stream processor.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    State string
    The state of the stream processor.
    Stats string
    The stats associated with the stream processor.
    Id string
    Unique 24-hexadecimal character string that identifies the stream processor.
    InstanceName string
    Human-readable label that identifies the stream instance.
    Options GetStreamProcessorsResultOptions
    Optional configuration for the stream processor.
    Pipeline string
    Stream aggregation pipeline you want to apply to your streaming data.
    ProcessorName string
    Human-readable label that identifies the stream processor.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    State string
    The state of the stream processor.
    Stats string
    The stats associated with the stream processor.
    id String
    Unique 24-hexadecimal character string that identifies the stream processor.
    instanceName String
    Human-readable label that identifies the stream instance.
    options GetStreamProcessorsResultOptions
    Optional configuration for the stream processor.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state String
    The state of the stream processor.
    stats String
    The stats associated with the stream processor.
    id string
    Unique 24-hexadecimal character string that identifies the stream processor.
    instanceName string
    Human-readable label that identifies the stream instance.
    options GetStreamProcessorsResultOptions
    Optional configuration for the stream processor.
    pipeline string
    Stream aggregation pipeline you want to apply to your streaming data.
    processorName string
    Human-readable label that identifies the stream processor.
    projectId string
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state string
    The state of the stream processor.
    stats string
    The stats associated with the stream processor.
    id str
    Unique 24-hexadecimal character string that identifies the stream processor.
    instance_name str
    Human-readable label that identifies the stream instance.
    options GetStreamProcessorsResultOptions
    Optional configuration for the stream processor.
    pipeline str
    Stream aggregation pipeline you want to apply to your streaming data.
    processor_name str
    Human-readable label that identifies the stream processor.
    project_id str
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state str
    The state of the stream processor.
    stats str
    The stats associated with the stream processor.
    id String
    Unique 24-hexadecimal character string that identifies the stream processor.
    instanceName String
    Human-readable label that identifies the stream instance.
    options Property Map
    Optional configuration for the stream processor.
    pipeline String
    Stream aggregation pipeline you want to apply to your streaming data.
    processorName String
    Human-readable label that identifies the stream processor.
    projectId String
    Unique 24-hexadecimal digit string that identifies your project. Use the /groups endpoint to retrieve all projects to which the authenticated user has access.
    state String
    The state of the stream processor.
    stats String
    The stats associated with the stream processor.

    GetStreamProcessorsResultOptions

    Dlq GetStreamProcessorsResultOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    Dlq GetStreamProcessorsResultOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq GetStreamProcessorsResultOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq GetStreamProcessorsResultOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq GetStreamProcessorsResultOptionsDlq
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.
    dlq Property Map
    Dead letter queue for the stream processor. Refer to the MongoDB Atlas Docs for more information.

    GetStreamProcessorsResultOptionsDlq

    Coll string
    Name of the collection to use for the DLQ.
    ConnectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    Db string
    Name of the database to use for the DLQ.
    Coll string
    Name of the collection to use for the DLQ.
    ConnectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    Db string
    Name of the database to use for the DLQ.
    coll String
    Name of the collection to use for the DLQ.
    connectionName String
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db String
    Name of the database to use for the DLQ.
    coll string
    Name of the collection to use for the DLQ.
    connectionName string
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db string
    Name of the database to use for the DLQ.
    coll str
    Name of the collection to use for the DLQ.
    connection_name str
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db str
    Name of the database to use for the DLQ.
    coll String
    Name of the collection to use for the DLQ.
    connectionName String
    Name of the connection to write DLQ messages to. Must be an Atlas connection.
    db String
    Name of the database to use for the DLQ.

    Package Details

    Repository
    MongoDB Atlas pulumi/pulumi-mongodbatlas
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the mongodbatlas Terraform Provider.
    mongodbatlas logo
    MongoDB Atlas v3.20.4 published on Wednesday, Oct 30, 2024 by Pulumi