1. Packages
  2. Confluent Provider
  3. API Docs
  4. getKafkaTopic
Confluent v2.10.0 published on Wednesday, Nov 20, 2024 by Pulumi

confluentcloud.getKafkaTopic

Explore with Pulumi AI

confluentcloud logo
Confluent v2.10.0 published on Wednesday, Nov 20, 2024 by Pulumi

    General Availability

    confluentcloud.getKafkaTopic describes a Kafka Topic data source.

    Example Usage

    Option #1: Manage multiple Kafka clusters in the same Pulumi Stack

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    const orders = confluentcloud.getKafkaTopic({
        kafkaCluster: {
            id: basic_cluster.id,
        },
        topicName: "orders",
        restEndpoint: basic_cluster.restEndpoint,
        credentials: {
            key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
            secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
        },
    });
    export const config = orders.then(orders => orders.config);
    
    import pulumi
    import pulumi_confluentcloud as confluentcloud
    
    orders = confluentcloud.get_kafka_topic(kafka_cluster={
            "id": basic_cluster["id"],
        },
        topic_name="orders",
        rest_endpoint=basic_cluster["restEndpoint"],
        credentials={
            "key": "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
            "secret": "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
        })
    pulumi.export("config", orders.config)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
    			KafkaCluster: confluentcloud.GetKafkaTopicKafkaCluster{
    				Id: basic_cluster.Id,
    			},
    			TopicName:    "orders",
    			RestEndpoint: basic_cluster.RestEndpoint,
    			Credentials: confluentcloud.GetKafkaTopicCredentials{
    				Key:    "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
    				Secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		ctx.Export("config", orders.Config)
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using ConfluentCloud = Pulumi.ConfluentCloud;
    
    return await Deployment.RunAsync(() => 
    {
        var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
        {
            KafkaCluster = new ConfluentCloud.Inputs.GetKafkaTopicKafkaClusterInputArgs
            {
                Id = basic_cluster.Id,
            },
            TopicName = "orders",
            RestEndpoint = basic_cluster.RestEndpoint,
            Credentials = new ConfluentCloud.Inputs.GetKafkaTopicCredentialsInputArgs
            {
                Key = "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
                Secret = "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
            },
        });
    
        return new Dictionary<string, object?>
        {
            ["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.confluentcloud.ConfluentcloudFunctions;
    import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
    import com.pulumi.confluentcloud.inputs.GetKafkaTopicKafkaClusterArgs;
    import com.pulumi.confluentcloud.inputs.GetKafkaTopicCredentialsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
                .kafkaCluster(GetKafkaTopicKafkaClusterArgs.builder()
                    .id(basic_cluster.id())
                    .build())
                .topicName("orders")
                .restEndpoint(basic_cluster.restEndpoint())
                .credentials(GetKafkaTopicCredentialsArgs.builder()
                    .key("<Kafka API Key for confluent_kafka_cluster.basic-cluster>")
                    .secret("<Kafka API Secret for confluent_kafka_cluster.basic-cluster>")
                    .build())
                .build());
    
            ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
        }
    }
    
    variables:
      orders:
        fn::invoke:
          Function: confluentcloud:getKafkaTopic
          Arguments:
            kafkaCluster:
              id: ${["basic-cluster"].id}
            topicName: orders
            restEndpoint: ${["basic-cluster"].restEndpoint}
            credentials:
              key: <Kafka API Key for confluent_kafka_cluster.basic-cluster>
              secret: <Kafka API Secret for confluent_kafka_cluster.basic-cluster>
    outputs:
      config: ${orders.config}
    

    Option #2: Manage a single Kafka cluster in the same Pulumi Stack

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    const orders = confluentcloud.getKafkaTopic({
        topicName: "orders",
    });
    export const config = orders.then(orders => orders.config);
    
    import pulumi
    import pulumi_confluentcloud as confluentcloud
    
    orders = confluentcloud.get_kafka_topic(topic_name="orders")
    pulumi.export("config", orders.config)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
    			TopicName: "orders",
    		}, nil)
    		if err != nil {
    			return err
    		}
    		ctx.Export("config", orders.Config)
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using ConfluentCloud = Pulumi.ConfluentCloud;
    
    return await Deployment.RunAsync(() => 
    {
        var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
        {
            TopicName = "orders",
        });
    
        return new Dictionary<string, object?>
        {
            ["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.confluentcloud.ConfluentcloudFunctions;
    import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
                .topicName("orders")
                .build());
    
            ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
        }
    }
    
    variables:
      orders:
        fn::invoke:
          Function: confluentcloud:getKafkaTopic
          Arguments:
            topicName: orders
    outputs:
      config: ${orders.config}
    

    Using getKafkaTopic

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getKafkaTopic(args: GetKafkaTopicArgs, opts?: InvokeOptions): Promise<GetKafkaTopicResult>
    function getKafkaTopicOutput(args: GetKafkaTopicOutputArgs, opts?: InvokeOptions): Output<GetKafkaTopicResult>
    def get_kafka_topic(credentials: Optional[GetKafkaTopicCredentials] = None,
                        kafka_cluster: Optional[GetKafkaTopicKafkaCluster] = None,
                        rest_endpoint: Optional[str] = None,
                        topic_name: Optional[str] = None,
                        opts: Optional[InvokeOptions] = None) -> GetKafkaTopicResult
    def get_kafka_topic_output(credentials: Optional[pulumi.Input[GetKafkaTopicCredentialsArgs]] = None,
                        kafka_cluster: Optional[pulumi.Input[GetKafkaTopicKafkaClusterArgs]] = None,
                        rest_endpoint: Optional[pulumi.Input[str]] = None,
                        topic_name: Optional[pulumi.Input[str]] = None,
                        opts: Optional[InvokeOptions] = None) -> Output[GetKafkaTopicResult]
    func LookupKafkaTopic(ctx *Context, args *LookupKafkaTopicArgs, opts ...InvokeOption) (*LookupKafkaTopicResult, error)
    func LookupKafkaTopicOutput(ctx *Context, args *LookupKafkaTopicOutputArgs, opts ...InvokeOption) LookupKafkaTopicResultOutput

    > Note: This function is named LookupKafkaTopic in the Go SDK.

    public static class GetKafkaTopic 
    {
        public static Task<GetKafkaTopicResult> InvokeAsync(GetKafkaTopicArgs args, InvokeOptions? opts = null)
        public static Output<GetKafkaTopicResult> Invoke(GetKafkaTopicInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: confluentcloud:index/getKafkaTopic:getKafkaTopic
      arguments:
        # arguments dictionary

    The following arguments are supported:

    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    Credentials Pulumi.ConfluentCloud.Inputs.GetKafkaTopicCredentials
    KafkaCluster Pulumi.ConfluentCloud.Inputs.GetKafkaTopicKafkaCluster
    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    Credentials GetKafkaTopicCredentials
    KafkaCluster GetKafkaTopicKafkaCluster
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    kafkaCluster GetKafkaTopicKafkaCluster
    restEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    kafkaCluster GetKafkaTopicKafkaCluster
    rest_endpoint str
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topic_name str
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    kafka_cluster GetKafkaTopicKafkaCluster
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials Property Map
    kafkaCluster Property Map

    getKafkaTopic Result

    The following output properties are available:

    Config Dictionary<string, string>
    (Optional Map) The custom topic settings.
    Id string
    The provider-assigned unique ID for this managed resource.
    PartitionsCount int
    (Required Number) The number of partitions in the topic. Defaults to 6.
    RestEndpoint string
    TopicName string
    Credentials Pulumi.ConfluentCloud.Outputs.GetKafkaTopicCredentials
    KafkaCluster Pulumi.ConfluentCloud.Outputs.GetKafkaTopicKafkaCluster
    Config map[string]string
    (Optional Map) The custom topic settings.
    Id string
    The provider-assigned unique ID for this managed resource.
    PartitionsCount int
    (Required Number) The number of partitions in the topic. Defaults to 6.
    RestEndpoint string
    TopicName string
    Credentials GetKafkaTopicCredentials
    KafkaCluster GetKafkaTopicKafkaCluster
    config Map<String,String>
    (Optional Map) The custom topic settings.
    id String
    The provider-assigned unique ID for this managed resource.
    partitionsCount Integer
    (Required Number) The number of partitions in the topic. Defaults to 6.
    restEndpoint String
    topicName String
    credentials GetKafkaTopicCredentials
    kafkaCluster GetKafkaTopicKafkaCluster
    config {[key: string]: string}
    (Optional Map) The custom topic settings.
    id string
    The provider-assigned unique ID for this managed resource.
    partitionsCount number
    (Required Number) The number of partitions in the topic. Defaults to 6.
    restEndpoint string
    topicName string
    credentials GetKafkaTopicCredentials
    kafkaCluster GetKafkaTopicKafkaCluster
    config Mapping[str, str]
    (Optional Map) The custom topic settings.
    id str
    The provider-assigned unique ID for this managed resource.
    partitions_count int
    (Required Number) The number of partitions in the topic. Defaults to 6.
    rest_endpoint str
    topic_name str
    credentials GetKafkaTopicCredentials
    kafka_cluster GetKafkaTopicKafkaCluster
    config Map<String>
    (Optional Map) The custom topic settings.
    id String
    The provider-assigned unique ID for this managed resource.
    partitionsCount Number
    (Required Number) The number of partitions in the topic. Defaults to 6.
    restEndpoint String
    topicName String
    credentials Property Map
    kafkaCluster Property Map

    Supporting Types

    GetKafkaTopicCredentials

    Key string
    The Kafka API Key.
    Secret string
    The Cluster API Secret for your Confluent Cloud cluster.
    Key string
    The Kafka API Key.
    Secret string
    The Cluster API Secret for your Confluent Cloud cluster.
    key String
    The Kafka API Key.
    secret String
    The Cluster API Secret for your Confluent Cloud cluster.
    key string
    The Kafka API Key.
    secret string
    The Cluster API Secret for your Confluent Cloud cluster.
    key str
    The Kafka API Key.
    secret str
    The Cluster API Secret for your Confluent Cloud cluster.
    key String
    The Kafka API Key.
    secret String
    The Cluster API Secret for your Confluent Cloud cluster.

    GetKafkaTopicKafkaCluster

    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.
    id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id str
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.

    Package Details

    Repository
    Confluent Cloud pulumi/pulumi-confluentcloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the confluent Terraform Provider.
    confluentcloud logo
    Confluent v2.10.0 published on Wednesday, Nov 20, 2024 by Pulumi