
confluentcloud.KafkaTopic

Confluent v2.10.0 published on Wednesday, Nov 20, 2024 by Pulumi

    Example Usage

    Option #1: Manage multiple Kafka clusters in the same Pulumi Stack

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    const orders = new confluentcloud.KafkaTopic("orders", {
        kafkaCluster: {
            id: basic_cluster.id,
        },
        topicName: "orders",
        restEndpoint: basic_cluster.restEndpoint,
        credentials: {
            key: app_manager_kafka_api_key.id,
            secret: app_manager_kafka_api_key.secret,
        },
    });
    
    import pulumi
    import pulumi_confluentcloud as confluentcloud
    
    orders = confluentcloud.KafkaTopic("orders",
        kafka_cluster={
            "id": basic_cluster["id"],
        },
        topic_name="orders",
        rest_endpoint=basic_cluster["restEndpoint"],
        credentials={
            "key": app_manager_kafka_api_key["id"],
            "secret": app_manager_kafka_api_key["secret"],
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := confluentcloud.NewKafkaTopic(ctx, "orders", &confluentcloud.KafkaTopicArgs{
    			KafkaCluster: &confluentcloud.KafkaTopicKafkaClusterArgs{
    				Id: pulumi.Any(basic_cluster.Id),
    			},
    			TopicName:    pulumi.String("orders"),
    			RestEndpoint: pulumi.Any(basic_cluster.RestEndpoint),
    			Credentials: &confluentcloud.KafkaTopicCredentialsArgs{
    				Key:    pulumi.Any(app_manager_kafka_api_key.Id),
    				Secret: pulumi.Any(app_manager_kafka_api_key.Secret),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using ConfluentCloud = Pulumi.ConfluentCloud;
    
    return await Deployment.RunAsync(() => 
    {
        var orders = new ConfluentCloud.KafkaTopic("orders", new()
        {
            KafkaCluster = new ConfluentCloud.Inputs.KafkaTopicKafkaClusterArgs
            {
                Id = basic_cluster.Id,
            },
            TopicName = "orders",
            RestEndpoint = basic_cluster.RestEndpoint,
            Credentials = new ConfluentCloud.Inputs.KafkaTopicCredentialsArgs
            {
                Key = app_manager_kafka_api_key.Id,
                Secret = app_manager_kafka_api_key.Secret,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.confluentcloud.KafkaTopic;
    import com.pulumi.confluentcloud.KafkaTopicArgs;
    import com.pulumi.confluentcloud.inputs.KafkaTopicKafkaClusterArgs;
    import com.pulumi.confluentcloud.inputs.KafkaTopicCredentialsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var orders = new KafkaTopic("orders", KafkaTopicArgs.builder()
                .kafkaCluster(KafkaTopicKafkaClusterArgs.builder()
                    .id(basic_cluster.id())
                    .build())
                .topicName("orders")
                .restEndpoint(basic_cluster.restEndpoint())
                .credentials(KafkaTopicCredentialsArgs.builder()
                    .key(app_manager_kafka_api_key.id())
                    .secret(app_manager_kafka_api_key.secret())
                    .build())
                .build());
    
        }
    }
    
    resources:
      orders:
        type: confluentcloud:KafkaTopic
        properties:
          kafkaCluster:
            id: ${["basic-cluster"].id}
          topicName: orders
          restEndpoint: ${["basic-cluster"].restEndpoint}
          credentials:
            key: ${["app-manager-kafka-api-key"].id}
            secret: ${["app-manager-kafka-api-key"].secret}
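
    The examples above reference a Kafka cluster and a cluster-scoped API key defined elsewhere in the stack. The sketch below illustrates, in TypeScript, what those supporting resources might look like; the environment, region, service-account wiring, and all names are illustrative assumptions rather than required values.

    import * as confluentcloud from "@pulumi/confluentcloud";
    
    // Illustrative supporting resources for Option #1 (names and values are assumptions).
    const staging = new confluentcloud.Environment("staging", {
        displayName: "Staging",
    });
    
    const basic_cluster = new confluentcloud.KafkaCluster("basic-cluster", {
        displayName: "basic_kafka_cluster",
        availability: "SINGLE_ZONE",
        cloud: "GCP",
        region: "us-central1",
        basic: {},
        environment: { id: staging.id },
    });
    
    // Service account that will own the cluster-scoped Kafka API key.
    const app_manager = new confluentcloud.ServiceAccount("app-manager", {
        displayName: "app-manager",
        description: "Service account that owns the Kafka API key",
    });
    
    const app_manager_kafka_api_key = new confluentcloud.ApiKey("app-manager-kafka-api-key", {
        displayName: "app-manager-kafka-api-key",
        owner: {
            id: app_manager.id,
            apiVersion: app_manager.apiVersion,
            kind: app_manager.kind,
        },
        managedResource: {
            id: basic_cluster.id,
            apiVersion: basic_cluster.apiVersion,
            kind: basic_cluster.kind,
            environment: { id: staging.id },
        },
    });

    In a real stack the service account would also need an appropriate role binding (for example, CloudClusterAdmin) before its key can manage topics; see the end-to-end examples under Getting Started below.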
    

    Option #2: Manage a single Kafka cluster in the same Pulumi Stack

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    const orders = new confluentcloud.KafkaTopic("orders", {topicName: "orders"});
    
    import pulumi
    import pulumi_confluentcloud as confluentcloud
    
    orders = confluentcloud.KafkaTopic("orders", topic_name="orders")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := confluentcloud.NewKafkaTopic(ctx, "orders", &confluentcloud.KafkaTopicArgs{
    			TopicName: pulumi.String("orders"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using ConfluentCloud = Pulumi.ConfluentCloud;
    
    return await Deployment.RunAsync(() => 
    {
        var orders = new ConfluentCloud.KafkaTopic("orders", new()
        {
            TopicName = "orders",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.confluentcloud.KafkaTopic;
    import com.pulumi.confluentcloud.KafkaTopicArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var orders = new KafkaTopic("orders", KafkaTopicArgs.builder()
                .topicName("orders")
                .build());
    
        }
    }
    
    resources:
      orders:
        type: confluentcloud:KafkaTopic
        properties:
          topicName: orders
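
    Option #2 works by configuring the Kafka cluster's details once on the provider instead of on every resource. A minimal sketch in TypeScript, assuming the bridged provider options kafkaId, kafkaRestEndpoint, kafkaApiKey, and kafkaApiSecret (these mirror the upstream Terraform provider settings and are assumptions here):

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    // Provider instance scoped to a single Kafka cluster (placeholder values).
    const kafkaProvider = new confluentcloud.Provider("kafka", {
        kafkaId: "lkc-abc123",
        kafkaRestEndpoint: "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
        kafkaApiKey: "<kafka_api_key>",
        kafkaApiSecret: pulumi.secret("<kafka_api_secret>"),
    });
    
    // Topics created through this provider no longer need per-resource
    // kafkaCluster, restEndpoint, or credentials arguments.
    const orders = new confluentcloud.KafkaTopic("orders", {
        topicName: "orders",
    }, { provider: kafkaProvider });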
    

    Getting Started

    The following end-to-end examples might help you get started with the confluentcloud.KafkaTopic resource:

    • basic-kafka-acls: Basic Kafka cluster with authorization using ACLs
    • basic-kafka-acls-with-alias: Basic Kafka cluster with authorization using ACLs
    • standard-kafka-acls: Standard Kafka cluster with authorization using ACLs
    • standard-kafka-rbac: Standard Kafka cluster with authorization using RBAC
    • dedicated-public-kafka-acls: Dedicated Kafka cluster that is accessible over the public internet with authorization using ACLs
    • dedicated-public-kafka-rbac: Dedicated Kafka cluster that is accessible over the public internet with authorization using RBAC
    • dedicated-privatelink-aws-kafka-acls: Dedicated Kafka cluster on AWS that is accessible via PrivateLink connections with authorization using ACLs
    • dedicated-privatelink-aws-kafka-rbac: Dedicated Kafka cluster on AWS that is accessible via PrivateLink connections with authorization using RBAC
    • dedicated-privatelink-azure-kafka-rbac: Dedicated Kafka cluster on Azure that is accessible via PrivateLink connections with authorization using RBAC
    • dedicated-privatelink-azure-kafka-acls: Dedicated Kafka cluster on Azure that is accessible via PrivateLink connections with authorization using ACLs
    • dedicated-private-service-connect-gcp-kafka-acls: Dedicated Kafka cluster on GCP that is accessible via Private Service Connect connections with authorization using ACLs
    • dedicated-private-service-connect-gcp-kafka-rbac: Dedicated Kafka cluster on GCP that is accessible via Private Service Connect connections with authorization using RBAC
    • dedicated-vnet-peering-azure-kafka-acls: Dedicated Kafka cluster on Azure that is accessible via VPC Peering connections with authorization using ACLs
    • dedicated-vnet-peering-azure-kafka-rbac: Dedicated Kafka cluster on Azure that is accessible via VPC Peering connections with authorization using RBAC
    • dedicated-vpc-peering-aws-kafka-acls: Dedicated Kafka cluster on AWS that is accessible via VPC Peering connections with authorization using ACLs
    • dedicated-vpc-peering-aws-kafka-rbac: Dedicated Kafka cluster on AWS that is accessible via VPC Peering connections with authorization using RBAC
    • dedicated-vpc-peering-gcp-kafka-acls: Dedicated Kafka cluster on GCP that is accessible via VPC Peering connections with authorization using ACLs
    • dedicated-vpc-peering-gcp-kafka-rbac: Dedicated Kafka cluster on GCP that is accessible via VPC Peering connections with authorization using RBAC
    • dedicated-transit-gateway-attachment-aws-kafka-acls: Dedicated Kafka cluster on AWS that is accessible via Transit Gateway Endpoint with authorization using ACLs
    • dedicated-transit-gateway-attachment-aws-kafka-rbac: Dedicated Kafka cluster on AWS that is accessible via Transit Gateway Endpoint with authorization using RBAC
    • enterprise-privatelinkattachment-aws-kafka-acls: Enterprise Kafka cluster on AWS that is accessible via PrivateLink connections with authorization using ACLs

    Create KafkaTopic Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new KafkaTopic(name: string, args: KafkaTopicArgs, opts?: CustomResourceOptions);
    @overload
    def KafkaTopic(resource_name: str,
                   args: KafkaTopicArgs,
                   opts: Optional[ResourceOptions] = None)
    
    @overload
    def KafkaTopic(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   topic_name: Optional[str] = None,
                   config: Optional[Mapping[str, str]] = None,
                   credentials: Optional[KafkaTopicCredentialsArgs] = None,
                   http_endpoint: Optional[str] = None,
                   kafka_cluster: Optional[KafkaTopicKafkaClusterArgs] = None,
                   partitions_count: Optional[int] = None,
                   rest_endpoint: Optional[str] = None)
    func NewKafkaTopic(ctx *Context, name string, args KafkaTopicArgs, opts ...ResourceOption) (*KafkaTopic, error)
    public KafkaTopic(string name, KafkaTopicArgs args, CustomResourceOptions? opts = null)
    public KafkaTopic(String name, KafkaTopicArgs args)
    public KafkaTopic(String name, KafkaTopicArgs args, CustomResourceOptions options)
    
    type: confluentcloud:KafkaTopic
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args KafkaTopicArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args KafkaTopicArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args KafkaTopicArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args KafkaTopicArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args KafkaTopicArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var kafkaTopicResource = new ConfluentCloud.KafkaTopic("kafkaTopicResource", new()
    {
        TopicName = "string",
        Config = 
        {
            { "string", "string" },
        },
        Credentials = new ConfluentCloud.Inputs.KafkaTopicCredentialsArgs
        {
            Key = "string",
            Secret = "string",
        },
        KafkaCluster = new ConfluentCloud.Inputs.KafkaTopicKafkaClusterArgs
        {
            Id = "string",
        },
        PartitionsCount = 0,
        RestEndpoint = "string",
    });
    
    example, err := confluentcloud.NewKafkaTopic(ctx, "kafkaTopicResource", &confluentcloud.KafkaTopicArgs{
    	TopicName: pulumi.String("string"),
    	Config: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Credentials: &confluentcloud.KafkaTopicCredentialsArgs{
    		Key:    pulumi.String("string"),
    		Secret: pulumi.String("string"),
    	},
    	KafkaCluster: &confluentcloud.KafkaTopicKafkaClusterArgs{
    		Id: pulumi.String("string"),
    	},
    	PartitionsCount: pulumi.Int(0),
    	RestEndpoint:    pulumi.String("string"),
    })
    
    var kafkaTopicResource = new KafkaTopic("kafkaTopicResource", KafkaTopicArgs.builder()
        .topicName("string")
        .config(Map.of("string", "string"))
        .credentials(KafkaTopicCredentialsArgs.builder()
            .key("string")
            .secret("string")
            .build())
        .kafkaCluster(KafkaTopicKafkaClusterArgs.builder()
            .id("string")
            .build())
        .partitionsCount(0)
        .restEndpoint("string")
        .build());
    
    kafka_topic_resource = confluentcloud.KafkaTopic("kafkaTopicResource",
        topic_name="string",
        config={
            "string": "string",
        },
        credentials={
            "key": "string",
            "secret": "string",
        },
        kafka_cluster={
            "id": "string",
        },
        partitions_count=0,
        rest_endpoint="string")
    
    const kafkaTopicResource = new confluentcloud.KafkaTopic("kafkaTopicResource", {
        topicName: "string",
        config: {
            string: "string",
        },
        credentials: {
            key: "string",
            secret: "string",
        },
        kafkaCluster: {
            id: "string",
        },
        partitionsCount: 0,
        restEndpoint: "string",
    });
    
    type: confluentcloud:KafkaTopic
    properties:
        config:
            string: string
        credentials:
            key: string
            secret: string
        kafkaCluster:
            id: string
        partitionsCount: 0
        restEndpoint: string
        topicName: string
    

    KafkaTopic Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The KafkaTopic resource accepts the following input properties:

    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    Config Dictionary<string, string>
    The custom topic settings to set.
    Credentials Pulumi.ConfluentCloud.Inputs.KafkaTopicCredentials
    The Cluster API Credentials.
    HttpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    KafkaCluster Pulumi.ConfluentCloud.Inputs.KafkaTopicKafkaCluster
    PartitionsCount int
    The number of partitions to create in the topic. Defaults to 6.
    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    Config map[string]string
    The custom topic settings to set.
    Credentials KafkaTopicCredentialsArgs
    The Cluster API Credentials.
    HttpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    KafkaCluster KafkaTopicKafkaClusterArgs
    PartitionsCount int
    The number of partitions to create in the topic. Defaults to 6.
    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Map<String,String>
    The custom topic settings to set.
    credentials KafkaTopicCredentials
    The Cluster API Credentials.
    httpEndpoint String

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster KafkaTopicKafkaCluster
    partitionsCount Integer
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config {[key: string]: string}
    The custom topic settings to set.
    credentials KafkaTopicCredentials
    The Cluster API Credentials.
    httpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster KafkaTopicKafkaCluster
    partitionsCount number
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topic_name str
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Mapping[str, str]
    The custom topic settings to set.
    credentials KafkaTopicCredentialsArgs
    The Cluster API Credentials.
    http_endpoint str

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafka_cluster KafkaTopicKafkaClusterArgs
    partitions_count int
    The number of partitions to create in the topic. Defaults to 6.
    rest_endpoint str
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Map<String>
    The custom topic settings to set.
    credentials Property Map
    The Cluster API Credentials.
    httpEndpoint String

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster Property Map
    partitionsCount Number
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the KafkaTopic resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
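
    For example, the provider-assigned ID, which takes the <Kafka cluster ID>/<Kafka topic name> form also used for import, can be exported as a stack output. A one-line TypeScript sketch, assuming the orders topic from the examples above:

    // Export the provider-assigned topic ID, e.g. "lkc-abc123/orders".
    export const ordersTopicId = orders.id;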

    Look up Existing KafkaTopic Resource

    Get an existing KafkaTopic resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: KafkaTopicState, opts?: CustomResourceOptions): KafkaTopic
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            config: Optional[Mapping[str, str]] = None,
            credentials: Optional[KafkaTopicCredentialsArgs] = None,
            http_endpoint: Optional[str] = None,
            kafka_cluster: Optional[KafkaTopicKafkaClusterArgs] = None,
            partitions_count: Optional[int] = None,
            rest_endpoint: Optional[str] = None,
            topic_name: Optional[str] = None) -> KafkaTopic
    func GetKafkaTopic(ctx *Context, name string, id IDInput, state *KafkaTopicState, opts ...ResourceOption) (*KafkaTopic, error)
    public static KafkaTopic Get(string name, Input<string> id, KafkaTopicState? state, CustomResourceOptions? opts = null)
    public static KafkaTopic get(String name, Output<String> id, KafkaTopicState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Config Dictionary<string, string>
    The custom topic settings to set.
    Credentials Pulumi.ConfluentCloud.Inputs.KafkaTopicCredentials
    The Cluster API Credentials.
    HttpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    KafkaCluster Pulumi.ConfluentCloud.Inputs.KafkaTopicKafkaCluster
    PartitionsCount int
    The number of partitions to create in the topic. Defaults to 6.
    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    Config map[string]string
    The custom topic settings to set.
    Credentials KafkaTopicCredentialsArgs
    The Cluster API Credentials.
    HttpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    KafkaCluster KafkaTopicKafkaClusterArgs
    PartitionsCount int
    The number of partitions to create in the topic. Defaults to 6.
    RestEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Map<String,String>
    The custom topic settings to set.
    credentials KafkaTopicCredentials
    The Cluster API Credentials.
    httpEndpoint String

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster KafkaTopicKafkaCluster
    partitionsCount Integer
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config {[key: string]: string}
    The custom topic settings to set.
    credentials KafkaTopicCredentials
    The Cluster API Credentials.
    httpEndpoint string

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster KafkaTopicKafkaCluster
    partitionsCount number
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName string
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Mapping[str, str]
    The custom topic settings to set.
    credentials KafkaTopicCredentialsArgs
    The Cluster API Credentials.
    http_endpoint str

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafka_cluster KafkaTopicKafkaClusterArgs
    partitions_count int
    The number of partitions to create in the topic. Defaults to 6.
    rest_endpoint str
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topic_name str
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
    config Map<String>
    The custom topic settings to set.
    credentials Property Map
    The Cluster API Credentials.
    httpEndpoint String

    Deprecated: This property has been deprecated. Please use "restEndpoint" instead.

    kafkaCluster Property Map
    partitionsCount Number
    The number of partitions to create in the topic. Defaults to 6.
    restEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 249 characters in length, and can include the following characters: a-z, A-Z, 0-9, . (dot), _ (underscore), and - (dash). As a best practice, we recommend against using any personally identifiable information (PII) when naming your topic.
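
    For example, a minimal TypeScript sketch of looking up an existing topic by its <Kafka cluster ID>/<Kafka topic name> ID; the state arguments shown are placeholders and, as with Option #1, supply the REST endpoint used to reach the cluster:

    import * as confluentcloud from "@pulumi/confluentcloud";
    
    // Look up an existing topic by ID; the extra state arguments are optional.
    const existingOrders = confluentcloud.KafkaTopic.get("existing-orders", "lkc-abc123/orders", {
        kafkaCluster: { id: "lkc-abc123" },
        restEndpoint: "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
    });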

    Supporting Types

    KafkaTopicCredentials, KafkaTopicCredentialsArgs

    Key string
    The Kafka API Key.
    Secret string

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.

    Key string
    The Kafka API Key.
    Secret string

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.

    key String
    The Kafka API Key.
    secret String

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.

    key string
    The Kafka API Key.
    secret string

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.

    key str
    The Kafka API Key.
    secret str

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.

    key String
    The Kafka API Key.
    secret String

    The Kafka API Secret.

    Note: A Kafka API key consists of a key and a secret. Kafka API keys are required to interact with Kafka clusters in Confluent Cloud. Each Kafka API key is valid for one specific Kafka cluster.

    Note: Use Option #2 to simplify the key rotation process. When using Option #1, to rotate a Kafka API key, create a new Kafka API key, update the credentials block in all configuration files to use the new Kafka API key, run pulumi up --target <URN of the orders topic>, and then remove the old Kafka API key. Alternatively, if the old Kafka API key has already been deleted, you might need to run pulumi up --refresh=false --target <URN of the orders topic> instead.
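
    To keep the secret out of source control, the credentials can be read from Pulumi secret config rather than hard-coded. A short TypeScript sketch; the config keys kafkaApiKey and kafkaApiSecret are illustrative:

    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    const cfg = new pulumi.Config();
    
    // Set with: pulumi config set kafkaApiKey <key>
    //           pulumi config set --secret kafkaApiSecret <secret>
    const orders = new confluentcloud.KafkaTopic("orders", {
        topicName: "orders",
        kafkaCluster: { id: "lkc-abc123" },
        restEndpoint: "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
        credentials: {
            key: cfg.require("kafkaApiKey"),
            secret: cfg.requireSecret("kafkaApiSecret"),
        },
    });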

    KafkaTopicKafkaCluster, KafkaTopicKafkaClusterArgs

    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.
    id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id str
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.

    Import

    You can import a Kafka topic by using the Kafka cluster ID and Kafka topic name in the format <Kafka cluster ID>/<Kafka topic name>, for example:

    Option #1: Manage multiple Kafka clusters in the same Pulumi Stack

    $ export IMPORT_KAFKA_API_KEY="<kafka_api_key>"

    $ export IMPORT_KAFKA_API_SECRET="<kafka_api_secret>"

    $ export IMPORT_KAFKA_REST_ENDPOINT="<kafka_rest_endpoint>"

    $ pulumi import confluentcloud:index/kafkaTopic:KafkaTopic my_topic lkc-abc123/orders-123
    

    Option #2: Manage a single Kafka cluster in the same Pulumi Stack

    $ pulumi import confluentcloud:index/kafkaTopic:KafkaTopic my_topic lkc-abc123/orders-123
    

    For reference, the equivalent Terraform configuration below (carried over from the upstream provider documentation) shows a topic with a full set of custom topic settings. See https://docs.confluent.io/cloud/current/client-apps/topics/manage.html#ak-topic-configurations-for-all-ccloud-cluster-types for the settings supported on each Confluent Cloud cluster type.

    resource "confluent_kafka_topic" "orders" {
      kafka_cluster {
        id = confluent_kafka_cluster.basic-cluster.id
      }

      topic_name       = "orders"
      partitions_count = 4
      rest_endpoint    = confluent_kafka_cluster.basic-cluster.rest_endpoint

      config = {
        "cleanup.policy"                      = "delete"
        "delete.retention.ms"                 = "86400000"
        "max.compaction.lag.ms"               = "9223372036854775807"
        "max.message.bytes"                   = "2097164"
        "message.timestamp.after.max.ms"      = "9223372036854775807"
        "message.timestamp.before.max.ms"     = "9223372036854775807"
        "message.timestamp.difference.max.ms" = "9223372036854775807"
        "message.timestamp.type"              = "CreateTime"
        "min.compaction.lag.ms"               = "0"
        "min.insync.replicas"                 = "2"
        "retention.bytes"                     = "-1"
        "retention.ms"                        = "604800000"
        "segment.bytes"                       = "104857600"
        "segment.ms"                          = "604800000"
      }

      credentials {
        key    = confluent_api_key.app-manager-kafka-api-key.id
        secret = confluent_api_key.app-manager-kafka-api-key.secret
      }
    }
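
    The same settings can be expressed with this Pulumi resource; a sketch in TypeScript (a subset of the config map is shown, and basicCluster and appManagerKafkaApiKey stand in for resources defined elsewhere in the stack):

    import * as confluentcloud from "@pulumi/confluentcloud";
    
    // Sketch: basicCluster (confluentcloud.KafkaCluster) and appManagerKafkaApiKey
    // (confluentcloud.ApiKey) are assumed to be defined elsewhere in the program.
    const orders = new confluentcloud.KafkaTopic("orders", {
        kafkaCluster: { id: basicCluster.id },
        topicName: "orders",
        partitionsCount: 4,
        restEndpoint: basicCluster.restEndpoint,
        config: {
            "cleanup.policy": "delete",
            "retention.ms": "604800000",
            "min.insync.replicas": "2",
            "max.message.bytes": "2097164",
        },
        credentials: {
            key: appManagerKafkaApiKey.id,
            secret: appManagerKafkaApiKey.secret,
        },
    });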

    Warning: Remember to delete your terminal command history afterwards, since it contains the Kafka API key and secret used for the import.

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Confluent Cloud pulumi/pulumi-confluentcloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the confluent Terraform Provider.