Confluent v2.10.0 published on Wednesday, Nov 20, 2024 by Pulumi
confluentcloud.getKafkaTopic
confluentcloud.getKafkaTopic describes a Kafka Topic data source.
Example Usage
Option #1: Manage multiple Kafka clusters in the same Pulumi Stack
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
const orders = confluentcloud.getKafkaTopic({
kafkaCluster: {
id: basic_cluster.id,
},
topicName: "orders",
restEndpoint: basic_cluster.restEndpoint,
credentials: {
key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
},
});
export const config = orders.then(orders => orders.config);
Python
import pulumi
import pulumi_confluentcloud as confluentcloud
orders = confluentcloud.get_kafka_topic(kafka_cluster={
"id": basic_cluster["id"],
},
topic_name="orders",
rest_endpoint=basic_cluster["restEndpoint"],
credentials={
"key": "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
"secret": "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
})
pulumi.export("config", orders.config)
Go
package main
import (
"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
KafkaCluster: confluentcloud.GetKafkaTopicKafkaCluster{
Id: basic_cluster.Id,
},
TopicName: "orders",
RestEndpoint: basic_cluster.RestEndpoint,
Credentials: confluentcloud.GetKafkaTopicCredentials{
Key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
Secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
},
}, nil)
if err != nil {
return err
}
ctx.Export("config", orders.Config)
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;
return await Deployment.RunAsync(() =>
{
var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
{
KafkaCluster = new ConfluentCloud.Inputs.GetKafkaTopicKafkaClusterInputArgs
{
Id = basic_cluster.Id,
},
TopicName = "orders",
RestEndpoint = basic_cluster.RestEndpoint,
Credentials = new ConfluentCloud.Inputs.GetKafkaTopicCredentialsInputArgs
{
Key = "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
Secret = "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
},
});
return new Dictionary<string, object?>
{
["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
};
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.ConfluentcloudFunctions;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicKafkaClusterArgs;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicCredentialsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
.kafkaCluster(GetKafkaTopicKafkaClusterArgs.builder()
.id(basic_cluster.id())
.build())
.topicName("orders")
.restEndpoint(basic_cluster.restEndpoint())
.credentials(GetKafkaTopicCredentialsArgs.builder()
.key("<Kafka API Key for confluent_kafka_cluster.basic-cluster>")
.secret("<Kafka API Secret for confluent_kafka_cluster.basic-cluster>")
.build())
.build());
ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
}
}
YAML
variables:
orders:
fn::invoke:
Function: confluentcloud:getKafkaTopic
Arguments:
kafkaCluster:
id: ${["basic-cluster"].id}
topicName: orders
restEndpoint: ${["basic-cluster"].restEndpoint}
credentials:
key: <Kafka API Key for confluent_kafka_cluster.basic-cluster>
secret: <Kafka API Secret for confluent_kafka_cluster.basic-cluster>
outputs:
config: ${orders.config}
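What Option #1 adds over Option #2 is that every invoke carries its own cluster coordinates, so topics on several Kafka clusters can be read in one program. A brief TypeScript sketch of that pattern; the cluster IDs, endpoints, and credential placeholders below are purely illustrative:
import * as confluentcloud from "@pulumi/confluentcloud";
// Each lookup supplies its own cluster ID, REST endpoint, and Kafka API credentials,
// so two different clusters can be queried side by side.
const ordersOnBasic = confluentcloud.getKafkaTopic({
    kafkaCluster: { id: "lkc-basic111" },
    topicName: "orders",
    restEndpoint: "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
    credentials: {
        key: "<Kafka API Key for the basic cluster>",
        secret: "<Kafka API Secret for the basic cluster>",
    },
});
const ordersOnStandard = confluentcloud.getKafkaTopic({
    kafkaCluster: { id: "lkc-standard222" },
    topicName: "orders",
    restEndpoint: "https://pkc-11111.us-west2.aws.confluent.cloud:443",
    credentials: {
        key: "<Kafka API Key for the standard cluster>",
        secret: "<Kafka API Secret for the standard cluster>",
    },
});
export const basicConfig = ordersOnBasic.then(t => t.config);
export const standardConfig = ordersOnStandard.then(t => t.config);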
Option #2: Manage a single Kafka cluster in the same Pulumi Stack
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
const orders = confluentcloud.getKafkaTopic({
topicName: "orders",
});
export const config = orders.then(orders => orders.config);
Python
import pulumi
import pulumi_confluentcloud as confluentcloud
orders = confluentcloud.get_kafka_topic(topic_name="orders")
pulumi.export("config", orders.config)
Go
package main
import (
"github.com/pulumi/pulumi-confluentcloud/sdk/v2/go/confluentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
orders, err := confluentcloud.LookupKafkaTopic(ctx, &confluentcloud.LookupKafkaTopicArgs{
TopicName: "orders",
}, nil)
if err != nil {
return err
}
ctx.Export("config", orders.Config)
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;
return await Deployment.RunAsync(() =>
{
var orders = ConfluentCloud.GetKafkaTopic.Invoke(new()
{
TopicName = "orders",
});
return new Dictionary<string, object?>
{
["config"] = orders.Apply(getKafkaTopicResult => getKafkaTopicResult.Config),
};
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.confluentcloud.ConfluentcloudFunctions;
import com.pulumi.confluentcloud.inputs.GetKafkaTopicArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var orders = ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
.topicName("orders")
.build());
ctx.export("config", orders.applyValue(getKafkaTopicResult -> getKafkaTopicResult.config()));
}
}
YAML
variables:
orders:
fn::invoke:
Function: confluentcloud:getKafkaTopic
Arguments:
topicName: orders
outputs:
config: ${orders.config}
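In Option #2 the invoke only names the topic; the cluster ID, REST endpoint, and Kafka API credentials are expected to come from the provider configuration. The sketch below illustrates that wiring with an explicit provider instance in TypeScript; the provider option names (kafkaId, kafkaRestEndpoint, kafkaApiKey, kafkaApiSecret) and the config key names are assumptions to verify against your provider version:
import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
const cfg = new pulumi.Config();
// Explicit provider instance carrying the single cluster's details
// (option names assumed; check the provider's configuration reference).
const kafkaProvider = new confluentcloud.Provider("single-cluster", {
    kafkaId: cfg.require("kafkaClusterId"),              // e.g. lkc-abc123
    kafkaRestEndpoint: cfg.require("kafkaRestEndpoint"), // e.g. https://pkc-00000.us-central1.gcp.confluent.cloud:443
    kafkaApiKey: cfg.requireSecret("kafkaApiKey"),
    kafkaApiSecret: cfg.requireSecret("kafkaApiSecret"),
});
// With the cluster bound to the provider, the data source only needs the topic name.
const orders = confluentcloud.getKafkaTopicOutput(
    { topicName: "orders" },
    { provider: kafkaProvider },
);
export const config = orders.config;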
Using getKafkaTopic
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
TypeScript
function getKafkaTopic(args: GetKafkaTopicArgs, opts?: InvokeOptions): Promise<GetKafkaTopicResult>
function getKafkaTopicOutput(args: GetKafkaTopicOutputArgs, opts?: InvokeOptions): Output<GetKafkaTopicResult>
Python
def get_kafka_topic(credentials: Optional[GetKafkaTopicCredentials] = None,
kafka_cluster: Optional[GetKafkaTopicKafkaCluster] = None,
rest_endpoint: Optional[str] = None,
topic_name: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetKafkaTopicResult
def get_kafka_topic_output(credentials: Optional[pulumi.Input[GetKafkaTopicCredentialsArgs]] = None,
kafka_cluster: Optional[pulumi.Input[GetKafkaTopicKafkaClusterArgs]] = None,
rest_endpoint: Optional[pulumi.Input[str]] = None,
topic_name: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetKafkaTopicResult]
Go
func LookupKafkaTopic(ctx *Context, args *LookupKafkaTopicArgs, opts ...InvokeOption) (*LookupKafkaTopicResult, error)
func LookupKafkaTopicOutput(ctx *Context, args *LookupKafkaTopicOutputArgs, opts ...InvokeOption) LookupKafkaTopicResultOutput
> Note: This function is named LookupKafkaTopic in the Go SDK.
C#
public static class GetKafkaTopic
{
public static Task<GetKafkaTopicResult> InvokeAsync(GetKafkaTopicArgs args, InvokeOptions? opts = null)
public static Output<GetKafkaTopicResult> Invoke(GetKafkaTopicInvokeArgs args, InvokeOptions? opts = null)
}
Java
public static CompletableFuture<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
YAML
fn::invoke:
function: confluentcloud:index/getKafkaTopic:getKafkaTopic
arguments:
# arguments dictionary
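To make the difference between the two forms concrete, here is a short TypeScript sketch; it assumes the single-cluster setup of Option #2 above, where the cluster details and credentials come from the provider configuration:
import * as confluentcloud from "@pulumi/confluentcloud";
// Direct form: plain arguments, Promise-wrapped result.
const direct = confluentcloud.getKafkaTopic({ topicName: "orders" });
export const partitionsDirect = direct.then(t => t.partitionsCount);
// Output form: Input-wrapped arguments, Output-wrapped result; it composes with
// other resources' outputs without explicit .then/await plumbing.
const viaOutput = confluentcloud.getKafkaTopicOutput({ topicName: "orders" });
export const partitionsViaOutput = viaOutput.partitionsCount;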
The following arguments are supported. Argument names follow each language's naming convention (for example, restEndpoint in TypeScript, rest_endpoint in Python, RestEndpoint in Go and C#):
- restEndpoint (String) - The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
- topicName (String) - The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
- credentials (GetKafkaTopicCredentials)
- kafkaCluster (GetKafkaTopicKafkaCluster)
getKafkaTopic Result
The following output properties are available. Property names follow each language's naming convention:
- config (Map of String) - (Optional Map) The custom topic settings.
- id (String) - The provider-assigned unique ID for this managed resource.
- partitionsCount (Number) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- restEndpoint (String)
- topicName (String)
- credentials (GetKafkaTopicCredentials)
- kafkaCluster (GetKafkaTopicKafkaCluster)
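Because config comes back as a plain string-to-string map, individual topic settings can be read directly off the result. A minimal TypeScript sketch, again assuming the single-cluster setup of Option #2; retention.ms is only an illustrative key, not a guaranteed entry:
import * as confluentcloud from "@pulumi/confluentcloud";
// Look up the topic; cluster details and credentials come from the provider configuration.
const orders = confluentcloud.getKafkaTopicOutput({ topicName: "orders" });
// Pull one setting out of the returned config map; "retention.ms" is just an example key.
export const retentionMs = orders.config.apply(c => c["retention.ms"]);
export const partitions = orders.partitionsCount;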
Supporting Types
GetKafkaTopicCredentials
- key (String) - The Kafka API Key.
- secret (String) - The Kafka API Secret.
GetKafkaTopicKafkaCluster
- id (String) - The ID of the Kafka cluster, for example, lkc-abc123.
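Because the credentials block carries a live Kafka API key and secret, one option is to keep them in stack configuration as secrets and use the output form of the invoke, which accepts Input-wrapped values. A minimal TypeScript sketch, assuming the config keys kafkaClusterId, kafkaRestEndpoint, kafkaApiKey, and kafkaApiSecret were set beforehand (the secret ones via pulumi config set --secret); the key names are arbitrary:
import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";
const cfg = new pulumi.Config();
// requireSecret returns Output<string>, so the output form of the invoke is used;
// the plain getKafkaTopic form only accepts unwrapped strings.
const orders = confluentcloud.getKafkaTopicOutput({
    topicName: "orders",
    kafkaCluster: { id: cfg.require("kafkaClusterId") },   // e.g. lkc-abc123
    restEndpoint: cfg.require("kafkaRestEndpoint"),
    credentials: {
        key: cfg.requireSecret("kafkaApiKey"),
        secret: cfg.requireSecret("kafkaApiSecret"),
    },
});
export const topicConfig = orders.config;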
Package Details
- Repository: Confluent Cloud pulumi/pulumi-confluentcloud
- License: Apache-2.0
- Notes: This Pulumi package is based on the confluent Terraform Provider.