upstash.KafkaConnector
Example Usage
using Pulumi;
using Upstash = Pulumi.Upstash;
class MyStack : Stack
{
public MyStack()
{
// Not necessary if the topic belongs to an already created cluster.
var exampleKafkaCluster = new Upstash.KafkaCluster("exampleKafkaCluster", new Upstash.KafkaClusterArgs
{
ClusterName = "Terraform_Upstash_Cluster",
Region = "eu-west-1",
Multizone = false,
});
var exampleKafkaTopic = new Upstash.KafkaTopic("exampleKafkaTopic", new Upstash.KafkaTopicArgs
{
TopicName = "TerraformTopic",
Partitions = 1,
RetentionTime = 625135,
RetentionSize = 725124,
MaxMessageSize = 829213,
CleanupPolicy = "delete",
ClusterId = exampleKafkaCluster.ClusterId,
});
var exampleKafkaConnector = new Upstash.KafkaConnector("exampleKafkaConnector", new Upstash.KafkaConnectorArgs
{
ClusterId = exampleKafkaCluster.ClusterId,
Properties =
{
{ "collection", "user123" },
{ "connection.uri", "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority" },
{ "connector.class", "com.mongodb.kafka.connect.MongoSourceConnector" },
{ "database", "myshinynewdb2" },
{ "topics", exampleKafkaTopic.TopicName },
},
});
// OPTIONAL: set RunningState to "running", "paused", or "restart".
// RunningState = "running"
}
}
package main
import (
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
"github.com/upstash/pulumi-upstash/sdk/go/upstash"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Not necessary if the topic belongs to an already created cluster.
exampleKafkaCluster, err := upstash.NewKafkaCluster(ctx, "exampleKafkaCluster", &upstash.KafkaClusterArgs{
ClusterName: pulumi.String("Terraform_Upstash_Cluster"),
Region: pulumi.String("eu-west-1"),
Multizone: pulumi.Bool(false),
})
if err != nil {
return err
}
exampleKafkaTopic, err := upstash.NewKafkaTopic(ctx, "exampleKafkaTopic", &upstash.KafkaTopicArgs{
TopicName: pulumi.String("TerraformTopic"),
Partitions: pulumi.Int(1),
RetentionTime: pulumi.Int(625135),
RetentionSize: pulumi.Int(725124),
MaxMessageSize: pulumi.Int(829213),
CleanupPolicy: pulumi.String("delete"),
ClusterId: exampleKafkaCluster.ClusterId,
})
if err != nil {
return err
}
_, err = upstash.NewKafkaConnector(ctx, "exampleKafkaConnector", &upstash.KafkaConnectorArgs{
ClusterId: exampleKafkaCluster.ClusterId,
Properties: pulumi.Map{
"collection": pulumi.Any("user123"),
"connection.uri": pulumi.Any("mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority"),
"connector.class": pulumi.Any("com.mongodb.kafka.connect.MongoSourceConnector"),
"database": pulumi.Any("myshinynewdb2"),
"topics": exampleKafkaTopic.TopicName,
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import java.util.*;
import java.io.*;
import java.nio.*;
import com.pulumi.*;
import com.pulumi.upstash.KafkaCluster;
import com.pulumi.upstash.KafkaClusterArgs;
import com.pulumi.upstash.KafkaTopic;
import com.pulumi.upstash.KafkaTopicArgs;
import com.pulumi.upstash.KafkaConnector;
import com.pulumi.upstash.KafkaConnectorArgs;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Not necessary if the topic belongs to an already created cluster.
var exampleKafkaCluster = new KafkaCluster("exampleKafkaCluster", KafkaClusterArgs.builder()
.clusterName("Terraform_Upstash_Cluster")
.region("eu-west-1")
.multizone(false)
.build());
var exampleKafkaTopic = new KafkaTopic("exampleKafkaTopic", KafkaTopicArgs.builder()
.topicName("TerraformTopic")
.partitions(1)
.retentionTime(625135)
.retentionSize(725124)
.maxMessageSize(829213)
.cleanupPolicy("delete")
.clusterId(exampleKafkaCluster.clusterId())
.build());
var exampleKafkaConnector = new KafkaConnector("exampleKafkaConnector", KafkaConnectorArgs.builder()
.clusterId(exampleKafkaCluster.clusterId())
.properties(Map.ofEntries(
Map.entry("collection", "user123"),
Map.entry("connection.uri", "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority"),
Map.entry("connector.class", "com.mongodb.kafka.connect.MongoSourceConnector"),
Map.entry("database", "myshinynewdb2"),
Map.entry("topics", exampleKafkaTopic.topicName())
))
.build());
}
}
import pulumi
import upstash_pulumi as upstash
# Not necessary if the topic belongs to an already created cluster.
example_kafka_cluster = upstash.KafkaCluster("exampleKafkaCluster",
cluster_name="Terraform_Upstash_Cluster",
region="eu-west-1",
multizone=False)
example_kafka_topic = upstash.KafkaTopic("exampleKafkaTopic",
topic_name="TerraformTopic",
partitions=1,
retention_time=625135,
retention_size=725124,
max_message_size=829213,
cleanup_policy="delete",
cluster_id=example_kafka_cluster.cluster_id)
example_kafka_connector = upstash.KafkaConnector("exampleKafkaConnector",
cluster_id=example_kafka_cluster.cluster_id,
properties={
"collection": "user123",
"connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
"database": "myshinynewdb2",
"topics": example_kafka_topic.topic_name,
})
# OPTIONAL: set running_state to "running", "paused", or "restart".
# running_state = "running"
import * as pulumi from "@pulumi/pulumi";
import * as pulumi from "@upstash/pulumi";
// Not necessary if the topic belongs to an already created cluster.
const exampleKafkaCluster = new upstash.KafkaCluster("exampleKafkaCluster", {
clusterName: "Terraform_Upstash_Cluster",
region: "eu-west-1",
multizone: false,
});
const exampleKafkaTopic = new upstash.KafkaTopic("exampleKafkaTopic", {
topicName: "TerraformTopic",
partitions: 1,
retentionTime: 625135,
retentionSize: 725124,
maxMessageSize: 829213,
cleanupPolicy: "delete",
clusterId: exampleKafkaCluster.clusterId,
});
const exampleKafkaConnector = new upstash.KafkaConnector("exampleKafkaConnector", {
clusterId: exampleKafkaCluster.clusterId,
properties: {
collection: "user123",
"connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
"connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
database: "myshinynewdb2",
topics: exampleKafkaTopic.topicName,
},
});
// OPTIONAL: set runningState to "running", "paused", or "restart".
// runningState: "running"
resources:
exampleKafkaCluster:
type: upstash:KafkaCluster
properties:
clusterName: Terraform_Upstash_Cluster
region: eu-west-1
multizone: false
exampleKafkaTopic:
type: upstash:KafkaTopic
properties:
topicName: TerraformTopic
partitions: 1
retentionTime: 625135
retentionSize: 725124
maxMessageSize: 829213
cleanupPolicy: delete
clusterId: ${exampleKafkaCluster.clusterId}
exampleKafkaConnector:
type: upstash:KafkaConnector
properties:
clusterId: ${exampleKafkaCluster.clusterId}
properties:
collection: user123
connection.uri: mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority
connector.class: com.mongodb.kafka.connect.MongoSourceConnector
database: myshinynewdb2
topics: ${exampleKafkaTopic.topicName}
Create KafkaConnector Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new KafkaConnector(name: string, args: KafkaConnectorArgs, opts?: CustomResourceOptions);
@overload
def KafkaConnector(resource_name: str,
args: KafkaConnectorArgs,
opts: Optional[ResourceOptions] = None)
@overload
def KafkaConnector(resource_name: str,
opts: Optional[ResourceOptions] = None,
cluster_id: Optional[str] = None,
properties: Optional[Mapping[str, Any]] = None,
name: Optional[str] = None,
running_state: Optional[str] = None)
func NewKafkaConnector(ctx *Context, name string, args KafkaConnectorArgs, opts ...ResourceOption) (*KafkaConnector, error)
public KafkaConnector(string name, KafkaConnectorArgs args, CustomResourceOptions? opts = null)
public KafkaConnector(String name, KafkaConnectorArgs args)
public KafkaConnector(String name, KafkaConnectorArgs args, CustomResourceOptions options)
type: upstash:KafkaConnector
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args KafkaConnectorArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args KafkaConnectorArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args KafkaConnectorArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args KafkaConnectorArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args KafkaConnectorArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var kafkaConnectorResource = new Upstash.KafkaConnector("kafkaConnectorResource", new()
{
ClusterId = "string",
Properties =
{
{ "string", "any" },
},
Name = "string",
RunningState = "string",
});
example, err := upstash.NewKafkaConnector(ctx, "kafkaConnectorResource", &upstash.KafkaConnectorArgs{
ClusterId: pulumi.String("string"),
Properties: pulumi.Map{
"string": pulumi.Any("any"),
},
Name: pulumi.String("string"),
RunningState: pulumi.String("string"),
})
var kafkaConnectorResource = new KafkaConnector("kafkaConnectorResource", KafkaConnectorArgs.builder()
.clusterId("string")
.properties(Map.of("string", "any"))
.name("string")
.runningState("string")
.build());
kafka_connector_resource = upstash.KafkaConnector("kafkaConnectorResource",
cluster_id="string",
properties={
"string": "any",
},
name="string",
running_state="string")
const kafkaConnectorResource = new upstash.KafkaConnector("kafkaConnectorResource", {
clusterId: "string",
properties: {
string: "any",
},
name: "string",
runningState: "string",
});
type: upstash:KafkaConnector
properties:
clusterId: string
name: string
properties:
string: any
runningState: string
KafkaConnector Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The KafkaConnector resource accepts the following input properties:
- ClusterId string - ID of the cluster the connector will be created in
- Properties Dictionary<string, object> - Properties that the connector will have
- Name string - Name of the connector
- RunningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- ClusterId string - ID of the cluster the connector will be created in
- Properties map[string]interface{} - Properties that the connector will have
- Name string - Name of the connector
- RunningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId String - ID of the cluster the connector will be created in
- properties Map<String,Object> - Properties that the connector will have
- name String - Name of the connector
- runningState String - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId string - ID of the cluster the connector will be created in
- properties {[key: string]: any} - Properties that the connector will have
- name string - Name of the connector
- runningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- cluster_id str - ID of the cluster the connector will be created in
- properties Mapping[str, Any] - Properties that the connector will have
- name str - Name of the connector
- running_state str - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId String - ID of the cluster the connector will be created in
- properties Map<Any> - Properties that the connector will have
- name String - Name of the connector
- runningState String - Running state of the connector. Can be either 'paused', 'running' or 'restart'
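For instance, `runningState` can be used to pause or resume a connector without touching its other inputs. A minimal TypeScript sketch, assuming a cluster ID and MongoDB-style connector properties similar to the usage example above (all values are placeholders):
import * as pulumi from "@pulumi/pulumi";
import * as upstash from "@upstash/pulumi";

// Hypothetical example: clusterId and connector properties are placeholder values.
const pausedConnector = new upstash.KafkaConnector("pausedConnector", {
    clusterId: "example-cluster-id",
    properties: {
        "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
        "connection.uri": "mongodb+srv://user:pass@example.mongodb.net",
        "topics": "TerraformTopic",
    },
    // Switch between "running", "paused", and "restart" to control the connector.
    runningState: "paused",
});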
Outputs
All input properties are implicitly available as output properties. Additionally, the KafkaConnector resource produces the following output properties:
- ConnectorId string - Unique Connector ID for created connector
- CreationTime int - Creation time of the connector
- Id string - The provider-assigned unique ID for this managed resource.
- ConnectorId string - Unique Connector ID for created connector
- CreationTime int - Creation time of the connector
- Id string - The provider-assigned unique ID for this managed resource.
- connectorId String - Unique Connector ID for created connector
- creationTime Integer - Creation time of the connector
- id String - The provider-assigned unique ID for this managed resource.
- connectorId string - Unique Connector ID for created connector
- creationTime number - Creation time of the connector
- id string - The provider-assigned unique ID for this managed resource.
- connector_id str - Unique Connector ID for created connector
- creation_time int - Creation time of the connector
- id str - The provider-assigned unique ID for this managed resource.
- connectorId String - Unique Connector ID for created connector
- creationTime Number - Creation time of the connector
- id String - The provider-assigned unique ID for this managed resource.
Look up Existing KafkaConnector Resource
Get an existing KafkaConnector resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: KafkaConnectorState, opts?: CustomResourceOptions): KafkaConnector
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
cluster_id: Optional[str] = None,
connector_id: Optional[str] = None,
creation_time: Optional[int] = None,
name: Optional[str] = None,
properties: Optional[Mapping[str, Any]] = None,
running_state: Optional[str] = None) -> KafkaConnector
func GetKafkaConnector(ctx *Context, name string, id IDInput, state *KafkaConnectorState, opts ...ResourceOption) (*KafkaConnector, error)
public static KafkaConnector Get(string name, Input<string> id, KafkaConnectorState? state, CustomResourceOptions? opts = null)
public static KafkaConnector get(String name, Output<String> id, KafkaConnectorState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ClusterId string - ID of the cluster the connector will be created in
- ConnectorId string - Unique Connector ID for created connector
- CreationTime int - Creation time of the connector
- Name string - Name of the connector
- Properties Dictionary<string, object> - Properties that the connector will have
- RunningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- ClusterId string - ID of the cluster the connector will be created in
- ConnectorId string - Unique Connector ID for created connector
- CreationTime int - Creation time of the connector
- Name string - Name of the connector
- Properties map[string]interface{} - Properties that the connector will have
- RunningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId String - ID of the cluster the connector will be created in
- connectorId String - Unique Connector ID for created connector
- creationTime Integer - Creation time of the connector
- name String - Name of the connector
- properties Map<String,Object> - Properties that the connector will have
- runningState String - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId string - ID of the cluster the connector will be created in
- connectorId string - Unique Connector ID for created connector
- creationTime number - Creation time of the connector
- name string - Name of the connector
- properties {[key: string]: any} - Properties that the connector will have
- runningState string - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- cluster_id str - ID of the cluster the connector will be created in
- connector_id str - Unique Connector ID for created connector
- creation_time int - Creation time of the connector
- name str - Name of the connector
- properties Mapping[str, Any] - Properties that the connector will have
- running_state str - Running state of the connector. Can be either 'paused', 'running' or 'restart'
- clusterId String - ID of the cluster the connector will be created in
- connectorId String - Unique Connector ID for created connector
- creationTime Number - Creation time of the connector
- name String - Name of the connector
- properties Map<Any> - Properties that the connector will have
- runningState String - Running state of the connector. Can be either 'paused', 'running' or 'restart'
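A minimal TypeScript sketch of the lookup, assuming the connector's provider ID is already known (for example from the Upstash console or a previous stack output); the ID value below is a placeholder:
import * as pulumi from "@pulumi/pulumi";
import * as upstash from "@upstash/pulumi";

// Look up an existing connector by its provider-assigned ID (placeholder value).
const existing = upstash.KafkaConnector.get("existingConnector", "replace-with-connector-id");

// The looked-up resource exposes the same outputs as a newly created one.
export const existingRunningState = existing.runningState;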
Package Details
- Repository
- upstash upstash/pulumi-upstash
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the upstash Terraform Provider.