alicloud.gpdb.StreamingJob
Provides a GPDB Streaming Job resource, which manages a real-time data task.
For information about GPDB Streaming Job and how to use it, see What is Streaming Job.
NOTE: Available since v1.231.0.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
const config = new pulumi.Config();
const name = config.get("name") || "terraform-example";
const defaultTXqb15 = new alicloud.vpc.Network("defaultTXqb15", {cidrBlock: "192.168.0.0/16"});
const defaultaSWhbT = new alicloud.vpc.Switch("defaultaSWhbT", {
vpcId: defaultTXqb15.id,
zoneId: "cn-beijing-h",
cidrBlock: "192.168.1.0/24",
});
const defaulth2ghc1 = new alicloud.gpdb.Instance("defaulth2ghc1", {
instanceSpec: "2C8G",
description: name,
segNodeNum: 2,
segStorageType: "cloud_essd",
instanceNetworkType: "VPC",
dbInstanceCategory: "Basic",
paymentType: "PayAsYouGo",
sslEnabled: 0,
engineVersion: "6.0",
zoneId: "cn-beijing-h",
vswitchId: defaultaSWhbT.id,
storageSize: 50,
masterCu: 4,
vpcId: defaultTXqb15.id,
dbInstanceMode: "StorageElastic",
engine: "gpdb",
});
const default2dUszY = new alicloud.gpdb.StreamingDataService("default2dUszY", {
serviceName: "example",
dbInstanceId: defaulth2ghc1.id,
serviceDescription: "example",
serviceSpec: "8",
});
const defaultcDQItu = new alicloud.gpdb.StreamingDataSource("defaultcDQItu", {
dbInstanceId: defaulth2ghc1.id,
dataSourceName: "example",
dataSourceConfig: JSON.stringify({
brokers: "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
delimiter: "|",
format: "delimited",
topic: "ziyuan_example",
}),
dataSourceType: "kafka",
dataSourceDescription: "example",
serviceId: default2dUszY.serviceId,
});
const _default = new alicloud.gpdb.StreamingJob("default", {
account: "example_001",
destSchema: "public",
mode: "professional",
jobName: "example-kafka",
jobDescription: "example-kafka",
destDatabase: "adb_sampledata_tpch",
dbInstanceId: defaulth2ghc1.id,
destTable: "customer",
dataSourceId: defaultcDQItu.dataSourceId,
password: "example_001",
jobConfig: `DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
`,
});
import pulumi
import json
import pulumi_alicloud as alicloud
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "terraform-example"
default_t_xqb15 = alicloud.vpc.Network("defaultTXqb15", cidr_block="192.168.0.0/16")
defaulta_s_whb_t = alicloud.vpc.Switch("defaultaSWhbT",
vpc_id=default_t_xqb15.id,
zone_id="cn-beijing-h",
cidr_block="192.168.1.0/24")
defaulth2ghc1 = alicloud.gpdb.Instance("defaulth2ghc1",
instance_spec="2C8G",
description=name,
seg_node_num=2,
seg_storage_type="cloud_essd",
instance_network_type="VPC",
db_instance_category="Basic",
payment_type="PayAsYouGo",
ssl_enabled=0,
engine_version="6.0",
zone_id="cn-beijing-h",
vswitch_id=defaulta_s_whb_t.id,
storage_size=50,
master_cu=4,
vpc_id=default_t_xqb15.id,
db_instance_mode="StorageElastic",
engine="gpdb")
default2d_usz_y = alicloud.gpdb.StreamingDataService("default2dUszY",
service_name="example",
db_instance_id=defaulth2ghc1.id,
service_description="example",
service_spec="8")
defaultc_dq_itu = alicloud.gpdb.StreamingDataSource("defaultcDQItu",
db_instance_id=defaulth2ghc1.id,
data_source_name="example",
data_source_config=json.dumps({
"brokers": "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
"delimiter": "|",
"format": "delimited",
"topic": "ziyuan_example",
}),
data_source_type="kafka",
data_source_description="example",
service_id=default2d_usz_y.service_id)
default = alicloud.gpdb.StreamingJob("default",
account="example_001",
dest_schema="public",
mode="professional",
job_name="example-kafka",
job_description="example-kafka",
dest_database="adb_sampledata_tpch",
db_instance_id=defaulth2ghc1.id,
dest_table="customer",
data_source_id=defaultc_dq_itu.data_source_id,
password="example_001",
job_config="""ATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: \'|\'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: \'|\'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
""")
package main
import (
"encoding/json"
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/gpdb"
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/vpc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
cfg := config.New(ctx, "")
name := "terraform-example"
if param := cfg.Get("name"); param != "" {
name = param
}
defaultTXqb15, err := vpc.NewNetwork(ctx, "defaultTXqb15", &vpc.NetworkArgs{
CidrBlock: pulumi.String("192.168.0.0/16"),
})
if err != nil {
return err
}
defaultaSWhbT, err := vpc.NewSwitch(ctx, "defaultaSWhbT", &vpc.SwitchArgs{
VpcId: defaultTXqb15.ID(),
ZoneId: pulumi.String("cn-beijing-h"),
CidrBlock: pulumi.String("192.168.1.0/24"),
})
if err != nil {
return err
}
defaulth2ghc1, err := gpdb.NewInstance(ctx, "defaulth2ghc1", &gpdb.InstanceArgs{
InstanceSpec: pulumi.String("2C8G"),
Description: pulumi.String(name),
SegNodeNum: pulumi.Int(2),
SegStorageType: pulumi.String("cloud_essd"),
InstanceNetworkType: pulumi.String("VPC"),
DbInstanceCategory: pulumi.String("Basic"),
PaymentType: pulumi.String("PayAsYouGo"),
SslEnabled: pulumi.Int(0),
EngineVersion: pulumi.String("6.0"),
ZoneId: pulumi.String("cn-beijing-h"),
VswitchId: defaultaSWhbT.ID(),
StorageSize: pulumi.Int(50),
MasterCu: pulumi.Int(4),
VpcId: defaultTXqb15.ID(),
DbInstanceMode: pulumi.String("StorageElastic"),
Engine: pulumi.String("gpdb"),
})
if err != nil {
return err
}
default2dUszY, err := gpdb.NewStreamingDataService(ctx, "default2dUszY", &gpdb.StreamingDataServiceArgs{
ServiceName: pulumi.String("example"),
DbInstanceId: defaulth2ghc1.ID(),
ServiceDescription: pulumi.String("example"),
ServiceSpec: pulumi.String("8"),
})
if err != nil {
return err
}
tmpJSON0, err := json.Marshal(map[string]interface{}{
"brokers": "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
"delimiter": "|",
"format": "delimited",
"topic": "ziyuan_example",
})
if err != nil {
return err
}
json0 := string(tmpJSON0)
defaultcDQItu, err := gpdb.NewStreamingDataSource(ctx, "defaultcDQItu", &gpdb.StreamingDataSourceArgs{
DbInstanceId: defaulth2ghc1.ID(),
DataSourceName: pulumi.String("example"),
DataSourceConfig: pulumi.String(json0),
DataSourceType: pulumi.String("kafka"),
DataSourceDescription: pulumi.String("example"),
ServiceId: default2dUszY.ServiceId,
})
if err != nil {
return err
}
_, err = gpdb.NewStreamingJob(ctx, "default", &gpdb.StreamingJobArgs{
Account: pulumi.String("example_001"),
DestSchema: pulumi.String("public"),
Mode: pulumi.String("professional"),
JobName: pulumi.String("example-kafka"),
JobDescription: pulumi.String("example-kafka"),
DestDatabase: pulumi.String("adb_sampledata_tpch"),
DbInstanceId: defaulth2ghc1.ID(),
DestTable: pulumi.String("customer"),
DataSourceId: defaultcDQItu.DataSourceId,
Password: pulumi.String("example_001"),
JobConfig: pulumi.String(`DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
`),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
return await Deployment.RunAsync(() =>
{
var config = new Config();
var name = config.Get("name") ?? "terraform-example";
var defaultTXqb15 = new AliCloud.Vpc.Network("defaultTXqb15", new()
{
CidrBlock = "192.168.0.0/16",
});
var defaultaSWhbT = new AliCloud.Vpc.Switch("defaultaSWhbT", new()
{
VpcId = defaultTXqb15.Id,
ZoneId = "cn-beijing-h",
CidrBlock = "192.168.1.0/24",
});
var defaulth2ghc1 = new AliCloud.Gpdb.Instance("defaulth2ghc1", new()
{
InstanceSpec = "2C8G",
Description = name,
SegNodeNum = 2,
SegStorageType = "cloud_essd",
InstanceNetworkType = "VPC",
DbInstanceCategory = "Basic",
PaymentType = "PayAsYouGo",
SslEnabled = 0,
EngineVersion = "6.0",
ZoneId = "cn-beijing-h",
VswitchId = defaultaSWhbT.Id,
StorageSize = 50,
MasterCu = 4,
VpcId = defaultTXqb15.Id,
DbInstanceMode = "StorageElastic",
Engine = "gpdb",
});
var default2dUszY = new AliCloud.Gpdb.StreamingDataService("default2dUszY", new()
{
ServiceName = "example",
DbInstanceId = defaulth2ghc1.Id,
ServiceDescription = "example",
ServiceSpec = "8",
});
var defaultcDQItu = new AliCloud.Gpdb.StreamingDataSource("defaultcDQItu", new()
{
DbInstanceId = defaulth2ghc1.Id,
DataSourceName = "example",
DataSourceConfig = JsonSerializer.Serialize(new Dictionary<string, object?>
{
["brokers"] = "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092",
["delimiter"] = "|",
["format"] = "delimited",
["topic"] = "ziyuan_example",
}),
DataSourceType = "kafka",
DataSourceDescription = "example",
ServiceId = default2dUszY.ServiceId,
});
var @default = new AliCloud.Gpdb.StreamingJob("default", new()
{
Account = "example_001",
DestSchema = "public",
Mode = "professional",
JobName = "example-kafka",
JobDescription = "example-kafka",
DestDatabase = "adb_sampledata_tpch",
DbInstanceId = defaulth2ghc1.Id,
DestTable = "customer",
DataSourceId = defaultcDQItu.DataSourceId,
Password = "example_001",
JobConfig = @"ATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.vpc.Network;
import com.pulumi.alicloud.vpc.NetworkArgs;
import com.pulumi.alicloud.vpc.Switch;
import com.pulumi.alicloud.vpc.SwitchArgs;
import com.pulumi.alicloud.gpdb.Instance;
import com.pulumi.alicloud.gpdb.InstanceArgs;
import com.pulumi.alicloud.gpdb.StreamingDataService;
import com.pulumi.alicloud.gpdb.StreamingDataServiceArgs;
import com.pulumi.alicloud.gpdb.StreamingDataSource;
import com.pulumi.alicloud.gpdb.StreamingDataSourceArgs;
import com.pulumi.alicloud.gpdb.StreamingJob;
import com.pulumi.alicloud.gpdb.StreamingJobArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var config = ctx.config();
final var name = config.get("name").orElse("terraform-example");
var defaultTXqb15 = new Network("defaultTXqb15", NetworkArgs.builder()
.cidrBlock("192.168.0.0/16")
.build());
var defaultaSWhbT = new Switch("defaultaSWhbT", SwitchArgs.builder()
.vpcId(defaultTXqb15.id())
.zoneId("cn-beijing-h")
.cidrBlock("192.168.1.0/24")
.build());
var defaulth2ghc1 = new Instance("defaulth2ghc1", InstanceArgs.builder()
.instanceSpec("2C8G")
.description(name)
.segNodeNum(2)
.segStorageType("cloud_essd")
.instanceNetworkType("VPC")
.dbInstanceCategory("Basic")
.paymentType("PayAsYouGo")
.sslEnabled(0)
.engineVersion("6.0")
.zoneId("cn-beijing-h")
.vswitchId(defaultaSWhbT.id())
.storageSize(50)
.masterCu(4)
.vpcId(defaultTXqb15.id())
.dbInstanceMode("StorageElastic")
.engine("gpdb")
.build());
var default2dUszY = new StreamingDataService("default2dUszY", StreamingDataServiceArgs.builder()
.serviceName("example")
.dbInstanceId(defaulth2ghc1.id())
.serviceDescription("example")
.serviceSpec("8")
.build());
var defaultcDQItu = new StreamingDataSource("defaultcDQItu", StreamingDataSourceArgs.builder()
.dbInstanceId(defaulth2ghc1.id())
.dataSourceName("example")
.dataSourceConfig(serializeJson(
jsonObject(
jsonProperty("brokers", "alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092"),
jsonProperty("delimiter", "|"),
jsonProperty("format", "delimited"),
jsonProperty("topic", "ziyuan_example")
)))
.dataSourceType("kafka")
.dataSourceDescription("example")
.serviceId(default2dUszY.serviceId())
.build());
var default_ = new StreamingJob("default", StreamingJobArgs.builder()
.account("example_001")
.destSchema("public")
.mode("professional")
.jobName("example-kafka")
.jobDescription("example-kafka")
.destDatabase("adb_sampledata_tpch")
.dbInstanceId(defaulth2ghc1.id())
.destTable("customer")
.dataSourceId(defaultcDQItu.dataSourceId())
.password("example_001")
.jobConfig("""
DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: \'|\'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: \'|\'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
""")
.build());
}
}
configuration:
name:
type: string
default: terraform-example
resources:
defaultTXqb15:
type: alicloud:vpc:Network
properties:
cidrBlock: 192.168.0.0/16
defaultaSWhbT:
type: alicloud:vpc:Switch
properties:
vpcId: ${defaultTXqb15.id}
zoneId: cn-beijing-h
cidrBlock: 192.168.1.0/24
defaulth2ghc1:
type: alicloud:gpdb:Instance
properties:
instanceSpec: 2C8G
description: ${name}
segNodeNum: '2'
segStorageType: cloud_essd
instanceNetworkType: VPC
dbInstanceCategory: Basic
paymentType: PayAsYouGo
sslEnabled: '0'
engineVersion: '6.0'
zoneId: cn-beijing-h
vswitchId: ${defaultaSWhbT.id}
storageSize: '50'
masterCu: '4'
vpcId: ${defaultTXqb15.id}
dbInstanceMode: StorageElastic
engine: gpdb
default2dUszY:
type: alicloud:gpdb:StreamingDataService
properties:
serviceName: example
dbInstanceId: ${defaulth2ghc1.id}
serviceDescription: example
serviceSpec: '8'
defaultcDQItu:
type: alicloud:gpdb:StreamingDataSource
properties:
dbInstanceId: ${defaulth2ghc1.id}
dataSourceName: example
dataSourceConfig:
fn::toJSON:
brokers: alikafka-post-cn-g4t3t4eod004-1-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-2-vpc.alikafka.aliyuncs.com:9092,alikafka-post-cn-g4t3t4eod004-3-vpc.alikafka.aliyuncs.com:9092
delimiter: '|'
format: delimited
topic: ziyuan_example
dataSourceType: kafka
dataSourceDescription: example
serviceId: ${default2dUszY.serviceId}
default:
type: alicloud:gpdb:StreamingJob
properties:
account: example_001
destSchema: public
mode: professional
jobName: example-kafka
jobDescription: example-kafka
destDatabase: adb_sampledata_tpch
dbInstanceId: ${defaulth2ghc1.id}
destTable: customer
dataSourceId: ${defaultcDQItu.dataSourceId}
password: example_001
jobConfig: |
DATABASE: adb_sampledata_tpch
USER: example_001
PASSWORD: example_001
HOST: gp-2zean69451zsjj139-master.gpdb.rds.aliyuncs.com
PORT: 5432
KAFKA:
INPUT:
SOURCE:
BROKERS: alikafka-post-cn-3mp3t4ekq004-1-vpc.alikafka.aliyuncs.com:9092
TOPIC: ziyuan_example
FALLBACK_OFFSET: LATEST
KEY:
COLUMNS:
- NAME: c_custkey
TYPE: int
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
VALUE:
COLUMNS:
- NAME: c_comment
TYPE: varchar
FORMAT: delimited
DELIMITED_OPTION:
DELIMITER: '|'
ERROR_LIMIT: 10
OUTPUT:
SCHEMA: public
TABLE: customer
MODE: MERGE
MATCH_COLUMNS:
- c_custkey
ORDER_COLUMNS:
- c_custkey
UPDATE_COLUMNS:
- c_custkey
MAPPING:
- NAME: c_custkey
EXPRESSION: c_custkey
COMMIT:
MAX_ROW: 1000
MINIMAL_INTERVAL: 1000
CONSISTENCY: ATLEAST
POLL:
BATCHSIZE: 1000
TIMEOUT: 1000
PROPERTIES:
group.id: ziyuan_example_01
Create StreamingJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new StreamingJob(name: string, args: StreamingJobArgs, opts?: CustomResourceOptions);
@overload
def StreamingJob(resource_name: str,
args: StreamingJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def StreamingJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
db_instance_id: Optional[str] = None,
job_name: Optional[str] = None,
data_source_id: Optional[str] = None,
group_name: Optional[str] = None,
job_description: Optional[str] = None,
dest_database: Optional[str] = None,
dest_schema: Optional[str] = None,
dest_table: Optional[str] = None,
error_limit_count: Optional[int] = None,
fallback_offset: Optional[str] = None,
account: Optional[str] = None,
job_config: Optional[str] = None,
dest_columns: Optional[Sequence[str]] = None,
consistency: Optional[str] = None,
match_columns: Optional[Sequence[str]] = None,
mode: Optional[str] = None,
password: Optional[str] = None,
src_columns: Optional[Sequence[str]] = None,
try_run: Optional[bool] = None,
update_columns: Optional[Sequence[str]] = None,
write_mode: Optional[str] = None)
func NewStreamingJob(ctx *Context, name string, args StreamingJobArgs, opts ...ResourceOption) (*StreamingJob, error)
public StreamingJob(string name, StreamingJobArgs args, CustomResourceOptions? opts = null)
public StreamingJob(String name, StreamingJobArgs args)
public StreamingJob(String name, StreamingJobArgs args, CustomResourceOptions options)
type: alicloud:gpdb:StreamingJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args StreamingJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args StreamingJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args StreamingJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StreamingJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args StreamingJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var streamingJobResource = new AliCloud.Gpdb.StreamingJob("streamingJobResource", new()
{
DbInstanceId = "string",
JobName = "string",
DataSourceId = "string",
GroupName = "string",
JobDescription = "string",
DestDatabase = "string",
DestSchema = "string",
DestTable = "string",
ErrorLimitCount = 0,
FallbackOffset = "string",
Account = "string",
JobConfig = "string",
DestColumns = new[]
{
"string",
},
Consistency = "string",
MatchColumns = new[]
{
"string",
},
Mode = "string",
Password = "string",
SrcColumns = new[]
{
"string",
},
TryRun = false,
UpdateColumns = new[]
{
"string",
},
WriteMode = "string",
});
example, err := gpdb.NewStreamingJob(ctx, "streamingJobResource", &gpdb.StreamingJobArgs{
DbInstanceId: pulumi.String("string"),
JobName: pulumi.String("string"),
DataSourceId: pulumi.String("string"),
GroupName: pulumi.String("string"),
JobDescription: pulumi.String("string"),
DestDatabase: pulumi.String("string"),
DestSchema: pulumi.String("string"),
DestTable: pulumi.String("string"),
ErrorLimitCount: pulumi.Int(0),
FallbackOffset: pulumi.String("string"),
Account: pulumi.String("string"),
JobConfig: pulumi.String("string"),
DestColumns: pulumi.StringArray{
pulumi.String("string"),
},
Consistency: pulumi.String("string"),
MatchColumns: pulumi.StringArray{
pulumi.String("string"),
},
Mode: pulumi.String("string"),
Password: pulumi.String("string"),
SrcColumns: pulumi.StringArray{
pulumi.String("string"),
},
TryRun: pulumi.Bool(false),
UpdateColumns: pulumi.StringArray{
pulumi.String("string"),
},
WriteMode: pulumi.String("string"),
})
var streamingJobResource = new StreamingJob("streamingJobResource", StreamingJobArgs.builder()
.dbInstanceId("string")
.jobName("string")
.dataSourceId("string")
.groupName("string")
.jobDescription("string")
.destDatabase("string")
.destSchema("string")
.destTable("string")
.errorLimitCount(0)
.fallbackOffset("string")
.account("string")
.jobConfig("string")
.destColumns("string")
.consistency("string")
.matchColumns("string")
.mode("string")
.password("string")
.srcColumns("string")
.tryRun(false)
.updateColumns("string")
.writeMode("string")
.build());
streaming_job_resource = alicloud.gpdb.StreamingJob("streamingJobResource",
db_instance_id="string",
job_name="string",
data_source_id="string",
group_name="string",
job_description="string",
dest_database="string",
dest_schema="string",
dest_table="string",
error_limit_count=0,
fallback_offset="string",
account="string",
job_config="string",
dest_columns=["string"],
consistency="string",
match_columns=["string"],
mode="string",
password="string",
src_columns=["string"],
try_run=False,
update_columns=["string"],
write_mode="string")
const streamingJobResource = new alicloud.gpdb.StreamingJob("streamingJobResource", {
dbInstanceId: "string",
jobName: "string",
dataSourceId: "string",
groupName: "string",
jobDescription: "string",
destDatabase: "string",
destSchema: "string",
destTable: "string",
errorLimitCount: 0,
fallbackOffset: "string",
account: "string",
jobConfig: "string",
destColumns: ["string"],
consistency: "string",
matchColumns: ["string"],
mode: "string",
password: "string",
srcColumns: ["string"],
tryRun: false,
updateColumns: ["string"],
writeMode: "string",
});
type: alicloud:gpdb:StreamingJob
properties:
account: string
consistency: string
dataSourceId: string
dbInstanceId: string
destColumns:
- string
destDatabase: string
destSchema: string
destTable: string
errorLimitCount: 0
fallbackOffset: string
groupName: string
jobConfig: string
jobDescription: string
jobName: string
matchColumns:
- string
mode: string
password: string
srcColumns:
- string
tryRun: false
updateColumns:
- string
writeMode: string
StreamingJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The StreamingJob resource accepts the following input properties:
C#
- DataSourceId string - The data source ID.
- DbInstanceId string - The instance ID.
- JobName string - The name of the job.
- Account string - The name of the database account.
- Consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- DestColumns List<string> - Target Field.
- DestDatabase string - The name of the destination database.
- DestSchema string - Target Schema.
- DestTable string - The name of the destination table.
- ErrorLimitCount int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- FallbackOffset string - Automatic offset reset.
- GroupName string - Group Name.
- JobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- JobDescription string - The description of the job.
- MatchColumns List<string> - Match Field.
- Mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- Password string - The password of the database account.
- SrcColumns List<string> - Source Field.
- TryRun bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- UpdateColumns List<string> - Update Field.
- WriteMode string - The write mode. Valid values: insert, update, merge.
Go
- DataSourceId string - The data source ID.
- DbInstanceId string - The instance ID.
- JobName string - The name of the job.
- Account string - The name of the database account.
- Consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- DestColumns []string - Target Field.
- DestDatabase string - The name of the destination database.
- DestSchema string - Target Schema.
- DestTable string - The name of the destination table.
- ErrorLimitCount int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- FallbackOffset string - Automatic offset reset.
- GroupName string - Group Name.
- JobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- JobDescription string - The description of the job.
- MatchColumns []string - Match Field.
- Mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- Password string - The password of the database account.
- SrcColumns []string - Source Field.
- TryRun bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- UpdateColumns []string - Update Field.
- WriteMode string - The write mode. Valid values: insert, update, merge.
Java
- dataSourceId String - The data source ID.
- dbInstanceId String - The instance ID.
- jobName String - The name of the job.
- account String - The name of the database account.
- consistency String - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- destColumns List<String> - Target Field.
- destDatabase String - The name of the destination database.
- destSchema String - Target Schema.
- destTable String - The name of the destination table.
- errorLimitCount Integer - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset String - Automatic offset reset.
- groupName String - Group Name.
- jobConfig String - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription String - The description of the job.
- matchColumns List<String> - Match Field.
- mode String - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password String - The password of the database account.
- srcColumns List<String> - Source Field.
- tryRun Boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns List<String> - Update Field.
- writeMode String - The write mode. Valid values: insert, update, merge.
TypeScript
- dataSourceId string - The data source ID.
- dbInstanceId string - The instance ID.
- jobName string - The name of the job.
- account string - The name of the database account.
- consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- destColumns string[] - Target Field.
- destDatabase string - The name of the destination database.
- destSchema string - Target Schema.
- destTable string - The name of the destination table.
- errorLimitCount number - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset string - Automatic offset reset.
- groupName string - Group Name.
- jobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription string - The description of the job.
- matchColumns string[] - Match Field.
- mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password string - The password of the database account.
- srcColumns string[] - Source Field.
- tryRun boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns string[] - Update Field.
- writeMode string - The write mode. Valid values: insert, update, merge.
Python
- data_source_id str - The data source ID.
- db_instance_id str - The instance ID.
- job_name str - The name of the job.
- account str - The name of the database account.
- consistency str - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- dest_columns Sequence[str] - Target Field.
- dest_database str - The name of the destination database.
- dest_schema str - Target Schema.
- dest_table str - The name of the destination table.
- error_limit_count int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallback_offset str - Automatic offset reset.
- group_name str - Group Name.
- job_config str - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- job_description str - The description of the job.
- match_columns Sequence[str] - Match Field.
- mode str - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password str - The password of the database account.
- src_columns Sequence[str] - Source Field.
- try_run bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- update_columns Sequence[str] - Update Field.
- write_mode str - The write mode. Valid values: insert, update, merge.
YAML
- dataSourceId String - The data source ID.
- dbInstanceId String - The instance ID.
- jobName String - The name of the job.
- account String - The name of the database account.
- consistency String - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- destColumns List<String> - Target Field.
- destDatabase String - The name of the destination database.
- destSchema String - Target Schema.
- destTable String - The name of the destination table.
- errorLimitCount Number - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset String - Automatic offset reset.
- groupName String - Group Name.
- jobConfig String - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription String - The description of the job.
- matchColumns List<String> - Match Field.
- mode String - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password String - The password of the database account.
- srcColumns List<String> - Source Field.
- tryRun Boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns List<String> - Update Field.
- writeMode String - The write mode. Valid values: insert, update, merge.
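All of the examples above run in professional mode and supply a YAML jobConfig. When mode is set to basic, the mapping is expressed through the column and write-mode inputs instead. The following TypeScript sketch illustrates the shape of a basic-mode job; the instance ID, data source ID, credentials, and column names are placeholders, not values from a real deployment.
import * as alicloud from "@pulumi/alicloud";

// A minimal basic-mode sketch. Every ID, credential, and column name below is a
// placeholder; substitute the values of your own instance, data source, and table.
const basicJob = new alicloud.gpdb.StreamingJob("basicJob", {
    dbInstanceId: "gp-example",              // placeholder AnalyticDB for PostgreSQL instance ID
    dataSourceId: "example-data-source-id",  // placeholder streaming data source ID
    jobName: "example-basic",
    jobDescription: "basic-mode example",
    mode: "basic",                           // column mappings below replace a YAML jobConfig
    account: "example_001",
    password: "example_001",
    destDatabase: "adb_sampledata_tpch",
    destSchema: "public",
    destTable: "customer",
    srcColumns: ["c_custkey", "c_comment"],  // fields read from each Kafka record
    destColumns: ["c_custkey", "c_comment"], // fields written to the destination table
    matchColumns: ["c_custkey"],             // rows are matched on these columns
    updateColumns: ["c_comment"],            // columns updated when a match is found
    writeMode: "merge",
    consistency: "ATLEAST",
    errorLimitCount: 10,
    fallbackOffset: "LATEST",
    groupName: "example_group_01",
});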
Outputs
All input properties are implicitly available as output properties. Additionally, the StreamingJob resource produces the following output properties:
C#
- CreateTime string - The creation time of the resource.
- Id string - The provider-assigned unique ID for this managed resource.
- JobId string - The job ID.
- Status string - Service status.
Go
- CreateTime string - The creation time of the resource.
- Id string - The provider-assigned unique ID for this managed resource.
- JobId string - The job ID.
- Status string - Service status.
Java
- createTime String - The creation time of the resource.
- id String - The provider-assigned unique ID for this managed resource.
- jobId String - The job ID.
- status String - Service status.
TypeScript
- createTime string - The creation time of the resource.
- id string - The provider-assigned unique ID for this managed resource.
- jobId string - The job ID.
- status string - Service status.
Python
- create_time str - The creation time of the resource.
- id str - The provider-assigned unique ID for this managed resource.
- job_id str - The job ID.
- status str - Service status.
YAML
- createTime String - The creation time of the resource.
- id String - The provider-assigned unique ID for this managed resource.
- jobId String - The job ID.
- status String - Service status.
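Because inputs are also available as outputs, the service-assigned job ID and status can be exported from a stack. A short TypeScript sketch, assuming the `_default` StreamingJob from the Basic Usage example above:
// Export the job ID and status reported by the service for the `_default` job above.
export const streamingJobId = _default.jobId;
export const streamingJobStatus = _default.status;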
Look up Existing StreamingJob Resource
Get an existing StreamingJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StreamingJobState, opts?: CustomResourceOptions): StreamingJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
account: Optional[str] = None,
consistency: Optional[str] = None,
create_time: Optional[str] = None,
data_source_id: Optional[str] = None,
db_instance_id: Optional[str] = None,
dest_columns: Optional[Sequence[str]] = None,
dest_database: Optional[str] = None,
dest_schema: Optional[str] = None,
dest_table: Optional[str] = None,
error_limit_count: Optional[int] = None,
fallback_offset: Optional[str] = None,
group_name: Optional[str] = None,
job_config: Optional[str] = None,
job_description: Optional[str] = None,
job_id: Optional[str] = None,
job_name: Optional[str] = None,
match_columns: Optional[Sequence[str]] = None,
mode: Optional[str] = None,
password: Optional[str] = None,
src_columns: Optional[Sequence[str]] = None,
status: Optional[str] = None,
try_run: Optional[bool] = None,
update_columns: Optional[Sequence[str]] = None,
write_mode: Optional[str] = None) -> StreamingJob
func GetStreamingJob(ctx *Context, name string, id IDInput, state *StreamingJobState, opts ...ResourceOption) (*StreamingJob, error)
public static StreamingJob Get(string name, Input<string> id, StreamingJobState? state, CustomResourceOptions? opts = null)
public static StreamingJob get(String name, Output<String> id, StreamingJobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
C#
- Account string - The name of the database account.
- Consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- CreateTime string - The creation time of the resource.
- DataSourceId string - The data source ID.
- DbInstanceId string - The instance ID.
- DestColumns List<string> - Target Field.
- DestDatabase string - The name of the destination database.
- DestSchema string - Target Schema.
- DestTable string - The name of the destination table.
- ErrorLimitCount int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- FallbackOffset string - Automatic offset reset.
- GroupName string - Group Name.
- JobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- JobDescription string - The description of the job.
- JobId string - The job ID.
- JobName string - The name of the job.
- MatchColumns List<string> - Match Field.
- Mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- Password string - The password of the database account.
- SrcColumns List<string> - Source Field.
- Status string - Service status.
- TryRun bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- UpdateColumns List<string> - Update Field.
- WriteMode string - The write mode. Valid values: insert, update, merge.
Go
- Account string - The name of the database account.
- Consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- CreateTime string - The creation time of the resource.
- DataSourceId string - The data source ID.
- DbInstanceId string - The instance ID.
- DestColumns []string - Target Field.
- DestDatabase string - The name of the destination database.
- DestSchema string - Target Schema.
- DestTable string - The name of the destination table.
- ErrorLimitCount int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- FallbackOffset string - Automatic offset reset.
- GroupName string - Group Name.
- JobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- JobDescription string - The description of the job.
- JobId string - The job ID.
- JobName string - The name of the job.
- MatchColumns []string - Match Field.
- Mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- Password string - The password of the database account.
- SrcColumns []string - Source Field.
- Status string - Service status.
- TryRun bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- UpdateColumns []string - Update Field.
- WriteMode string - The write mode. Valid values: insert, update, merge.
Java
- account String - The name of the database account.
- consistency String - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- createTime String - The creation time of the resource.
- dataSourceId String - The data source ID.
- dbInstanceId String - The instance ID.
- destColumns List<String> - Target Field.
- destDatabase String - The name of the destination database.
- destSchema String - Target Schema.
- destTable String - The name of the destination table.
- errorLimitCount Integer - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset String - Automatic offset reset.
- groupName String - Group Name.
- jobConfig String - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription String - The description of the job.
- jobId String - The job ID.
- jobName String - The name of the job.
- matchColumns List<String> - Match Field.
- mode String - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password String - The password of the database account.
- srcColumns List<String> - Source Field.
- status String - Service status.
- tryRun Boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns List<String> - Update Field.
- writeMode String - The write mode. Valid values: insert, update, merge.
TypeScript
- account string - The name of the database account.
- consistency string - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- createTime string - The creation time of the resource.
- dataSourceId string - The data source ID.
- dbInstanceId string - The instance ID.
- destColumns string[] - Target Field.
- destDatabase string - The name of the destination database.
- destSchema string - Target Schema.
- destTable string - The name of the destination table.
- errorLimitCount number - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset string - Automatic offset reset.
- groupName string - Group Name.
- jobConfig string - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription string - The description of the job.
- jobId string - The job ID.
- jobName string - The name of the job.
- matchColumns string[] - Match Field.
- mode string - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password string - The password of the database account.
- srcColumns string[] - Source Field.
- status string - Service status.
- tryRun boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns string[] - Update Field.
- writeMode string - The write mode. Valid values: insert, update, merge.
Python
- account str - The name of the database account.
- consistency str - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- create_time str - The creation time of the resource.
- data_source_id str - The data source ID.
- db_instance_id str - The instance ID.
- dest_columns Sequence[str] - Target Field.
- dest_database str - The name of the destination database.
- dest_schema str - Target Schema.
- dest_table str - The name of the destination table.
- error_limit_count int - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallback_offset str - Automatic offset reset.
- group_name str - Group Name.
- job_config str - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- job_description str - The description of the job.
- job_id str - The job ID.
- job_name str - The name of the job.
- match_columns Sequence[str] - Match Field.
- mode str - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password str - The password of the database account.
- src_columns Sequence[str] - Source Field.
- status str - Service status.
- try_run bool - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- update_columns Sequence[str] - Update Field.
- write_mode str - The write mode. Valid values: insert, update, merge.
YAML
- account String - The name of the database account.
- consistency String - The delivery guarantee setting. Valid values: ATLEAST, EXACTLY.
- createTime String - The creation time of the resource.
- dataSourceId String - The data source ID.
- dbInstanceId String - The instance ID.
- destColumns List<String> - Target Field.
- destDatabase String - The name of the destination database.
- destSchema String - Target Schema.
- destTable String - The name of the destination table.
- errorLimitCount Number - The number of allowed error rows. Write failures occur when Kafka data does not match the destination table in AnalyticDB for PostgreSQL. If the specified value is exceeded, the job fails.
- fallbackOffset String - Automatic offset reset.
- groupName String - Group Name.
- jobConfig String - The YAML configuration file of the job. This parameter must be specified when Mode is set to professional.
- jobDescription String - The description of the job.
- jobId String - The job ID.
- jobName String - The name of the job.
- matchColumns List<String> - Match Field.
- mode String - The configuration mode. Valid values: basic (you must configure the configuration parameters), professional (you can submit a YAML configuration file).
- password String - The password of the database account.
- srcColumns List<String> - Source Field.
- status String - Service status.
- tryRun Boolean - Specifies whether to test the real-time job. Valid values: true, false. Default value: false.
- updateColumns List<String> - Update Field.
- writeMode String - The write mode. Valid values: insert, update, merge.
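For example, the following TypeScript sketch looks up an existing job by its resource ID; "gp-example:1234" is a hypothetical <db_instance_id>:<job_id> value in the same format used by the import command below.
import * as alicloud from "@pulumi/alicloud";

// Look up an existing streaming job. "gp-example:1234" is a hypothetical
// <db_instance_id>:<job_id> value; replace it with a real resource ID.
const existing = alicloud.gpdb.StreamingJob.get("existing", "gp-example:1234");

// The looked-up resource exposes the same output properties as a managed one.
export const existingJobStatus = existing.status;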
Import
GPDB Streaming Job can be imported using the id, e.g.
$ pulumi import alicloud:gpdb/streamingJob:StreamingJob example <db_instance_id>:<job_id>
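For instance, with a hypothetical instance ID and job ID:
$ pulumi import alicloud:gpdb/streamingJob:StreamingJob example gp-exampleinstance:1234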
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository: Alibaba Cloud pulumi/pulumi-alicloud
- License: Apache-2.0
- Notes: This Pulumi package is based on the alicloud Terraform Provider.