# Resource: mongodbatlas.CloudBackupSnapshotExportJob
mongodbatlas.CloudBackupSnapshotExportJob allows you to create a cloud backup snapshot export job for the specified project.
NOTE: Groups and projects are synonymous terms. You may find groupId in the official documentation.
Example Usage
Export one snapshot
import * as pulumi from "@pulumi/pulumi";
import * as mongodbatlas from "@pulumi/mongodbatlas";
const test = new mongodbatlas.CloudBackupSnapshotExportBucket("test", {
projectId: "{PROJECT_ID}",
iamRoleId: "{IAM_ROLE_ID}",
bucketName: "example_bucket",
cloudProvider: "AWS",
});
const testCloudBackupSnapshotExportJob = new mongodbatlas.CloudBackupSnapshotExportJob("test", {
projectId: "{PROJECT_ID}",
clusterName: "{CLUSTER_NAME}",
snapshotId: "{SNAPSHOT_ID}",
exportBucketId: test.exportBucketId,
customDatas: [{
key: "exported by",
value: "myName",
}],
});
import pulumi
import pulumi_mongodbatlas as mongodbatlas
test = mongodbatlas.CloudBackupSnapshotExportBucket("test",
project_id="{PROJECT_ID}",
iam_role_id="{IAM_ROLE_ID}",
bucket_name="example_bucket",
cloud_provider="AWS")
test_cloud_backup_snapshot_export_job = mongodbatlas.CloudBackupSnapshotExportJob("test",
project_id="{PROJECT_ID}",
cluster_name="{CLUSTER_NAME}",
snapshot_id="{SNAPSHOT_ID}",
export_bucket_id=test.export_bucket_id,
custom_datas=[{
"key": "exported by",
"value": "myName",
}])
package main
import (
"github.com/pulumi/pulumi-mongodbatlas/sdk/v3/go/mongodbatlas"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
test, err := mongodbatlas.NewCloudBackupSnapshotExportBucket(ctx, "test", &mongodbatlas.CloudBackupSnapshotExportBucketArgs{
ProjectId: pulumi.String("{PROJECT_ID}"),
IamRoleId: pulumi.String("{IAM_ROLE_ID}"),
BucketName: pulumi.String("example_bucket"),
CloudProvider: pulumi.String("AWS"),
})
if err != nil {
return err
}
_, err = mongodbatlas.NewCloudBackupSnapshotExportJob(ctx, "test", &mongodbatlas.CloudBackupSnapshotExportJobArgs{
ProjectId: pulumi.String("{PROJECT_ID}"),
ClusterName: pulumi.String("{CLUSTER_NAME}"),
SnapshotId: pulumi.String("{SNAPSHOT_ID}"),
ExportBucketId: test.ExportBucketId,
CustomDatas: mongodbatlas.CloudBackupSnapshotExportJobCustomDataArray{
&mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs{
Key: pulumi.String("exported by"),
Value: pulumi.String("myName"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Mongodbatlas = Pulumi.Mongodbatlas;
return await Deployment.RunAsync(() =>
{
var test = new Mongodbatlas.CloudBackupSnapshotExportBucket("test", new()
{
ProjectId = "{PROJECT_ID}",
IamRoleId = "{IAM_ROLE_ID}",
BucketName = "example_bucket",
CloudProvider = "AWS",
});
var testCloudBackupSnapshotExportJob = new Mongodbatlas.CloudBackupSnapshotExportJob("test", new()
{
ProjectId = "{PROJECT_ID}",
ClusterName = "{CLUSTER_NAME}",
SnapshotId = "{SNAPSHOT_ID}",
ExportBucketId = test.ExportBucketId,
CustomDatas = new[]
{
new Mongodbatlas.Inputs.CloudBackupSnapshotExportJobCustomDataArgs
{
Key = "exported by",
Value = "myName",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucket;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucketArgs;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportJob;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportJobArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupSnapshotExportJobCustomDataArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var test = new CloudBackupSnapshotExportBucket("test", CloudBackupSnapshotExportBucketArgs.builder()
.projectId("{PROJECT_ID}")
.iamRoleId("{IAM_ROLE_ID}")
.bucketName("example_bucket")
.cloudProvider("AWS")
.build());
var testCloudBackupSnapshotExportJob = new CloudBackupSnapshotExportJob("testCloudBackupSnapshotExportJob", CloudBackupSnapshotExportJobArgs.builder()
.projectId("{PROJECT_ID}")
.clusterName("{CLUSTER_NAME}")
.snapshotId("{SNAPSHOT_ID}")
.exportBucketId(test.exportBucketId())
.customDatas(CloudBackupSnapshotExportJobCustomDataArgs.builder()
.key("exported by")
.value("myName")
.build())
.build());
}
}
resources:
test:
type: mongodbatlas:CloudBackupSnapshotExportBucket
properties:
projectId: '{PROJECT_ID}'
iamRoleId: '{IAM_ROLE_ID}'
bucketName: example_bucket
cloudProvider: AWS
testCloudBackupSnapshotExportJob:
type: mongodbatlas:CloudBackupSnapshotExportJob
name: test
properties:
projectId: '{PROJECT_ID}'
clusterName: '{CLUSTER_NAME}'
snapshotId: '{SNAPSHOT_ID}'
exportBucketId: ${test.exportBucketId}
customDatas:
- key: exported by
value: myName
Create backup and automatic snapshot export policies
import * as pulumi from "@pulumi/pulumi";
import * as mongodbatlas from "@pulumi/mongodbatlas";
const _export = new mongodbatlas.CloudBackupSnapshotExportBucket("export", {
projectId: "{PROJECT_ID}",
iamRoleId: "{IAM_ROLE_ID}",
bucketName: "example_bucket",
cloudProvider: "AWS",
});
const backup = new mongodbatlas.CloudBackupSchedule("backup", {
projectId: "{PROJECT_ID}",
clusterName: "{CLUSTER_NAME}",
autoExportEnabled: true,
"export": {
exportBucketId: _export.exportBucketId,
frequencyType: "daily",
},
useOrgAndGroupNamesInExportPrefix: true,
referenceHourOfDay: 7,
referenceMinuteOfHour: 0,
restoreWindowDays: 5,
policyItemHourly: {
frequencyInterval: 6,
retentionUnit: "days",
retentionValue: 7,
},
policyItemDaily: {
frequencyInterval: 1,
retentionUnit: "days",
retentionValue: 7,
},
policyItemWeeklies: [{
frequencyInterval: 6,
retentionUnit: "weeks",
retentionValue: 4,
}],
policyItemMonthlies: [{
frequencyInterval: 28,
retentionUnit: "months",
retentionValue: 12,
}],
});
import pulumi
import pulumi_mongodbatlas as mongodbatlas
export = mongodbatlas.CloudBackupSnapshotExportBucket("export",
project_id="{PROJECT_ID}",
iam_role_id="{IAM_ROLE_ID}",
bucket_name="example_bucket",
cloud_provider="AWS")
backup = mongodbatlas.CloudBackupSchedule("backup",
project_id="{PROJECT_ID}",
cluster_name="{CLUSTER_NAME}",
auto_export_enabled=True,
export={
"export_bucket_id": export.export_bucket_id,
"frequency_type": "daily",
},
use_org_and_group_names_in_export_prefix=True,
reference_hour_of_day=7,
reference_minute_of_hour=0,
restore_window_days=5,
policy_item_hourly={
"frequency_interval": 6,
"retention_unit": "days",
"retention_value": 7,
},
policy_item_daily={
"frequency_interval": 1,
"retention_unit": "days",
"retention_value": 7,
},
policy_item_weeklies=[{
"frequency_interval": 6,
"retention_unit": "weeks",
"retention_value": 4,
}],
policy_item_monthlies=[{
"frequency_interval": 28,
"retention_unit": "months",
"retention_value": 12,
}])
package main
import (
"github.com/pulumi/pulumi-mongodbatlas/sdk/v3/go/mongodbatlas"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
export, err := mongodbatlas.NewCloudBackupSnapshotExportBucket(ctx, "export", &mongodbatlas.CloudBackupSnapshotExportBucketArgs{
ProjectId: pulumi.String("{PROJECT_ID}"),
IamRoleId: pulumi.String("{IAM_ROLE_ID}"),
BucketName: pulumi.String("example_bucket"),
CloudProvider: pulumi.String("AWS"),
})
if err != nil {
return err
}
_, err = mongodbatlas.NewCloudBackupSchedule(ctx, "backup", &mongodbatlas.CloudBackupScheduleArgs{
ProjectId: pulumi.String("{PROJECT_ID}"),
ClusterName: pulumi.String("{CLUSTER_NAME}"),
AutoExportEnabled: pulumi.Bool(true),
Export: &mongodbatlas.CloudBackupScheduleExportArgs{
ExportBucketId: export.ExportBucketId,
FrequencyType: pulumi.String("daily"),
},
UseOrgAndGroupNamesInExportPrefix: pulumi.Bool(true),
ReferenceHourOfDay: pulumi.Int(7),
ReferenceMinuteOfHour: pulumi.Int(0),
RestoreWindowDays: pulumi.Int(5),
PolicyItemHourly: &mongodbatlas.CloudBackupSchedulePolicyItemHourlyArgs{
FrequencyInterval: pulumi.Int(6),
RetentionUnit: pulumi.String("days"),
RetentionValue: pulumi.Int(7),
},
PolicyItemDaily: &mongodbatlas.CloudBackupSchedulePolicyItemDailyArgs{
FrequencyInterval: pulumi.Int(1),
RetentionUnit: pulumi.String("days"),
RetentionValue: pulumi.Int(7),
},
PolicyItemWeeklies: mongodbatlas.CloudBackupSchedulePolicyItemWeeklyArray{
&mongodbatlas.CloudBackupSchedulePolicyItemWeeklyArgs{
FrequencyInterval: pulumi.Int(6),
RetentionUnit: pulumi.String("weeks"),
RetentionValue: pulumi.Int(4),
},
},
PolicyItemMonthlies: mongodbatlas.CloudBackupSchedulePolicyItemMonthlyArray{
&mongodbatlas.CloudBackupSchedulePolicyItemMonthlyArgs{
FrequencyInterval: pulumi.Int(28),
RetentionUnit: pulumi.String("months"),
RetentionValue: pulumi.Int(12),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Mongodbatlas = Pulumi.Mongodbatlas;
return await Deployment.RunAsync(() =>
{
var export = new Mongodbatlas.CloudBackupSnapshotExportBucket("export", new()
{
ProjectId = "{PROJECT_ID}",
IamRoleId = "{IAM_ROLE_ID}",
BucketName = "example_bucket",
CloudProvider = "AWS",
});
var backup = new Mongodbatlas.CloudBackupSchedule("backup", new()
{
ProjectId = "{PROJECT_ID}",
ClusterName = "{CLUSTER_NAME}",
AutoExportEnabled = true,
Export = new Mongodbatlas.Inputs.CloudBackupScheduleExportArgs
{
ExportBucketId = export.ExportBucketId,
FrequencyType = "daily",
},
UseOrgAndGroupNamesInExportPrefix = true,
ReferenceHourOfDay = 7,
ReferenceMinuteOfHour = 0,
RestoreWindowDays = 5,
PolicyItemHourly = new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemHourlyArgs
{
FrequencyInterval = 6,
RetentionUnit = "days",
RetentionValue = 7,
},
PolicyItemDaily = new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemDailyArgs
{
FrequencyInterval = 1,
RetentionUnit = "days",
RetentionValue = 7,
},
PolicyItemWeeklies = new[]
{
new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemWeeklyArgs
{
FrequencyInterval = 6,
RetentionUnit = "weeks",
RetentionValue = 4,
},
},
PolicyItemMonthlies = new[]
{
new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemMonthlyArgs
{
FrequencyInterval = 28,
RetentionUnit = "months",
RetentionValue = 12,
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucket;
import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucketArgs;
import com.pulumi.mongodbatlas.CloudBackupSchedule;
import com.pulumi.mongodbatlas.CloudBackupScheduleArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupScheduleExportArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemHourlyArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemDailyArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemWeeklyArgs;
import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemMonthlyArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var export = new CloudBackupSnapshotExportBucket("export", CloudBackupSnapshotExportBucketArgs.builder()
.projectId("{PROJECT_ID}")
.iamRoleId("{IAM_ROLE_ID}")
.bucketName("example_bucket")
.cloudProvider("AWS")
.build());
var backup = new CloudBackupSchedule("backup", CloudBackupScheduleArgs.builder()
.projectId("{PROJECT_ID}")
.clusterName("{CLUSTER_NAME}")
.autoExportEnabled(true)
.export(CloudBackupScheduleExportArgs.builder()
.exportBucketId(export.exportBucketId())
.frequencyType("daily")
.build())
.useOrgAndGroupNamesInExportPrefix(true)
.referenceHourOfDay(7)
.referenceMinuteOfHour(0)
.restoreWindowDays(5)
.policyItemHourly(CloudBackupSchedulePolicyItemHourlyArgs.builder()
.frequencyInterval(6)
.retentionUnit("days")
.retentionValue(7)
.build())
.policyItemDaily(CloudBackupSchedulePolicyItemDailyArgs.builder()
.frequencyInterval(1)
.retentionUnit("days")
.retentionValue(7)
.build())
.policyItemWeeklies(CloudBackupSchedulePolicyItemWeeklyArgs.builder()
.frequencyInterval(6)
.retentionUnit("weeks")
.retentionValue(4)
.build())
.policyItemMonthlies(CloudBackupSchedulePolicyItemMonthlyArgs.builder()
.frequencyInterval(28)
.retentionUnit("months")
.retentionValue(12)
.build())
.build());
}
}
resources:
export:
type: mongodbatlas:CloudBackupSnapshotExportBucket
properties:
projectId: '{PROJECT_ID}'
iamRoleId: '{IAM_ROLE_ID}'
bucketName: example_bucket
cloudProvider: AWS
backup:
type: mongodbatlas:CloudBackupSchedule
properties:
projectId: '{PROJECT_ID}'
clusterName: '{CLUSTER_NAME}'
autoExportEnabled: true
export:
exportBucketId: ${export.exportBucketId}
frequencyType: daily
useOrgAndGroupNamesInExportPrefix: true
referenceHourOfDay: 7
referenceMinuteOfHour: 0
restoreWindowDays: 5
policyItemHourly:
frequencyInterval: 6
retentionUnit: days
retentionValue: 7
policyItemDaily:
frequencyInterval: 1
retentionUnit: days
retentionValue: 7
policyItemWeeklies:
- frequencyInterval: 6
retentionUnit: weeks
retentionValue: 4
policyItemMonthlies:
- frequencyInterval: 28
retentionUnit: months
retentionValue: 12
Create CloudBackupSnapshotExportJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new CloudBackupSnapshotExportJob(name: string, args: CloudBackupSnapshotExportJobArgs, opts?: CustomResourceOptions);
@overload
def CloudBackupSnapshotExportJob(resource_name: str,
args: CloudBackupSnapshotExportJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def CloudBackupSnapshotExportJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
cluster_name: Optional[str] = None,
custom_datas: Optional[Sequence[CloudBackupSnapshotExportJobCustomDataArgs]] = None,
export_bucket_id: Optional[str] = None,
project_id: Optional[str] = None,
snapshot_id: Optional[str] = None)
func NewCloudBackupSnapshotExportJob(ctx *Context, name string, args CloudBackupSnapshotExportJobArgs, opts ...ResourceOption) (*CloudBackupSnapshotExportJob, error)
public CloudBackupSnapshotExportJob(string name, CloudBackupSnapshotExportJobArgs args, CustomResourceOptions? opts = null)
public CloudBackupSnapshotExportJob(String name, CloudBackupSnapshotExportJobArgs args)
public CloudBackupSnapshotExportJob(String name, CloudBackupSnapshotExportJobArgs args, CustomResourceOptions options)
type: mongodbatlas:CloudBackupSnapshotExportJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
The constructor parameters are equivalent across languages:
- name (resource_name in Python) - The unique name of the resource.
- args (CloudBackupSnapshotExportJobArgs) - The arguments to resource properties.
- opts / options (CustomResourceOptions; ResourceOptions in Python, ResourceOption in Go) - Bag of options to control the resource's behavior.
- ctx (Context, Go only) - Context object for the current deployment.
Constructor example
The following reference example uses placeholder values for all input properties.
var cloudBackupSnapshotExportJobResource = new Mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", new()
{
ClusterName = "string",
CustomDatas = new[]
{
new Mongodbatlas.Inputs.CloudBackupSnapshotExportJobCustomDataArgs
{
Key = "string",
Value = "string",
},
},
ExportBucketId = "string",
ProjectId = "string",
SnapshotId = "string",
});
example, err := mongodbatlas.NewCloudBackupSnapshotExportJob(ctx, "cloudBackupSnapshotExportJobResource", &mongodbatlas.CloudBackupSnapshotExportJobArgs{
ClusterName: pulumi.String("string"),
CustomDatas: mongodbatlas.CloudBackupSnapshotExportJobCustomDataArray{
&mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs{
Key: pulumi.String("string"),
Value: pulumi.String("string"),
},
},
ExportBucketId: pulumi.String("string"),
ProjectId: pulumi.String("string"),
SnapshotId: pulumi.String("string"),
})
var cloudBackupSnapshotExportJobResource = new CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", CloudBackupSnapshotExportJobArgs.builder()
.clusterName("string")
.customDatas(CloudBackupSnapshotExportJobCustomDataArgs.builder()
.key("string")
.value("string")
.build())
.exportBucketId("string")
.projectId("string")
.snapshotId("string")
.build());
cloud_backup_snapshot_export_job_resource = mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource",
cluster_name="string",
custom_datas=[{
"key": "string",
"value": "string",
}],
export_bucket_id="string",
project_id="string",
snapshot_id="string")
const cloudBackupSnapshotExportJobResource = new mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", {
clusterName: "string",
customDatas: [{
key: "string",
value: "string",
}],
exportBucketId: "string",
projectId: "string",
snapshotId: "string",
});
type: mongodbatlas:CloudBackupSnapshotExportJob
properties:
clusterName: string
customDatas:
- key: string
value: string
exportBucketId: string
projectId: string
snapshotId: string
CloudBackupSnapshotExportJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The CloudBackupSnapshotExportJob resource accepts the following input properties:
Property names follow each language's naming convention (for example, clusterName in TypeScript, cluster_name in Python, ClusterName in C# and Go):
- clusterName (string) - Name of the Atlas cluster whose snapshot you want to export.
- customDatas (list of CloudBackupSnapshotExportJobCustomData) - Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
- exportBucketId (string) - Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API to retrieve the IDs of all available export buckets for a project, or use the mongodbatlas_cloud_backup_snapshot_export_buckets data source.
- projectId (string) - Unique 24-hexadecimal digit string that identifies the project that contains the Atlas cluster whose snapshot you want to export.
- snapshotId (string) - Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API to retrieve the list of snapshot IDs for a cluster, or use the mongodbatlas_cloud_backup_snapshots data source.
Outputs
All input properties are implicitly available as output properties. Additionally, the CloudBackupSnapshotExportJob resource produces the following output properties:
- components (list of CloudBackupSnapshotExportJobComponent) - Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
- createdAt (string) - Timestamp in ISO 8601 date and time format in UTC when the export job was created.
- exportJobId (string) - Unique identifier of the export job.
- exportStatusExportedCollections (int)
- exportStatusTotalCollections (int)
- finishedAt (string) - Timestamp in ISO 8601 date and time format in UTC when the export job completes.
- id (string) - The provider-assigned unique ID for this managed resource.
- prefix (string) - Full path on the cloud provider bucket to the folder where the snapshot is exported. The path has the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
- state (string) - Status of the export job. Value can be one of the following: Queued (the export job is queued), InProgress (the snapshot is being exported), Successful (the export job completed successfully), Failed (the export job failed).
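Because the output properties above are resolved after the export job is created, they can be exported from the program for inspection. The following is a minimal TypeScript sketch, assuming the export bucket and export job from the "Export one snapshot" example above (the placeholder IDs are illustrative):
import * as mongodbatlas from "@pulumi/mongodbatlas";

// Export bucket and export job as in the first example; placeholder IDs are illustrative.
const bucket = new mongodbatlas.CloudBackupSnapshotExportBucket("test", {
    projectId: "{PROJECT_ID}",
    iamRoleId: "{IAM_ROLE_ID}",
    bucketName: "example_bucket",
    cloudProvider: "AWS",
});
const job = new mongodbatlas.CloudBackupSnapshotExportJob("test", {
    projectId: "{PROJECT_ID}",
    clusterName: "{CLUSTER_NAME}",
    snapshotId: "{SNAPSHOT_ID}",
    exportBucketId: bucket.exportBucketId,
});

// Output properties are populated once the export job resource has been created.
export const exportJobId = job.exportJobId;
export const exportJobState = job.state;
export const exportJobPrefix = job.prefix;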
Look up Existing CloudBackupSnapshotExportJob Resource
Get an existing CloudBackupSnapshotExportJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: CloudBackupSnapshotExportJobState, opts?: CustomResourceOptions): CloudBackupSnapshotExportJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
cluster_name: Optional[str] = None,
components: Optional[Sequence[CloudBackupSnapshotExportJobComponentArgs]] = None,
created_at: Optional[str] = None,
custom_datas: Optional[Sequence[CloudBackupSnapshotExportJobCustomDataArgs]] = None,
export_bucket_id: Optional[str] = None,
export_job_id: Optional[str] = None,
export_status_exported_collections: Optional[int] = None,
export_status_total_collections: Optional[int] = None,
finished_at: Optional[str] = None,
prefix: Optional[str] = None,
project_id: Optional[str] = None,
snapshot_id: Optional[str] = None,
state: Optional[str] = None) -> CloudBackupSnapshotExportJob
func GetCloudBackupSnapshotExportJob(ctx *Context, name string, id IDInput, state *CloudBackupSnapshotExportJobState, opts ...ResourceOption) (*CloudBackupSnapshotExportJob, error)
public static CloudBackupSnapshotExportJob Get(string name, Input<string> id, CloudBackupSnapshotExportJobState? state, CustomResourceOptions? opts = null)
public static CloudBackupSnapshotExportJob get(String name, Output<String> id, CloudBackupSnapshotExportJobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
The lookup parameters are equivalent across languages:
- name (resource_name in Python) - The unique name of the resulting resource.
- id - The unique provider ID of the resource to look up.
- state - Any extra arguments used during the lookup.
- opts / options - A bag of options that control this resource's behavior.
The following state properties can be supplied (property names follow each language's naming convention):
- clusterName (string) - Name of the Atlas cluster whose snapshot you want to export.
- components (list of CloudBackupSnapshotExportJobComponent) - Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
- createdAt (string) - Timestamp in ISO 8601 date and time format in UTC when the export job was created.
- customDatas (list of CloudBackupSnapshotExportJobCustomData) - Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
- exportBucketId (string) - Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API to retrieve the IDs of all available export buckets for a project, or use the mongodbatlas_cloud_backup_snapshot_export_buckets data source.
- exportJobId (string) - Unique identifier of the export job.
- exportStatusExportedCollections (int)
- exportStatusTotalCollections (int)
- finishedAt (string) - Timestamp in ISO 8601 date and time format in UTC when the export job completes.
- prefix (string) - Full path on the cloud provider bucket to the folder where the snapshot is exported. The path has the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
- projectId (string) - Unique 24-hexadecimal digit string that identifies the project that contains the Atlas cluster whose snapshot you want to export.
- snapshotId (string) - Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API to retrieve the list of snapshot IDs for a cluster, or use the mongodbatlas_cloud_backup_snapshots data source.
- state (string) - Status of the export job. Value can be one of the following: Queued (the export job is queued), InProgress (the snapshot is being exported), Successful (the export job completed successfully), Failed (the export job failed).
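For example, the following is a minimal TypeScript sketch of looking up an existing export job with the documented get function; the ID value passed here is a placeholder, and the actual provider ID of the export job resource should be supplied instead:
import * as mongodbatlas from "@pulumi/mongodbatlas";

// Look up an existing export job by its provider ID (placeholder shown here)
// and read its state without creating a new resource.
const existingJob = mongodbatlas.CloudBackupSnapshotExportJob.get(
    "existingExportJob",
    "{EXPORT_JOB_RESOURCE_ID}",
);

export const existingJobState = existingJob.state;
export const existingJobFinishedAt = existingJob.finishedAt;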
Supporting Types
CloudBackupSnapshotExportJobComponent, CloudBackupSnapshotExportJobComponentArgs
- exportId (string) - Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
- replicaSetName (string) - Returned for sharded clusters only. Unique identifier of the export job for the replica set.
CloudBackupSnapshotExportJobCustomData, CloudBackupSnapshotExportJobCustomDataArgs
- key (string) - Key of the custom metadata key-value pair.
- value (string) - Value of the custom metadata key-value pair.
Import
Cloud Backup Snapshot Export Job entries can be imported using the project_id, cluster_name and export_job_id (unique identifier of the snapshot export job), in the format PROJECTID-CLUSTERNAME-EXPORTJOBID, e.g.
$ pulumi import mongodbatlas:index/cloudBackupSnapshotExportJob:CloudBackupSnapshotExportJob test 5d0f1f73cf09a29120e173cf-5d116d82014b764445b2f9b5-5d116d82014b764445b2f9b5
For more information see: MongoDB Atlas API Reference.
To learn more about importing existing cloud resources, see Importing resources.
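As with any import, the resource also needs a matching declaration in your program so the imported state has a corresponding definition. A minimal, hypothetical TypeScript sketch follows; the property values are placeholders and must be aligned with the existing export job:
import * as mongodbatlas from "@pulumi/mongodbatlas";

// Declaration for the imported export job; property values must match the existing job in Atlas.
const test = new mongodbatlas.CloudBackupSnapshotExportJob("test", {
    projectId: "{PROJECT_ID}",
    clusterName: "{CLUSTER_NAME}",
    snapshotId: "{SNAPSHOT_ID}",
    exportBucketId: "{EXPORT_BUCKET_ID}",
});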
Package Details
- Repository
- MongoDB Atlas pulumi/pulumi-mongodbatlas
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the mongodbatlas Terraform Provider.