databricks.MwsLogDelivery

Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    Initialize the provider with alias = "mws" and host = "https://accounts.cloud.databricks.com", and use provider = databricks.mws for account-level resources (a sketch follows these notes).

    This resource configures the delivery of the two supported log types from Databricks workspaces: billable usage logs and audit logs.

    You cannot delete a log delivery configuration, but you can disable it when you no longer need it. This matters because the number of enabled log delivery configurations per account is limited: you can create at most two enabled configurations at the account level (no workspace filter) and two enabled configurations per workspace (a workspaceId can occur in the workspace filter of at most two configurations). You can re-enable a disabled configuration, but the request fails if it would violate these limits.
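
    A minimal sketch of that account-level setup in Python, using placeholder IDs; in practice the credentials and storage configuration IDs come from databricks.MwsCredentials and databricks.MwsStorageConfigurations resources:

    import pulumi
    import pulumi_databricks as databricks
    
    # Placeholder values for illustration only.
    databricks_account_id = "00000000-0000-0000-0000-000000000000"
    credentials_id = "<credentials-id>"
    storage_configuration_id = "<storage-configuration-id>"
    
    # Account-level provider aliased as "mws", per the note above.
    mws = databricks.Provider("mws",
        host="https://accounts.cloud.databricks.com",
        account_id=databricks_account_id)
    
    # Create the log delivery configuration through the aliased provider.
    usage_logs = databricks.MwsLogDelivery("usage_logs",
        account_id=databricks_account_id,
        credentials_id=credentials_id,
        storage_configuration_id=storage_configuration_id,
        config_name="Usage Logs",
        log_type="BILLABLE_USAGE",
        output_format="CSV",
        # status is the only field you can update in place; set it to
        # "DISABLED" to turn the configuration off instead of deleting it.
        status="ENABLED",
        opts=pulumi.ResourceOptions(provider=mws))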

    Billable Usage

    CSV files are delivered to <delivery_path_prefix>/billable-usage/csv/ and are named workspaceId=<workspace-id>-usageMonth=<month>.csv. They are delivered daily, overwriting the month's CSV file for each workspace. The format of the CSV file, along with usage examples, is described in the Databricks billable usage documentation.

    A common processing scenario is to apply cost allocation tags, which can be enforced by setting custom_tags on a cluster or through a cluster policy. The report contains a clusterId field that can be joined with the user:ClusterId tag in AWS cost and usage reports.
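
    For illustration, a hedged sketch of that join using pandas; the file paths and the CUR tag column name (resourceTags/user:ClusterId) are assumptions, not part of this resource:

    import pandas as pd
    
    # Hypothetical local copies of a delivered usage report and an AWS
    # Cost and Usage Report export.
    usage = pd.read_csv("workspaceId=123-usageMonth=2024-11.csv")
    cur = pd.read_csv("aws_cost_and_usage.csv")
    
    # Join Databricks usage to AWS costs on the cluster ID that was
    # propagated to AWS resources via custom_tags.
    joined = usage.merge(
        cur,
        left_on="clusterId",
        right_on="resourceTags/user:ClusterId",
        how="left")
    
    print(joined.head())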

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const usageLogs = new databricks.MwsLogDelivery("usage_logs", {
        accountId: databricksAccountId,
        credentialsId: logWriter.credentialsId,
        storageConfigurationId: logBucket.storageConfigurationId,
        deliveryPathPrefix: "billable-usage",
        configName: "Usage Logs",
        logType: "BILLABLE_USAGE",
        outputFormat: "CSV",
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    usage_logs = databricks.MwsLogDelivery("usage_logs",
        account_id=databricks_account_id,
        credentials_id=log_writer["credentialsId"],
        storage_configuration_id=log_bucket["storageConfigurationId"],
        delivery_path_prefix="billable-usage",
        config_name="Usage Logs",
        log_type="BILLABLE_USAGE",
        output_format="CSV")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewMwsLogDelivery(ctx, "usage_logs", &databricks.MwsLogDeliveryArgs{
    			AccountId:              pulumi.Any(databricksAccountId),
    			CredentialsId:          pulumi.Any(logWriter.CredentialsId),
    			StorageConfigurationId: pulumi.Any(logBucket.StorageConfigurationId),
    			DeliveryPathPrefix:     pulumi.String("billable-usage"),
    			ConfigName:             pulumi.String("Usage Logs"),
    			LogType:                pulumi.String("BILLABLE_USAGE"),
    			OutputFormat:           pulumi.String("CSV"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var usageLogs = new Databricks.MwsLogDelivery("usage_logs", new()
        {
            AccountId = databricksAccountId,
            CredentialsId = logWriter.CredentialsId,
            StorageConfigurationId = logBucket.StorageConfigurationId,
            DeliveryPathPrefix = "billable-usage",
            ConfigName = "Usage Logs",
            LogType = "BILLABLE_USAGE",
            OutputFormat = "CSV",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.MwsLogDelivery;
    import com.pulumi.databricks.MwsLogDeliveryArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var usageLogs = new MwsLogDelivery("usageLogs", MwsLogDeliveryArgs.builder()
                .accountId(databricksAccountId)
                .credentialsId(logWriter.credentialsId())
                .storageConfigurationId(logBucket.storageConfigurationId())
                .deliveryPathPrefix("billable-usage")
                .configName("Usage Logs")
                .logType("BILLABLE_USAGE")
                .outputFormat("CSV")
                .build());
    
        }
    }
    
    resources:
      usageLogs:
        type: databricks:MwsLogDelivery
        name: usage_logs
        properties:
          accountId: ${databricksAccountId}
          credentialsId: ${logWriter.credentialsId}
          storageConfigurationId: ${logBucket.storageConfigurationId}
          deliveryPathPrefix: billable-usage
          configName: Usage Logs
          logType: BILLABLE_USAGE
          outputFormat: CSV
    

    Audit Logs

    JSON files with a static schema are delivered to <delivery_path_prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json. Audit logs are available within 15 minutes of activation, although data may occasionally arrive later. New JSON files are delivered every few minutes, potentially overwriting existing files for each workspace. Databricks can overwrite the delivered log files in your bucket at any time; if a file is overwritten, the existing content remains, but it may gain additional lines for more auditable events. Overwriting ensures exactly-once semantics without requiring read or delete access to your account.
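
    A minimal sketch for consuming a delivered file, assuming one JSON event per line (the overwrite semantics above imply line-oriented appends) and a hypothetical local path; the serviceName and actionName fields are examples from the audit log schema, not guaranteed by this resource:

    import json
    
    # Hypothetical local copy of a delivered audit log file.
    path = "workspaceId=123/date=2024-11-12/auditlogs_0.json"
    
    # Each line is one auditable event. Files may be overwritten with the
    # same content plus extra lines, so de-duplicate across re-reads.
    with open(path) as f:
        events = [json.loads(line) for line in f if line.strip()]
    
    for event in events:
        print(event.get("serviceName"), event.get("actionName"))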

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const auditLogs = new databricks.MwsLogDelivery("audit_logs", {
        accountId: databricksAccountId,
        credentialsId: logWriter.credentialsId,
        storageConfigurationId: logBucket.storageConfigurationId,
        deliveryPathPrefix: "audit-logs",
        configName: "Audit Logs",
        logType: "AUDIT_LOGS",
        outputFormat: "JSON",
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    audit_logs = databricks.MwsLogDelivery("audit_logs",
        account_id=databricks_account_id,
        credentials_id=log_writer["credentialsId"],
        storage_configuration_id=log_bucket["storageConfigurationId"],
        delivery_path_prefix="audit-logs",
        config_name="Audit Logs",
        log_type="AUDIT_LOGS",
        output_format="JSON")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewMwsLogDelivery(ctx, "audit_logs", &databricks.MwsLogDeliveryArgs{
    			AccountId:              pulumi.Any(databricksAccountId),
    			CredentialsId:          pulumi.Any(logWriter.CredentialsId),
    			StorageConfigurationId: pulumi.Any(logBucket.StorageConfigurationId),
    			DeliveryPathPrefix:     pulumi.String("audit-logs"),
    			ConfigName:             pulumi.String("Audit Logs"),
    			LogType:                pulumi.String("AUDIT_LOGS"),
    			OutputFormat:           pulumi.String("JSON"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var auditLogs = new Databricks.MwsLogDelivery("audit_logs", new()
        {
            AccountId = databricksAccountId,
            CredentialsId = logWriter.CredentialsId,
            StorageConfigurationId = logBucket.StorageConfigurationId,
            DeliveryPathPrefix = "audit-logs",
            ConfigName = "Audit Logs",
            LogType = "AUDIT_LOGS",
            OutputFormat = "JSON",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.MwsLogDelivery;
    import com.pulumi.databricks.MwsLogDeliveryArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var auditLogs = new MwsLogDelivery("auditLogs", MwsLogDeliveryArgs.builder()
                .accountId(databricksAccountId)
                .credentialsId(logWriter.credentialsId())
                .storageConfigurationId(logBucket.storageConfigurationId())
                .deliveryPathPrefix("audit-logs")
                .configName("Audit Logs")
                .logType("AUDIT_LOGS")
                .outputFormat("JSON")
                .build());
    
        }
    }
    
    resources:
      auditLogs:
        type: databricks:MwsLogDelivery
        name: audit_logs
        properties:
          accountId: ${databricksAccountId}
          credentialsId: ${logWriter.credentialsId}
          storageConfigurationId: ${logBucket.storageConfigurationId}
          deliveryPathPrefix: audit-logs
          configName: Audit Logs
          logType: AUDIT_LOGS
          outputFormat: JSON
    

    The following resources are used in the same context:

    • Provisioning Databricks on AWS guide.
    • databricks.MwsCredentials to configure the cross-account role for creation of new workspaces within AWS.
    • databricks.MwsCustomerManagedKeys to configure KMS keys for new workspaces within AWS.
    • databricks.MwsNetworks to configure VPC & subnets for new workspaces within AWS.
    • databricks.MwsStorageConfigurations to configure the root bucket for new workspaces within AWS.
    • databricks.MwsWorkspaces to set up AWS and GCP workspaces.

    Create MwsLogDelivery Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new MwsLogDelivery(name: string, args: MwsLogDeliveryArgs, opts?: CustomResourceOptions);
    @overload
    def MwsLogDelivery(resource_name: str,
                       args: MwsLogDeliveryArgs,
                       opts: Optional[ResourceOptions] = None)
    
    @overload
    def MwsLogDelivery(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       account_id: Optional[str] = None,
                       credentials_id: Optional[str] = None,
                       log_type: Optional[str] = None,
                       output_format: Optional[str] = None,
                       storage_configuration_id: Optional[str] = None,
                       config_id: Optional[str] = None,
                       config_name: Optional[str] = None,
                       delivery_path_prefix: Optional[str] = None,
                       delivery_start_time: Optional[str] = None,
                       status: Optional[str] = None,
                       workspace_ids_filters: Optional[Sequence[int]] = None)
    func NewMwsLogDelivery(ctx *Context, name string, args MwsLogDeliveryArgs, opts ...ResourceOption) (*MwsLogDelivery, error)
    public MwsLogDelivery(string name, MwsLogDeliveryArgs args, CustomResourceOptions? opts = null)
    public MwsLogDelivery(String name, MwsLogDeliveryArgs args)
    public MwsLogDelivery(String name, MwsLogDeliveryArgs args, CustomResourceOptions options)
    
    type: databricks:MwsLogDelivery
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args MwsLogDeliveryArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args MwsLogDeliveryArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args MwsLogDeliveryArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args MwsLogDeliveryArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args MwsLogDeliveryArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var mwsLogDeliveryResource = new Databricks.MwsLogDelivery("mwsLogDeliveryResource", new()
    {
        AccountId = "string",
        CredentialsId = "string",
        LogType = "string",
        OutputFormat = "string",
        StorageConfigurationId = "string",
        ConfigId = "string",
        ConfigName = "string",
        DeliveryPathPrefix = "string",
        DeliveryStartTime = "string",
        Status = "string",
        WorkspaceIdsFilters = new[]
        {
            0,
        },
    });
    
    example, err := databricks.NewMwsLogDelivery(ctx, "mwsLogDeliveryResource", &databricks.MwsLogDeliveryArgs{
    	AccountId:              pulumi.String("string"),
    	CredentialsId:          pulumi.String("string"),
    	LogType:                pulumi.String("string"),
    	OutputFormat:           pulumi.String("string"),
    	StorageConfigurationId: pulumi.String("string"),
    	ConfigId:               pulumi.String("string"),
    	ConfigName:             pulumi.String("string"),
    	DeliveryPathPrefix:     pulumi.String("string"),
    	DeliveryStartTime:      pulumi.String("string"),
    	Status:                 pulumi.String("string"),
    	WorkspaceIdsFilters: pulumi.IntArray{
    		pulumi.Int(0),
    	},
    })
    
    var mwsLogDeliveryResource = new MwsLogDelivery("mwsLogDeliveryResource", MwsLogDeliveryArgs.builder()
        .accountId("string")
        .credentialsId("string")
        .logType("string")
        .outputFormat("string")
        .storageConfigurationId("string")
        .configId("string")
        .configName("string")
        .deliveryPathPrefix("string")
        .deliveryStartTime("string")
        .status("string")
        .workspaceIdsFilters(0)
        .build());
    
    mws_log_delivery_resource = databricks.MwsLogDelivery("mwsLogDeliveryResource",
        account_id="string",
        credentials_id="string",
        log_type="string",
        output_format="string",
        storage_configuration_id="string",
        config_id="string",
        config_name="string",
        delivery_path_prefix="string",
        delivery_start_time="string",
        status="string",
        workspace_ids_filters=[0])
    
    const mwsLogDeliveryResource = new databricks.MwsLogDelivery("mwsLogDeliveryResource", {
        accountId: "string",
        credentialsId: "string",
        logType: "string",
        outputFormat: "string",
        storageConfigurationId: "string",
        configId: "string",
        configName: "string",
        deliveryPathPrefix: "string",
        deliveryStartTime: "string",
        status: "string",
        workspaceIdsFilters: [0],
    });
    
    type: databricks:MwsLogDelivery
    properties:
        accountId: string
        configId: string
        configName: string
        credentialsId: string
        deliveryPathPrefix: string
        deliveryStartTime: string
        logType: string
        outputFormat: string
        status: string
        storageConfigurationId: string
        workspaceIdsFilters:
            - 0
    

    MwsLogDelivery Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The MwsLogDelivery resource accepts the following input properties:

    AccountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    CredentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    LogType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    OutputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    StorageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    ConfigId string
    Databricks log delivery configuration ID.
    ConfigName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    DeliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    DeliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    Status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    WorkspaceIdsFilters List<int>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    AccountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    CredentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    LogType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    OutputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    StorageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    ConfigId string
    Databricks log delivery configuration ID.
    ConfigName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    DeliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    DeliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    Status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    WorkspaceIdsFilters []int
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId String
    Account ID that can be found in the top-right corner of the Accounts Console.
    credentialsId String
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    logType String
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat String
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    storageConfigurationId String
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    configId String
    Databricks log delivery configuration ID.
    configName String
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    deliveryPathPrefix String
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime String
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    status String
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    workspaceIdsFilters List<Integer>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    credentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    logType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    storageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    configId string
    Databricks log delivery configuration ID.
    configName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    deliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    workspaceIdsFilters number[]
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    account_id str
    Account ID that can be found in the top-right corner of the Accounts Console.
    credentials_id str
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    log_type str
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    output_format str
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    storage_configuration_id str
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    config_id str
    Databricks log delivery configuration ID.
    config_name str
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    delivery_path_prefix str
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    delivery_start_time str
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    status str
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    workspace_ids_filters Sequence[int]
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId String
    Account ID that can be found in the top-right corner of the Accounts Console.
    credentialsId String
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    logType String
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat String
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    storageConfigurationId String
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    configId String
    Databricks log delivery configuration ID.
    configName String
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    deliveryPathPrefix String
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime String
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    status String
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    workspaceIdsFilters List<Number>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
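
    A hedged sketch combining several of the optional inputs above, with placeholder IDs throughout:

    import pulumi_databricks as databricks
    
    # Deliver audit logs for two specific workspaces only; all IDs here
    # are placeholders for values from your account and MWS resources.
    filtered_logs = databricks.MwsLogDelivery("filtered_logs",
        account_id="00000000-0000-0000-0000-000000000000",
        credentials_id="<credentials-id>",
        storage_configuration_id="<storage-configuration-id>",
        config_name="Filtered Audit Logs",
        log_type="AUDIT_LOGS",
        output_format="JSON",
        delivery_start_time="2024-01",
        workspace_ids_filters=[1234567890, 2345678901])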

    Outputs

    All input properties are implicitly available as output properties. Additionally, the MwsLogDelivery resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing MwsLogDelivery Resource

    Get an existing MwsLogDelivery resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: MwsLogDeliveryState, opts?: CustomResourceOptions): MwsLogDelivery
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            account_id: Optional[str] = None,
            config_id: Optional[str] = None,
            config_name: Optional[str] = None,
            credentials_id: Optional[str] = None,
            delivery_path_prefix: Optional[str] = None,
            delivery_start_time: Optional[str] = None,
            log_type: Optional[str] = None,
            output_format: Optional[str] = None,
            status: Optional[str] = None,
            storage_configuration_id: Optional[str] = None,
            workspace_ids_filters: Optional[Sequence[int]] = None) -> MwsLogDelivery
    func GetMwsLogDelivery(ctx *Context, name string, id IDInput, state *MwsLogDeliveryState, opts ...ResourceOption) (*MwsLogDelivery, error)
    public static MwsLogDelivery Get(string name, Input<string> id, MwsLogDeliveryState? state, CustomResourceOptions? opts = null)
    public static MwsLogDelivery get(String name, Output<String> id, MwsLogDeliveryState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
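
    For example, in Python, with a placeholder resource ID:

    import pulumi
    import pulumi_databricks as databricks
    
    # Look up an existing configuration by its provider-assigned ID
    # (placeholder value) and re-export one of its properties.
    existing = databricks.MwsLogDelivery.get("usage_logs", "<resource-id>")
    pulumi.export("logDeliveryStatus", existing.status)
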
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AccountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    ConfigId string
    Databricks log delivery configuration ID.
    ConfigName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    CredentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    DeliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    DeliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    LogType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    OutputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    Status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    StorageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    WorkspaceIdsFilters List<int>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    AccountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    ConfigId string
    Databricks log delivery configuration ID.
    ConfigName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    CredentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    DeliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    DeliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    LogType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    OutputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    Status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    StorageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    WorkspaceIdsFilters []int
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId String
    Account ID that can be found in the top-right corner of the Accounts Console.
    configId String
    Databricks log delivery configuration ID.
    configName String
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    credentialsId String
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    deliveryPathPrefix String
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime String
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    logType String
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat String
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    status String
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    storageConfigurationId String
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    workspaceIdsFilters List<Integer>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId string
    Account ID that can be found in the top-right corner of the Accounts Console.
    configId string
    Databricks log delivery configuration ID.
    configName string
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    credentialsId string
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    deliveryPathPrefix string
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime string
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    logType string
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat string
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    status string
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    storageConfigurationId string
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    workspaceIdsFilters number[]
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    account_id str
    Account ID that can be found in the top-right corner of the Accounts Console.
    config_id str
    Databricks log delivery configuration ID.
    config_name str
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    credentials_id str
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    delivery_path_prefix str
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    delivery_start_time str
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    log_type str
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    output_format str
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    status str
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    storage_configuration_id str
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    workspace_ids_filters Sequence[int]
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.
    accountId String
    Account ID that can be found in the top-right corner of the Accounts Console.
    configId String
    Databricks log delivery configuration ID.
    configName String
    The optional human-readable name of the log delivery configuration. Defaults to empty.
    credentialsId String
    The ID for a Databricks credential configuration that represents the AWS IAM role with policy and trust relationship as described in the main billable usage documentation page.
    deliveryPathPrefix String
    Defaults to empty, which means that logs are delivered to the root of the bucket. The value must be a valid S3 object key. It must not start or end with a slash character.
    deliveryStartTime String
    The optional start month and year for delivery, specified in YYYY-MM format. Defaults to current year and month. Usage is not available before 2019-03.
    logType String
    The type of log delivery. BILLABLE_USAGE and AUDIT_LOGS are supported.
    outputFormat String
    The file type of log delivery. Currently CSV (for BILLABLE_USAGE) and JSON (for AUDIT_LOGS) are supported.
    status String
    Status of log delivery configuration. Set to ENABLED or DISABLED. Defaults to ENABLED. This is the only field you can update.
    storageConfigurationId String
    The ID for a Databricks storage configuration that represents the S3 bucket with bucket policy as described in the main billable usage documentation page.
    workspaceIdsFilters List<Number>
    By default, this log configuration applies to all workspaces associated with your account ID. If your account is on the multitenant version of the platform, or on a select custom plan that allows multiple workspaces per account, you may have multiple workspaces associated with your account ID. You can optionally set this field to an array of workspace IDs. If you plan to use different log delivery configurations for different workspaces, set this field explicitly rather than leaving it blank; if you leave it blank and your account later gains additional workspaces, this configuration will also apply to the new workspaces.

    Import

    Importing this resource is not currently supported.

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.