
azure-native.synapse.BigDataPool

These are the docs for Azure Native v1. We recommend using the latest version, Azure Native v2.
Azure Native v1 v1.104.0 published on Thursday, Jul 6, 2023 by Pulumi

    A Big Data pool. API Version: 2021-03-01.

    Example Usage

    Create or update a Big Data pool

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using AzureNative = Pulumi.AzureNative;
    
    return await Deployment.RunAsync(() => 
    {
        var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
        {
            AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
            {
                DelayInMinutes = 15,
                Enabled = true,
            },
            AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
            {
                Enabled = true,
                MaxNodeCount = 50,
                MinNodeCount = 3,
            },
            BigDataPoolName = "ExamplePool",
            DefaultSparkLogFolder = "/logs",
            LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
            {
                Content = "",
                Filename = "requirements.txt",
            },
            Location = "West US 2",
            NodeCount = 4,
            NodeSize = "Medium",
            NodeSizeFamily = "MemoryOptimized",
            ResourceGroupName = "ExampleResourceGroup",
            SparkEventsFolder = "/events",
            SparkVersion = "3.3",
            Tags = 
            {
                { "key", "value" },
            },
            WorkspaceName = "ExampleWorkspace",
        });
    
    });
    
    package main
    
    import (
    	synapse "github.com/pulumi/pulumi-azure-native-sdk/synapse"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
    			AutoPause: &synapse.AutoPausePropertiesArgs{
    				DelayInMinutes: pulumi.Int(15),
    				Enabled:        pulumi.Bool(true),
    			},
    			AutoScale: &synapse.AutoScalePropertiesArgs{
    				Enabled:      pulumi.Bool(true),
    				MaxNodeCount: pulumi.Int(50),
    				MinNodeCount: pulumi.Int(3),
    			},
    			BigDataPoolName:       pulumi.String("ExamplePool"),
    			DefaultSparkLogFolder: pulumi.String("/logs"),
    			LibraryRequirements: &synapse.LibraryRequirementsArgs{
    				Content:  pulumi.String(""),
    				Filename: pulumi.String("requirements.txt"),
    			},
    			Location:          pulumi.String("West US 2"),
    			NodeCount:         pulumi.Int(4),
    			NodeSize:          pulumi.String("Medium"),
    			NodeSizeFamily:    pulumi.String("MemoryOptimized"),
    			ResourceGroupName: pulumi.String("ExampleResourceGroup"),
    			SparkEventsFolder: pulumi.String("/events"),
    			SparkVersion:      pulumi.String("3.3"),
    			Tags: pulumi.StringMap{
    				"key": pulumi.String("value"),
    			},
    			WorkspaceName: pulumi.String("ExampleWorkspace"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.azurenative.synapse.BigDataPool;
    import com.pulumi.azurenative.synapse.BigDataPoolArgs;
    import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
    import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
    import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
    import java.util.Map;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
                .autoPause(AutoPausePropertiesArgs.builder()
                    .delayInMinutes(15)
                    .enabled(true)
                    .build())
                .autoScale(AutoScalePropertiesArgs.builder()
                    .enabled(true)
                    .maxNodeCount(50)
                    .minNodeCount(3)
                    .build())
                .bigDataPoolName("ExamplePool")
                .defaultSparkLogFolder("/logs")
                .libraryRequirements(LibraryRequirementsArgs.builder()
                    .content("")
                    .filename("requirements.txt")
                    .build())
                .location("West US 2")
                .nodeCount(4)
                .nodeSize("Medium")
                .nodeSizeFamily("MemoryOptimized")
                .resourceGroupName("ExampleResourceGroup")
                .sparkEventsFolder("/events")
                .sparkVersion("3.3")
                .tags(Map.of("key", "value"))
                .workspaceName("ExampleWorkspace")
                .build());
        }
    }
    
    import pulumi
    import pulumi_azure_native as azure_native
    
    big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
        auto_pause=azure_native.synapse.AutoPausePropertiesArgs(
            delay_in_minutes=15,
            enabled=True,
        ),
        auto_scale=azure_native.synapse.AutoScalePropertiesArgs(
            enabled=True,
            max_node_count=50,
            min_node_count=3,
        ),
        big_data_pool_name="ExamplePool",
        default_spark_log_folder="/logs",
        library_requirements=azure_native.synapse.LibraryRequirementsArgs(
            content="",
            filename="requirements.txt",
        ),
        location="West US 2",
        node_count=4,
        node_size="Medium",
        node_size_family="MemoryOptimized",
        resource_group_name="ExampleResourceGroup",
        spark_events_folder="/events",
        spark_version="3.3",
        tags={
            "key": "value",
        },
        workspace_name="ExampleWorkspace")
    
    import * as pulumi from "@pulumi/pulumi";
    import * as azure_native from "@pulumi/azure-native";
    
    const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
        autoPause: {
            delayInMinutes: 15,
            enabled: true,
        },
        autoScale: {
            enabled: true,
            maxNodeCount: 50,
            minNodeCount: 3,
        },
        bigDataPoolName: "ExamplePool",
        defaultSparkLogFolder: "/logs",
        libraryRequirements: {
            content: "",
            filename: "requirements.txt",
        },
        location: "West US 2",
        nodeCount: 4,
        nodeSize: "Medium",
        nodeSizeFamily: "MemoryOptimized",
        resourceGroupName: "ExampleResourceGroup",
        sparkEventsFolder: "/events",
        sparkVersion: "3.3",
        tags: {
            key: "value",
        },
        workspaceName: "ExampleWorkspace",
    });
    
    resources:
      bigDataPool:
        type: azure-native:synapse:BigDataPool
        properties:
          autoPause:
            delayInMinutes: 15
            enabled: true
          autoScale:
            enabled: true
            maxNodeCount: 50
            minNodeCount: 3
          bigDataPoolName: ExamplePool
          defaultSparkLogFolder: /logs
          libraryRequirements:
            content: ""
            filename: requirements.txt
          location: West US 2
          nodeCount: 4
          nodeSize: Medium
          nodeSizeFamily: MemoryOptimized
          resourceGroupName: ExampleResourceGroup
          sparkEventsFolder: /events
          sparkVersion: '3.3'
          tags:
            key: value
          workspaceName: ExampleWorkspace
    

    Create BigDataPool Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
    @overload
    def BigDataPool(resource_name: str,
                    args: BigDataPoolArgs,
                    opts: Optional[ResourceOptions] = None)
    
    @overload
    def BigDataPool(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    resource_group_name: Optional[str] = None,
                    workspace_name: Optional[str] = None,
                    library_requirements: Optional[LibraryRequirementsArgs] = None,
                    node_size: Optional[Union[str, NodeSize]] = None,
                    creation_date: Optional[str] = None,
                    custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
                    default_spark_log_folder: Optional[str] = None,
                    dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
                    force: Optional[bool] = None,
                    is_compute_isolation_enabled: Optional[bool] = None,
                    auto_pause: Optional[AutoPausePropertiesArgs] = None,
                    location: Optional[str] = None,
                    node_count: Optional[int] = None,
                    cache_size: Optional[int] = None,
                    node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                    provisioning_state: Optional[str] = None,
                    big_data_pool_name: Optional[str] = None,
                    session_level_packages_enabled: Optional[bool] = None,
                    spark_config_properties: Optional[LibraryRequirementsArgs] = None,
                    spark_events_folder: Optional[str] = None,
                    spark_version: Optional[str] = None,
                    tags: Optional[Mapping[str, str]] = None,
                    auto_scale: Optional[AutoScalePropertiesArgs] = None)
    func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
    public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
    public BigDataPool(String name, BigDataPoolArgs args)
    public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
    
    type: azure-native:synapse:BigDataPool
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args BigDataPoolArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var bigDataPoolResource = new AzureNative.Synapse.BigDataPool("bigDataPoolResource", new()
    {
        ResourceGroupName = "string",
        WorkspaceName = "string",
        LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
        {
            Content = "string",
            Filename = "string",
        },
        NodeSize = "string",
        CreationDate = "string",
        CustomLibraries = new[]
        {
            new AzureNative.Synapse.Inputs.LibraryInfoArgs
            {
                ContainerName = "string",
                Name = "string",
                Path = "string",
                Type = "string",
            },
        },
        DefaultSparkLogFolder = "string",
        DynamicExecutorAllocation = new AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs
        {
            Enabled = false,
        },
        Force = false,
        IsComputeIsolationEnabled = false,
        AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
        {
            DelayInMinutes = 0,
            Enabled = false,
        },
        Location = "string",
        NodeCount = 0,
        CacheSize = 0,
        NodeSizeFamily = "string",
        ProvisioningState = "string",
        BigDataPoolName = "string",
        SessionLevelPackagesEnabled = false,
        SparkConfigProperties = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
        {
            Content = "string",
            Filename = "string",
        },
        SparkEventsFolder = "string",
        SparkVersion = "string",
        Tags = 
        {
            { "string", "string" },
        },
        AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
        {
            Enabled = false,
            MaxNodeCount = 0,
            MinNodeCount = 0,
        },
    });
    
    example, err := synapse.NewBigDataPool(ctx, "bigDataPoolResource", &synapse.BigDataPoolArgs{
    	ResourceGroupName: pulumi.String("string"),
    	WorkspaceName:     pulumi.String("string"),
    	LibraryRequirements: &synapse.LibraryRequirementsArgs{
    		Content:  pulumi.String("string"),
    		Filename: pulumi.String("string"),
    	},
    	NodeSize:     pulumi.String("string"),
    	CreationDate: pulumi.String("string"),
    	CustomLibraries: synapse.LibraryInfoArray{
    		&synapse.LibraryInfoArgs{
    			ContainerName: pulumi.String("string"),
    			Name:          pulumi.String("string"),
    			Path:          pulumi.String("string"),
    			Type:          pulumi.String("string"),
    		},
    	},
    	DefaultSparkLogFolder: pulumi.String("string"),
    	DynamicExecutorAllocation: &synapse.DynamicExecutorAllocationArgs{
    		Enabled: pulumi.Bool(false),
    	},
    	Force:                     pulumi.Bool(false),
    	IsComputeIsolationEnabled: pulumi.Bool(false),
    	AutoPause: &synapse.AutoPausePropertiesArgs{
    		DelayInMinutes: pulumi.Int(0),
    		Enabled:        pulumi.Bool(false),
    	},
    	Location:                    pulumi.String("string"),
    	NodeCount:                   pulumi.Int(0),
    	CacheSize:                   pulumi.Int(0),
    	NodeSizeFamily:              pulumi.String("string"),
    	ProvisioningState:           pulumi.String("string"),
    	BigDataPoolName:             pulumi.String("string"),
    	SessionLevelPackagesEnabled: pulumi.Bool(false),
    	SparkConfigProperties: &synapse.LibraryRequirementsArgs{
    		Content:  pulumi.String("string"),
    		Filename: pulumi.String("string"),
    	},
    	SparkEventsFolder: pulumi.String("string"),
    	SparkVersion:      pulumi.String("string"),
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	AutoScale: &synapse.AutoScalePropertiesArgs{
    		Enabled:      pulumi.Bool(false),
    		MaxNodeCount: pulumi.Int(0),
    		MinNodeCount: pulumi.Int(0),
    	},
    })
    
    var bigDataPoolResource = new BigDataPool("bigDataPoolResource", BigDataPoolArgs.builder()
        .resourceGroupName("string")
        .workspaceName("string")
        .libraryRequirements(LibraryRequirementsArgs.builder()
            .content("string")
            .filename("string")
            .build())
        .nodeSize("string")
        .creationDate("string")
        .customLibraries(LibraryInfoArgs.builder()
            .containerName("string")
            .name("string")
            .path("string")
            .type("string")
            .build())
        .defaultSparkLogFolder("string")
        .dynamicExecutorAllocation(DynamicExecutorAllocationArgs.builder()
            .enabled(false)
            .build())
        .force(false)
        .isComputeIsolationEnabled(false)
        .autoPause(AutoPausePropertiesArgs.builder()
            .delayInMinutes(0)
            .enabled(false)
            .build())
        .location("string")
        .nodeCount(0)
        .cacheSize(0)
        .nodeSizeFamily("string")
        .provisioningState("string")
        .bigDataPoolName("string")
        .sessionLevelPackagesEnabled(false)
        .sparkConfigProperties(LibraryRequirementsArgs.builder()
            .content("string")
            .filename("string")
            .build())
        .sparkEventsFolder("string")
        .sparkVersion("string")
        .tags(Map.of("string", "string"))
        .autoScale(AutoScalePropertiesArgs.builder()
            .enabled(false)
            .maxNodeCount(0)
            .minNodeCount(0)
            .build())
        .build());
    
    big_data_pool_resource = azure_native.synapse.BigDataPool("bigDataPoolResource",
        resource_group_name="string",
        workspace_name="string",
        library_requirements={
            "content": "string",
            "filename": "string",
        },
        node_size="string",
        creation_date="string",
        custom_libraries=[{
            "containerName": "string",
            "name": "string",
            "path": "string",
            "type": "string",
        }],
        default_spark_log_folder="string",
        dynamic_executor_allocation={
            "enabled": False,
        },
        force=False,
        is_compute_isolation_enabled=False,
        auto_pause={
            "delayInMinutes": 0,
            "enabled": False,
        },
        location="string",
        node_count=0,
        cache_size=0,
        node_size_family="string",
        provisioning_state="string",
        big_data_pool_name="string",
        session_level_packages_enabled=False,
        spark_config_properties={
            "content": "string",
            "filename": "string",
        },
        spark_events_folder="string",
        spark_version="string",
        tags={
            "string": "string",
        },
        auto_scale={
            "enabled": False,
            "maxNodeCount": 0,
            "minNodeCount": 0,
        })
    
    const bigDataPoolResource = new azure_native.synapse.BigDataPool("bigDataPoolResource", {
        resourceGroupName: "string",
        workspaceName: "string",
        libraryRequirements: {
            content: "string",
            filename: "string",
        },
        nodeSize: "string",
        creationDate: "string",
        customLibraries: [{
            containerName: "string",
            name: "string",
            path: "string",
            type: "string",
        }],
        defaultSparkLogFolder: "string",
        dynamicExecutorAllocation: {
            enabled: false,
        },
        force: false,
        isComputeIsolationEnabled: false,
        autoPause: {
            delayInMinutes: 0,
            enabled: false,
        },
        location: "string",
        nodeCount: 0,
        cacheSize: 0,
        nodeSizeFamily: "string",
        provisioningState: "string",
        bigDataPoolName: "string",
        sessionLevelPackagesEnabled: false,
        sparkConfigProperties: {
            content: "string",
            filename: "string",
        },
        sparkEventsFolder: "string",
        sparkVersion: "string",
        tags: {
            string: "string",
        },
        autoScale: {
            enabled: false,
            maxNodeCount: 0,
            minNodeCount: 0,
        },
    });
    
    type: azure-native:synapse:BigDataPool
    properties:
        autoPause:
            delayInMinutes: 0
            enabled: false
        autoScale:
            enabled: false
            maxNodeCount: 0
            minNodeCount: 0
        bigDataPoolName: string
        cacheSize: 0
        creationDate: string
        customLibraries:
            - containerName: string
              name: string
              path: string
              type: string
        defaultSparkLogFolder: string
        dynamicExecutorAllocation:
            enabled: false
        force: false
        isComputeIsolationEnabled: false
        libraryRequirements:
            content: string
            filename: string
        location: string
        nodeCount: 0
        nodeSize: string
        nodeSizeFamily: string
        provisioningState: string
        resourceGroupName: string
        sessionLevelPackagesEnabled: false
        sparkConfigProperties:
            content: string
            filename: string
        sparkEventsFolder: string
        sparkVersion: string
        tags:
            string: string
        workspaceName: string
    

    BigDataPool Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
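
    For example, the auto-pause input can be written either way; a minimal sketch (the dictionary keys follow the camelCase names used in the constructor example above):

    import pulumi_azure_native as azure_native

    # As an argument class:
    auto_pause = azure_native.synapse.AutoPausePropertiesArgs(
        delay_in_minutes=15,
        enabled=True,
    )

    # As an equivalent dictionary literal (camelCase keys):
    auto_pause = {
        "delayInMinutes": 15,
        "enabled": True,
    }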

    The BigDataPool resource accepts the following input properties:

    ResourceGroupName string
    The name of the resource group. The name is case insensitive.
    WorkspaceName string
    The name of the workspace
    AutoPause Pulumi.AzureNative.Synapse.Inputs.AutoPauseProperties
    Auto-pausing properties
    AutoScale Pulumi.AzureNative.Synapse.Inputs.AutoScaleProperties
    Auto-scaling properties
    BigDataPoolName string
    Big Data pool name
    CacheSize int
    The cache size
    CreationDate string
    The time when the Big Data pool was created.
    CustomLibraries List<Pulumi.AzureNative.Synapse.Inputs.LibraryInfo>
    List of custom libraries/packages associated with the spark pool.
    DefaultSparkLogFolder string
    The default folder where Spark logs will be written.
    DynamicExecutorAllocation Pulumi.AzureNative.Synapse.Inputs.DynamicExecutorAllocation
    Dynamic Executor Allocation
    Force bool
    Whether to stop any running jobs in the Big Data pool
    IsComputeIsolationEnabled bool
    Whether compute isolation is required or not.
    LibraryRequirements Pulumi.AzureNative.Synapse.Inputs.LibraryRequirements
    Library version requirements
    Location string
    The geo-location where the resource lives
    NodeCount int
    The number of nodes in the Big Data pool.
    NodeSize string | Pulumi.AzureNative.Synapse.NodeSize
    The level of compute power that each node in the Big Data pool has.
    NodeSizeFamily string | Pulumi.AzureNative.Synapse.NodeSizeFamily
    The kind of nodes that the Big Data pool provides.
    ProvisioningState string
    The state of the Big Data pool.
    SessionLevelPackagesEnabled bool
    Whether session-level packages are enabled.
    SparkConfigProperties Pulumi.AzureNative.Synapse.Inputs.LibraryRequirements
    Spark configuration file to specify additional properties
    SparkEventsFolder string
    The Spark events folder
    SparkVersion string
    The Apache Spark version.
    Tags Dictionary<string, string>
    Resource tags.
    ResourceGroupName string
    The name of the resource group. The name is case insensitive.
    WorkspaceName string
    The name of the workspace
    AutoPause AutoPausePropertiesArgs
    Auto-pausing properties
    AutoScale AutoScalePropertiesArgs
    Auto-scaling properties
    BigDataPoolName string
    Big Data pool name
    CacheSize int
    The cache size
    CreationDate string
    The time when the Big Data pool was created.
    CustomLibraries []LibraryInfoArgs
    List of custom libraries/packages associated with the spark pool.
    DefaultSparkLogFolder string
    The default folder where Spark logs will be written.
    DynamicExecutorAllocation DynamicExecutorAllocationArgs
    Dynamic Executor Allocation
    Force bool
    Whether to stop any running jobs in the Big Data pool
    IsComputeIsolationEnabled bool
    Whether compute isolation is required or not.
    LibraryRequirements LibraryRequirementsArgs
    Library version requirements
    Location string
    The geo-location where the resource lives
    NodeCount int
    The number of nodes in the Big Data pool.
    NodeSize string | NodeSize
    The level of compute power that each node in the Big Data pool has.
    NodeSizeFamily string | NodeSizeFamily
    The kind of nodes that the Big Data pool provides.
    ProvisioningState string
    The state of the Big Data pool.
    SessionLevelPackagesEnabled bool
    Whether session-level packages are enabled.
    SparkConfigProperties LibraryRequirementsArgs
    Spark configuration file to specify additional properties
    SparkEventsFolder string
    The Spark events folder
    SparkVersion string
    The Apache Spark version.
    Tags map[string]string
    Resource tags.
    resourceGroupName String
    The name of the resource group. The name is case insensitive.
    workspaceName String
    The name of the workspace
    autoPause AutoPauseProperties
    Auto-pausing properties
    autoScale AutoScaleProperties
    Auto-scaling properties
    bigDataPoolName String
    Big Data pool name
    cacheSize Integer
    The cache size
    creationDate String
    The time when the Big Data pool was created.
    customLibraries List<LibraryInfo>
    List of custom libraries/packages associated with the spark pool.
    defaultSparkLogFolder String
    The default folder where Spark logs will be written.
    dynamicExecutorAllocation DynamicExecutorAllocation
    Dynamic Executor Allocation
    force Boolean
    Whether to stop any running jobs in the Big Data pool
    isComputeIsolationEnabled Boolean
    Whether compute isolation is required or not.
    libraryRequirements LibraryRequirements
    Library version requirements
    location String
    The geo-location where the resource lives
    nodeCount Integer
    The number of nodes in the Big Data pool.
    nodeSize String | NodeSize
    The level of compute power that each node in the Big Data pool has.
    nodeSizeFamily String | NodeSizeFamily
    The kind of nodes that the Big Data pool provides.
    provisioningState String
    The state of the Big Data pool.
    sessionLevelPackagesEnabled Boolean
    Whether session-level packages are enabled.
    sparkConfigProperties LibraryRequirements
    Spark configuration file to specify additional properties
    sparkEventsFolder String
    The Spark events folder
    sparkVersion String
    The Apache Spark version.
    tags Map<String,String>
    Resource tags.
    resourceGroupName string
    The name of the resource group. The name is case insensitive.
    workspaceName string
    The name of the workspace
    autoPause AutoPauseProperties
    Auto-pausing properties
    autoScale AutoScaleProperties
    Auto-scaling properties
    bigDataPoolName string
    Big Data pool name
    cacheSize number
    The cache size
    creationDate string
    The time when the Big Data pool was created.
    customLibraries LibraryInfo[]
    List of custom libraries/packages associated with the spark pool.
    defaultSparkLogFolder string
    The default folder where Spark logs will be written.
    dynamicExecutorAllocation DynamicExecutorAllocation
    Dynamic Executor Allocation
    force boolean
    Whether to stop any running jobs in the Big Data pool
    isComputeIsolationEnabled boolean
    Whether compute isolation is required or not.
    libraryRequirements LibraryRequirements
    Library version requirements
    location string
    The geo-location where the resource lives
    nodeCount number
    The number of nodes in the Big Data pool.
    nodeSize string | NodeSize
    The level of compute power that each node in the Big Data pool has.
    nodeSizeFamily string | NodeSizeFamily
    The kind of nodes that the Big Data pool provides.
    provisioningState string
    The state of the Big Data pool.
    sessionLevelPackagesEnabled boolean
    Whether session-level packages are enabled.
    sparkConfigProperties LibraryRequirements
    Spark configuration file to specify additional properties
    sparkEventsFolder string
    The Spark events folder
    sparkVersion string
    The Apache Spark version.
    tags {[key: string]: string}
    Resource tags.
    resource_group_name str
    The name of the resource group. The name is case insensitive.
    workspace_name str
    The name of the workspace
    auto_pause AutoPausePropertiesArgs
    Auto-pausing properties
    auto_scale AutoScalePropertiesArgs
    Auto-scaling properties
    big_data_pool_name str
    Big Data pool name
    cache_size int
    The cache size
    creation_date str
    The time when the Big Data pool was created.
    custom_libraries Sequence[LibraryInfoArgs]
    List of custom libraries/packages associated with the spark pool.
    default_spark_log_folder str
    The default folder where Spark logs will be written.
    dynamic_executor_allocation DynamicExecutorAllocationArgs
    Dynamic Executor Allocation
    force bool
    Whether to stop any running jobs in the Big Data pool
    is_compute_isolation_enabled bool
    Whether compute isolation is required or not.
    library_requirements LibraryRequirementsArgs
    Library version requirements
    location str
    The geo-location where the resource lives
    node_count int
    The number of nodes in the Big Data pool.
    node_size str | NodeSize
    The level of compute power that each node in the Big Data pool has.
    node_size_family str | NodeSizeFamily
    The kind of nodes that the Big Data pool provides.
    provisioning_state str
    The state of the Big Data pool.
    session_level_packages_enabled bool
    Whether session-level packages are enabled.
    spark_config_properties LibraryRequirementsArgs
    Spark configuration file to specify additional properties
    spark_events_folder str
    The Spark events folder
    spark_version str
    The Apache Spark version.
    tags Mapping[str, str]
    Resource tags.
    resourceGroupName String
    The name of the resource group. The name is case insensitive.
    workspaceName String
    The name of the workspace
    autoPause Property Map
    Auto-pausing properties
    autoScale Property Map
    Auto-scaling properties
    bigDataPoolName String
    Big Data pool name
    cacheSize Number
    The cache size
    creationDate String
    The time when the Big Data pool was created.
    customLibraries List<Property Map>
    List of custom libraries/packages associated with the spark pool.
    defaultSparkLogFolder String
    The default folder where Spark logs will be written.
    dynamicExecutorAllocation Property Map
    Dynamic Executor Allocation
    force Boolean
    Whether to stop any running jobs in the Big Data pool
    isComputeIsolationEnabled Boolean
    Whether compute isolation is required or not.
    libraryRequirements Property Map
    Library version requirements
    location String
    The geo-location where the resource lives
    nodeCount Number
    The number of nodes in the Big Data pool.
    nodeSize String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge"
    The level of compute power that each node in the Big Data pool has.
    nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU"
    The kind of nodes that the Big Data pool provides.
    provisioningState String
    The state of the Big Data pool.
    sessionLevelPackagesEnabled Boolean
    Whether session-level packages are enabled.
    sparkConfigProperties Property Map
    Spark configuration file to specify additional properties
    sparkEventsFolder String
    The Spark events folder
    sparkVersion String
    The Apache Spark version.
    tags Map<String>
    Resource tags.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    LastSucceededTimestamp string
    The time when the Big Data pool was updated successfully.
    Name string
    The name of the resource
    Type string
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    Id string
    The provider-assigned unique ID for this managed resource.
    LastSucceededTimestamp string
    The time when the Big Data pool was updated successfully.
    Name string
    The name of the resource
    Type string
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    id String
    The provider-assigned unique ID for this managed resource.
    lastSucceededTimestamp String
    The time when the Big Data pool was updated successfully.
    name String
    The name of the resource
    type String
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    id string
    The provider-assigned unique ID for this managed resource.
    lastSucceededTimestamp string
    The time when the Big Data pool was updated successfully.
    name string
    The name of the resource
    type string
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    id str
    The provider-assigned unique ID for this managed resource.
    last_succeeded_timestamp str
    The time when the Big Data pool was updated successfully.
    name str
    The name of the resource
    type str
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
    id String
    The provider-assigned unique ID for this managed resource.
    lastSucceededTimestamp String
    The time when the Big Data pool was updated successfully.
    name String
    The name of the resource
    type String
    The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
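
    For example, a minimal Python sketch that exports some of these outputs (the pool arguments are trimmed down from the Example Usage section above):

    import pulumi
    import pulumi_azure_native as azure_native

    big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
        resource_group_name="ExampleResourceGroup",
        workspace_name="ExampleWorkspace",
        node_count=4,
        node_size="Medium",
        node_size_family="MemoryOptimized",
        spark_version="3.3")

    # Provider-assigned outputs become available once the resource is created.
    pulumi.export("poolId", big_data_pool.id)
    pulumi.export("lastSucceeded", big_data_pool.last_succeeded_timestamp)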

    Supporting Types

    AutoPauseProperties, AutoPausePropertiesArgs

    DelayInMinutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    Enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    DelayInMinutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    Enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes Integer
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled Boolean
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes number
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled boolean
    Whether auto-pausing is enabled for the Big Data pool.
    delay_in_minutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes Number
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled Boolean
    Whether auto-pausing is enabled for the Big Data pool.

    AutoPausePropertiesResponse, AutoPausePropertiesResponseArgs

    DelayInMinutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    Enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    DelayInMinutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    Enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes Integer
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled Boolean
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes number
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled boolean
    Whether auto-pausing is enabled for the Big Data pool.
    delay_in_minutes int
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled bool
    Whether auto-pausing is enabled for the Big Data pool.
    delayInMinutes Number
    Number of minutes of idle time before the Big Data pool is automatically paused.
    enabled Boolean
    Whether auto-pausing is enabled for the Big Data pool.

    AutoScaleProperties, AutoScalePropertiesArgs

    Enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    MaxNodeCount int
    The maximum number of nodes the Big Data pool can support.
    MinNodeCount int
    The minimum number of nodes the Big Data pool can support.
    Enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    MaxNodeCount int
    The maximum number of nodes the Big Data pool can support.
    MinNodeCount int
    The minimum number of nodes the Big Data pool can support.
    enabled Boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount Integer
    The maximum number of nodes the Big Data pool can support.
    minNodeCount Integer
    The minimum number of nodes the Big Data pool can support.
    enabled boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount number
    The maximum number of nodes the Big Data pool can support.
    minNodeCount number
    The minimum number of nodes the Big Data pool can support.
    enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    max_node_count int
    The maximum number of nodes the Big Data pool can support.
    min_node_count int
    The minimum number of nodes the Big Data pool can support.
    enabled Boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount Number
    The maximum number of nodes the Big Data pool can support.
    minNodeCount Number
    The minimum number of nodes the Big Data pool can support.

    AutoScalePropertiesResponse, AutoScalePropertiesResponseArgs

    Enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    MaxNodeCount int
    The maximum number of nodes the Big Data pool can support.
    MinNodeCount int
    The minimum number of nodes the Big Data pool can support.
    Enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    MaxNodeCount int
    The maximum number of nodes the Big Data pool can support.
    MinNodeCount int
    The minimum number of nodes the Big Data pool can support.
    enabled Boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount Integer
    The maximum number of nodes the Big Data pool can support.
    minNodeCount Integer
    The minimum number of nodes the Big Data pool can support.
    enabled boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount number
    The maximum number of nodes the Big Data pool can support.
    minNodeCount number
    The minimum number of nodes the Big Data pool can support.
    enabled bool
    Whether automatic scaling is enabled for the Big Data pool.
    max_node_count int
    The maximum number of nodes the Big Data pool can support.
    min_node_count int
    The minimum number of nodes the Big Data pool can support.
    enabled Boolean
    Whether automatic scaling is enabled for the Big Data pool.
    maxNodeCount Number
    The maximum number of nodes the Big Data pool can support.
    minNodeCount Number
    The minimum number of nodes the Big Data pool can support.

    DynamicExecutorAllocation, DynamicExecutorAllocationArgs

    Enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    Enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.
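
    The Example Usage section above does not set this type; a hedged Python sketch of enabling it (the other arguments reuse the illustrative names from that example):

    import pulumi_azure_native as azure_native

    pool = azure_native.synapse.BigDataPool("dynamicAllocPool",
        resource_group_name="ExampleResourceGroup",
        workspace_name="ExampleWorkspace",
        node_size="Medium",
        node_size_family="MemoryOptimized",
        spark_version="3.3",
        # Let Spark scale executors within the pool instead of fixing a count.
        dynamic_executor_allocation=azure_native.synapse.DynamicExecutorAllocationArgs(
            enabled=True,
        ))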

    DynamicExecutorAllocationResponse, DynamicExecutorAllocationResponseArgs

    Enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    Enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled bool
    Indicates whether Dynamic Executor Allocation is enabled or not.
    enabled Boolean
    Indicates whether Dynamic Executor Allocation is enabled or not.

    LibraryInfo, LibraryInfoArgs

    ContainerName string
    Storage blob container name.
    Name string
    Name of the library.
    Path string
    Storage blob path of library.
    Type string
    Type of the library.
    ContainerName string
    Storage blob container name.
    Name string
    Name of the library.
    Path string
    Storage blob path of library.
    Type string
    Type of the library.
    containerName String
    Storage blob container name.
    name String
    Name of the library.
    path String
    Storage blob path of library.
    type String
    Type of the library.
    containerName string
    Storage blob container name.
    name string
    Name of the library.
    path string
    Storage blob path of library.
    type string
    Type of the library.
    container_name str
    Storage blob container name.
    name str
    Name of the library.
    path str
    Storage blob path of library.
    type str
    Type of the library.
    containerName String
    Storage blob container name.
    name String
    Name of the library.
    path String
    Storage blob path of library.
    type String
    Type of the library.
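
    Custom libraries likewise don't appear in the Example Usage section; a sketch under assumed storage names (the container, path, and package below are hypothetical):

    import pulumi_azure_native as azure_native

    pool = azure_native.synapse.BigDataPool("customLibsPool",
        resource_group_name="ExampleResourceGroup",
        workspace_name="ExampleWorkspace",
        node_size="Medium",
        node_size_family="MemoryOptimized",
        spark_version="3.3",
        custom_libraries=[azure_native.synapse.LibraryInfoArgs(
            container_name="libs",                         # hypothetical blob container
            name="example_pkg-1.0-py3-none-any.whl",       # hypothetical package
            path="libs/example_pkg-1.0-py3-none-any.whl",  # hypothetical blob path
            type="whl",
        )])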

    LibraryInfoResponse, LibraryInfoResponseArgs

    CreatorId string
    Creator Id of the library/package.
    ProvisioningStatus string
    Provisioning status of the library/package.
    UploadedTimestamp string
    The last update time of the library.
    ContainerName string
    Storage blob container name.
    Name string
    Name of the library.
    Path string
    Storage blob path of library.
    Type string
    Type of the library.
    CreatorId string
    Creator Id of the library/package.
    ProvisioningStatus string
    Provisioning status of the library/package.
    UploadedTimestamp string
    The last update time of the library.
    ContainerName string
    Storage blob container name.
    Name string
    Name of the library.
    Path string
    Storage blob path of library.
    Type string
    Type of the library.
    creatorId String
    Creator Id of the library/package.
    provisioningStatus String
    Provisioning status of the library/package.
    uploadedTimestamp String
    The last update time of the library.
    containerName String
    Storage blob container name.
    name String
    Name of the library.
    path String
    Storage blob path of library.
    type String
    Type of the library.
    creatorId string
    Creator Id of the library/package.
    provisioningStatus string
    Provisioning status of the library/package.
    uploadedTimestamp string
    The last update time of the library.
    containerName string
    Storage blob container name.
    name string
    Name of the library.
    path string
    Storage blob path of library.
    type string
    Type of the library.
    creator_id str
    Creator Id of the library/package.
    provisioning_status str
    Provisioning status of the library/package.
    uploaded_timestamp str
    The last update time of the library.
    container_name str
    Storage blob container name.
    name str
    Name of the library.
    path str
    Storage blob path of library.
    type str
    Type of the library.
    creatorId String
    Creator Id of the library/package.
    provisioningStatus String
    Provisioning status of the library/package.
    uploadedTimestamp String
    The last update time of the library.
    containerName String
    Storage blob container name.
    name String
    Name of the library.
    path String
    Storage blob path of library.
    type String
    Type of the library.

    LibraryRequirements, LibraryRequirementsArgs

    Content string
    The library requirements.
    Filename string
    The filename of the library requirements file.
    Content string
    The library requirements.
    Filename string
    The filename of the library requirements file.
    content String
    The library requirements.
    filename String
    The filename of the library requirements file.
    content string
    The library requirements.
    filename string
    The filename of the library requirements file.
    content str
    The library requirements.
    filename str
    The filename of the library requirements file.
    content String
    The library requirements.
    filename String
    The filename of the library requirements file.

    LibraryRequirementsResponse, LibraryRequirementsResponseArgs

    Time string
    The last update time of the library requirements file.
    Content string
    The library requirements.
    Filename string
    The filename of the library requirements file.
    Time string
    The last update time of the library requirements file.
    Content string
    The library requirements.
    Filename string
    The filename of the library requirements file.
    time String
    The last update time of the library requirements file.
    content String
    The library requirements.
    filename String
    The filename of the library requirements file.
    time string
    The last update time of the library requirements file.
    content string
    The library requirements.
    filename string
    The filename of the library requirements file.
    time str
    The last update time of the library requirements file.
    content str
    The library requirements.
    filename str
    The filename of the library requirements file.
    time String
    The last update time of the library requirements file.
    content String
    The library requirements.
    filename String
    The filename of the library requirements file.

    NodeSize, NodeSizeArgs

    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    NodeSizeNone
    None
    NodeSizeSmall
    Small
    NodeSizeMedium
    Medium
    NodeSizeLarge
    Large
    NodeSizeXLarge
    XLarge
    NodeSizeXXLarge
    XXLarge
    NodeSizeXXXLarge
    XXXLarge
    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    None
    None
    Small
    Small
    Medium
    Medium
    Large
    Large
    XLarge
    XLarge
    XXLarge
    XXLarge
    XXXLarge
    XXXLarge
    NONE
    None
    SMALL
    Small
    MEDIUM
    Medium
    LARGE
    Large
    X_LARGE
    XLarge
    XX_LARGE
    XXLarge
    XXX_LARGE
    XXXLarge
    "None"
    None
    "Small"
    Small
    "Medium"
    Medium
    "Large"
    Large
    "XLarge"
    XLarge
    "XXLarge"
    XXLarge
    "XXXLarge"
    XXXLarge

    NodeSizeFamily, NodeSizeFamilyArgs

    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    NodeSizeFamilyNone
    None
    NodeSizeFamilyMemoryOptimized
    MemoryOptimized
    NodeSizeFamilyHardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    NodeSizeFamilyHardwareAcceleratedGPU
    HardwareAcceleratedGPU
    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    None
    None
    MemoryOptimized
    MemoryOptimized
    HardwareAcceleratedFPGA
    HardwareAcceleratedFPGA
    HardwareAcceleratedGPU
    HardwareAcceleratedGPU
    NONE
    None
    MEMORY_OPTIMIZED
    MemoryOptimized
    HARDWARE_ACCELERATED_FPGA
    HardwareAcceleratedFPGA
    HARDWARE_ACCELERATED_GPU
    HardwareAcceleratedGPU
    "None"
    None
    "MemoryOptimized"
    MemoryOptimized
    "HardwareAcceleratedFPGA"
    HardwareAcceleratedFPGA
    "HardwareAcceleratedGPU"
    HardwareAcceleratedGPU

    Import

    An existing resource can be imported using its type token, name, and identifier, e.g.

    $ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/01234567-89ab-4def-0123-456789abcdef/resourceGroups/ExampleResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/bigDataPools/ExamplePool 
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    azure-native-v1 pulumi/pulumi-azure-native
    License
    Apache-2.0