azure-native.synapse.BigDataPool
Explore with Pulumi AI
A Big Data pool. Azure REST API version: 2021-06-01. Prior API version in Azure Native 1.x: 2021-03-01.
Other available API versions: 2021-05-01, 2021-06-01-preview.
Example Usage
Create or update a Big Data pool
// Example: create or update an Azure Synapse Big Data (Spark) pool
// with the Pulumi Azure Native provider (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
// Declare the pool with auto-pause, auto-scale, and library settings.
var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
{
// Pause the pool automatically after 15 idle minutes.
AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
{
DelayInMinutes = 15,
Enabled = true,
},
// Scale between 3 and 50 nodes on demand.
AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
{
Enabled = true,
MaxNodeCount = 50,
MinNodeCount = 3,
},
BigDataPoolName = "ExamplePool",
DefaultSparkLogFolder = "/logs",
IsAutotuneEnabled = false,
// Library (package) requirements for the pool; content is empty here.
LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
{
Content = "",
Filename = "requirements.txt",
},
Location = "West US 2",
NodeCount = 4,
NodeSize = AzureNative.Synapse.NodeSize.Medium,
NodeSizeFamily = AzureNative.Synapse.NodeSizeFamily.MemoryOptimized,
ResourceGroupName = "ExampleResourceGroup",
SparkEventsFolder = "/events",
SparkVersion = "3.3",
Tags =
{
{ "key", "value" },
},
WorkspaceName = "ExampleWorkspace",
});
});
// Example: create or update an Azure Synapse Big Data (Spark) pool
// with the Pulumi Azure Native provider (Go).
package main
import (
synapse "github.com/pulumi/pulumi-azure-native-sdk/synapse/v2"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Declare the pool with auto-pause, auto-scale, and library settings.
_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
// Pause the pool automatically after 15 idle minutes.
AutoPause: &synapse.AutoPausePropertiesArgs{
DelayInMinutes: pulumi.Int(15),
Enabled: pulumi.Bool(true),
},
// Scale between 3 and 50 nodes on demand.
AutoScale: &synapse.AutoScalePropertiesArgs{
Enabled: pulumi.Bool(true),
MaxNodeCount: pulumi.Int(50),
MinNodeCount: pulumi.Int(3),
},
BigDataPoolName: pulumi.String("ExamplePool"),
DefaultSparkLogFolder: pulumi.String("/logs"),
IsAutotuneEnabled: pulumi.Bool(false),
// Library (package) requirements for the pool; content is empty here.
LibraryRequirements: &synapse.LibraryRequirementsArgs{
Content: pulumi.String(""),
Filename: pulumi.String("requirements.txt"),
},
Location: pulumi.String("West US 2"),
NodeCount: pulumi.Int(4),
NodeSize: pulumi.String(synapse.NodeSizeMedium),
NodeSizeFamily: pulumi.String(synapse.NodeSizeFamilyMemoryOptimized),
ResourceGroupName: pulumi.String("ExampleResourceGroup"),
SparkEventsFolder: pulumi.String("/events"),
SparkVersion: pulumi.String("3.3"),
Tags: pulumi.StringMap{
"key": pulumi.String("value"),
},
WorkspaceName: pulumi.String("ExampleWorkspace"),
})
if err != nil {
return err
}
return nil
})
}
// Example: create or update an Azure Synapse Big Data (Spark) pool
// with the Pulumi Azure Native provider (Java).
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.synapse.BigDataPool;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Declare the pool with auto-pause, auto-scale, and library settings.
var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
// Pause the pool automatically after 15 idle minutes.
.autoPause(AutoPausePropertiesArgs.builder()
.delayInMinutes(15)
.enabled(true)
.build())
// Scale between 3 and 50 nodes on demand.
.autoScale(AutoScalePropertiesArgs.builder()
.enabled(true)
.maxNodeCount(50)
.minNodeCount(3)
.build())
.bigDataPoolName("ExamplePool")
.defaultSparkLogFolder("/logs")
.isAutotuneEnabled(false)
// Library (package) requirements for the pool; content is empty here.
.libraryRequirements(LibraryRequirementsArgs.builder()
.content("")
.filename("requirements.txt")
.build())
.location("West US 2")
.nodeCount(4)
.nodeSize("Medium")
.nodeSizeFamily("MemoryOptimized")
.resourceGroupName("ExampleResourceGroup")
.sparkEventsFolder("/events")
.sparkVersion("3.3")
.tags(Map.of("key", "value"))
.workspaceName("ExampleWorkspace")
.build());
}
}
# Example: create or update an Azure Synapse Big Data (Spark) pool
# with the Pulumi Azure Native provider (Python).
import pulumi
import pulumi_azure_native as azure_native
# Declare the pool with auto-pause, auto-scale, and library settings.
big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
# Pause the pool automatically after 15 idle minutes.
auto_pause={
"delay_in_minutes": 15,
"enabled": True,
},
# Scale between 3 and 50 nodes on demand.
auto_scale={
"enabled": True,
"max_node_count": 50,
"min_node_count": 3,
},
big_data_pool_name="ExamplePool",
default_spark_log_folder="/logs",
is_autotune_enabled=False,
# Library (package) requirements for the pool; content is empty here.
library_requirements={
"content": "",
"filename": "requirements.txt",
},
location="West US 2",
node_count=4,
node_size=azure_native.synapse.NodeSize.MEDIUM,
node_size_family=azure_native.synapse.NodeSizeFamily.MEMORY_OPTIMIZED,
resource_group_name="ExampleResourceGroup",
spark_events_folder="/events",
spark_version="3.3",
tags={
"key": "value",
},
workspace_name="ExampleWorkspace")
// Example: create or update an Azure Synapse Big Data (Spark) pool
// with the Pulumi Azure Native provider (TypeScript).
import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";
// Declare the pool with auto-pause, auto-scale, and library settings.
const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
// Pause the pool automatically after 15 idle minutes.
autoPause: {
delayInMinutes: 15,
enabled: true,
},
// Scale between 3 and 50 nodes on demand.
autoScale: {
enabled: true,
maxNodeCount: 50,
minNodeCount: 3,
},
bigDataPoolName: "ExamplePool",
defaultSparkLogFolder: "/logs",
isAutotuneEnabled: false,
// Library (package) requirements for the pool; content is empty here.
libraryRequirements: {
content: "",
filename: "requirements.txt",
},
location: "West US 2",
nodeCount: 4,
nodeSize: azure_native.synapse.NodeSize.Medium,
nodeSizeFamily: azure_native.synapse.NodeSizeFamily.MemoryOptimized,
resourceGroupName: "ExampleResourceGroup",
sparkEventsFolder: "/events",
sparkVersion: "3.3",
tags: {
key: "value",
},
workspaceName: "ExampleWorkspace",
});
# Example: create or update an Azure Synapse Big Data (Spark) pool
# with Pulumi YAML. (Indentation restored; YAML nesting is semantic.)
resources:
  bigDataPool:
    type: azure-native:synapse:BigDataPool
    properties:
      # Pause the pool automatically after 15 idle minutes.
      autoPause:
        delayInMinutes: 15
        enabled: true
      # Scale between 3 and 50 nodes on demand.
      autoScale:
        enabled: true
        maxNodeCount: 50
        minNodeCount: 3
      bigDataPoolName: ExamplePool
      defaultSparkLogFolder: /logs
      isAutotuneEnabled: false
      libraryRequirements:
        # A bare `content:` would parse as YAML null; quote it so an empty
        # string is sent, matching the other language examples.
        content: ""
        filename: requirements.txt
      location: West US 2
      nodeCount: 4
      nodeSize: Medium
      nodeSizeFamily: MemoryOptimized
      resourceGroupName: ExampleResourceGroup
      sparkEventsFolder: /events
      sparkVersion: '3.3'
      tags:
        key: value
      workspaceName: ExampleWorkspace
Create BigDataPool Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
@overload
def BigDataPool(resource_name: str,
args: BigDataPoolArgs,
opts: Optional[ResourceOptions] = None)
@overload
def BigDataPool(resource_name: str,
opts: Optional[ResourceOptions] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
library_requirements: Optional[LibraryRequirementsArgs] = None,
node_size: Optional[Union[str, NodeSize]] = None,
custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
default_spark_log_folder: Optional[str] = None,
dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
force: Optional[bool] = None,
is_autotune_enabled: Optional[bool] = None,
is_compute_isolation_enabled: Optional[bool] = None,
auto_pause: Optional[AutoPausePropertiesArgs] = None,
location: Optional[str] = None,
node_count: Optional[int] = None,
cache_size: Optional[int] = None,
node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
provisioning_state: Optional[str] = None,
big_data_pool_name: Optional[str] = None,
session_level_packages_enabled: Optional[bool] = None,
spark_config_properties: Optional[SparkConfigPropertiesArgs] = None,
spark_events_folder: Optional[str] = None,
spark_version: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
auto_scale: Optional[AutoScalePropertiesArgs] = None)
func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
public BigDataPool(String name, BigDataPoolArgs args)
public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
type: azure-native:synapse:BigDataPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example: placeholder values for every BigDataPool input property (C#).
var bigDataPoolResource = new AzureNative.Synapse.BigDataPool("bigDataPoolResource", new()
{
ResourceGroupName = "string",
WorkspaceName = "string",
LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
{
Content = "string",
Filename = "string",
},
NodeSize = "string",
CustomLibraries = new[]
{
new AzureNative.Synapse.Inputs.LibraryInfoArgs
{
ContainerName = "string",
Name = "string",
Path = "string",
Type = "string",
},
},
DefaultSparkLogFolder = "string",
DynamicExecutorAllocation = new AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs
{
Enabled = false,
MaxExecutors = 0,
MinExecutors = 0,
},
Force = false,
IsAutotuneEnabled = false,
IsComputeIsolationEnabled = false,
AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
{
DelayInMinutes = 0,
Enabled = false,
},
Location = "string",
NodeCount = 0,
CacheSize = 0,
NodeSizeFamily = "string",
ProvisioningState = "string",
BigDataPoolName = "string",
SessionLevelPackagesEnabled = false,
SparkConfigProperties = new AzureNative.Synapse.Inputs.SparkConfigPropertiesArgs
{
ConfigurationType = "string",
Content = "string",
Filename = "string",
},
SparkEventsFolder = "string",
SparkVersion = "string",
Tags =
{
{ "string", "string" },
},
AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
{
Enabled = false,
MaxNodeCount = 0,
MinNodeCount = 0,
},
});
// Reference example: placeholder values for every BigDataPool input property (Go).
example, err := synapse.NewBigDataPool(ctx, "bigDataPoolResource", &synapse.BigDataPoolArgs{
ResourceGroupName: pulumi.String("string"),
WorkspaceName: pulumi.String("string"),
LibraryRequirements: &synapse.LibraryRequirementsArgs{
Content: pulumi.String("string"),
Filename: pulumi.String("string"),
},
NodeSize: pulumi.String("string"),
CustomLibraries: synapse.LibraryInfoArray{
&synapse.LibraryInfoArgs{
ContainerName: pulumi.String("string"),
Name: pulumi.String("string"),
Path: pulumi.String("string"),
Type: pulumi.String("string"),
},
},
DefaultSparkLogFolder: pulumi.String("string"),
DynamicExecutorAllocation: &synapse.DynamicExecutorAllocationArgs{
Enabled: pulumi.Bool(false),
MaxExecutors: pulumi.Int(0),
MinExecutors: pulumi.Int(0),
},
Force: pulumi.Bool(false),
IsAutotuneEnabled: pulumi.Bool(false),
IsComputeIsolationEnabled: pulumi.Bool(false),
AutoPause: &synapse.AutoPausePropertiesArgs{
DelayInMinutes: pulumi.Int(0),
Enabled: pulumi.Bool(false),
},
Location: pulumi.String("string"),
NodeCount: pulumi.Int(0),
CacheSize: pulumi.Int(0),
NodeSizeFamily: pulumi.String("string"),
ProvisioningState: pulumi.String("string"),
BigDataPoolName: pulumi.String("string"),
SessionLevelPackagesEnabled: pulumi.Bool(false),
SparkConfigProperties: &synapse.SparkConfigPropertiesArgs{
ConfigurationType: pulumi.String("string"),
Content: pulumi.String("string"),
Filename: pulumi.String("string"),
},
SparkEventsFolder: pulumi.String("string"),
SparkVersion: pulumi.String("string"),
Tags: pulumi.StringMap{
"string": pulumi.String("string"),
},
AutoScale: &synapse.AutoScalePropertiesArgs{
Enabled: pulumi.Bool(false),
MaxNodeCount: pulumi.Int(0),
MinNodeCount: pulumi.Int(0),
},
})
// Reference example: placeholder values for every BigDataPool input property (Java).
var bigDataPoolResource = new BigDataPool("bigDataPoolResource", BigDataPoolArgs.builder()
.resourceGroupName("string")
.workspaceName("string")
.libraryRequirements(LibraryRequirementsArgs.builder()
.content("string")
.filename("string")
.build())
.nodeSize("string")
.customLibraries(LibraryInfoArgs.builder()
.containerName("string")
.name("string")
.path("string")
.type("string")
.build())
.defaultSparkLogFolder("string")
.dynamicExecutorAllocation(DynamicExecutorAllocationArgs.builder()
.enabled(false)
.maxExecutors(0)
.minExecutors(0)
.build())
.force(false)
.isAutotuneEnabled(false)
.isComputeIsolationEnabled(false)
.autoPause(AutoPausePropertiesArgs.builder()
.delayInMinutes(0)
.enabled(false)
.build())
.location("string")
.nodeCount(0)
.cacheSize(0)
.nodeSizeFamily("string")
.provisioningState("string")
.bigDataPoolName("string")
.sessionLevelPackagesEnabled(false)
.sparkConfigProperties(SparkConfigPropertiesArgs.builder()
.configurationType("string")
.content("string")
.filename("string")
.build())
.sparkEventsFolder("string")
.sparkVersion("string")
.tags(Map.of("string", "string"))
.autoScale(AutoScalePropertiesArgs.builder()
.enabled(false)
.maxNodeCount(0)
.minNodeCount(0)
.build())
.build());
# Reference example: placeholder values for every BigDataPool input property (Python).
big_data_pool_resource = azure_native.synapse.BigDataPool("bigDataPoolResource",
resource_group_name="string",
workspace_name="string",
library_requirements={
"content": "string",
"filename": "string",
},
node_size="string",
custom_libraries=[{
"container_name": "string",
"name": "string",
"path": "string",
"type": "string",
}],
default_spark_log_folder="string",
dynamic_executor_allocation={
"enabled": False,
"max_executors": 0,
"min_executors": 0,
},
force=False,
is_autotune_enabled=False,
is_compute_isolation_enabled=False,
auto_pause={
"delay_in_minutes": 0,
"enabled": False,
},
location="string",
node_count=0,
cache_size=0,
node_size_family="string",
provisioning_state="string",
big_data_pool_name="string",
session_level_packages_enabled=False,
spark_config_properties={
"configuration_type": "string",
"content": "string",
"filename": "string",
},
spark_events_folder="string",
spark_version="string",
tags={
"string": "string",
},
auto_scale={
"enabled": False,
"max_node_count": 0,
"min_node_count": 0,
})
// Reference example: placeholder values for every BigDataPool input property (TypeScript).
const bigDataPoolResource = new azure_native.synapse.BigDataPool("bigDataPoolResource", {
resourceGroupName: "string",
workspaceName: "string",
libraryRequirements: {
content: "string",
filename: "string",
},
nodeSize: "string",
customLibraries: [{
containerName: "string",
name: "string",
path: "string",
type: "string",
}],
defaultSparkLogFolder: "string",
dynamicExecutorAllocation: {
enabled: false,
maxExecutors: 0,
minExecutors: 0,
},
force: false,
isAutotuneEnabled: false,
isComputeIsolationEnabled: false,
autoPause: {
delayInMinutes: 0,
enabled: false,
},
location: "string",
nodeCount: 0,
cacheSize: 0,
nodeSizeFamily: "string",
provisioningState: "string",
bigDataPoolName: "string",
sessionLevelPackagesEnabled: false,
sparkConfigProperties: {
configurationType: "string",
content: "string",
filename: "string",
},
sparkEventsFolder: "string",
sparkVersion: "string",
tags: {
string: "string",
},
autoScale: {
enabled: false,
maxNodeCount: 0,
minNodeCount: 0,
},
});
# Reference example: placeholder values for every BigDataPool input property
# (Pulumi YAML). Indentation restored; YAML nesting is semantic.
type: azure-native:synapse:BigDataPool
properties:
  autoPause:
    delayInMinutes: 0
    enabled: false
  autoScale:
    enabled: false
    maxNodeCount: 0
    minNodeCount: 0
  bigDataPoolName: string
  cacheSize: 0
  customLibraries:
    - containerName: string
      name: string
      path: string
      type: string
  defaultSparkLogFolder: string
  dynamicExecutorAllocation:
    enabled: false
    maxExecutors: 0
    minExecutors: 0
  force: false
  isAutotuneEnabled: false
  isComputeIsolationEnabled: false
  libraryRequirements:
    content: string
    filename: string
  location: string
  nodeCount: 0
  nodeSize: string
  nodeSizeFamily: string
  provisioningState: string
  resourceGroupName: string
  sessionLevelPackagesEnabled: false
  sparkConfigProperties:
    configurationType: string
    content: string
    filename: string
  sparkEventsFolder: string
  sparkVersion: string
  tags:
    string: string
  workspaceName: string
BigDataPool Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The BigDataPool resource accepts the following input properties:
- ResourceGroupName string
- The name of the resource group. The name is case insensitive.
- Workspace
Name string - The name of the workspace.
- Auto
Pause Pulumi.Azure Native. Synapse. Inputs. Auto Pause Properties - Auto-pausing properties
- Auto
Scale Pulumi.Azure Native. Synapse. Inputs. Auto Scale Properties - Auto-scaling properties
- Big
Data stringPool Name - Big Data pool name
- Cache
Size int - The cache size
- Custom
Libraries List<Pulumi.Azure Native. Synapse. Inputs. Library Info> - List of custom libraries/packages associated with the spark pool.
- Default
Spark stringLog Folder - The default folder where Spark logs will be written.
- Dynamic
Executor Pulumi.Allocation Azure Native. Synapse. Inputs. Dynamic Executor Allocation - Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- Is
Autotune boolEnabled - Whether autotune is required or not.
- Is
Compute boolIsolation Enabled - Whether compute isolation is required or not.
- Library
Requirements Pulumi.Azure Native. Synapse. Inputs. Library Requirements - Library version requirements
- Location string
- The geo-location where the resource lives
- Node
Count int - The number of nodes in the Big Data pool.
- Node
Size string | Pulumi.Azure Native. Synapse. Node Size - The level of compute power that each node in the Big Data pool has.
- Node
Size string | Pulumi.Family Azure Native. Synapse. Node Size Family - The kind of nodes that the Big Data pool provides.
- Provisioning
State string - The state of the Big Data pool.
- Session
Level boolPackages Enabled - Whether session level packages enabled.
- Spark
Config Pulumi.Properties Azure Native. Synapse. Inputs. Spark Config Properties - Spark configuration file to specify additional properties
- Spark
Events stringFolder - The Spark events folder
- Spark
Version string - The Apache Spark version.
- Tags Dictionary<string, string>
- Resource tags.
- Resource
Group stringName - The name of the resource group. The name is case insensitive.
- Workspace
Name string - The name of the workspace.
- Auto
Pause AutoPause Properties Args - Auto-pausing properties
- Auto
Scale AutoScale Properties Args - Auto-scaling properties
- Big
Data stringPool Name - Big Data pool name
- Cache
Size int - The cache size
- Custom
Libraries []LibraryInfo Args - List of custom libraries/packages associated with the spark pool.
- Default
Spark stringLog Folder - The default folder where Spark logs will be written.
- Dynamic
Executor DynamicAllocation Executor Allocation Args - Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- Is
Autotune boolEnabled - Whether autotune is required or not.
- Is
Compute boolIsolation Enabled - Whether compute isolation is required or not.
- Library
Requirements LibraryRequirements Args - Library version requirements
- Location string
- The geo-location where the resource lives
- Node
Count int - The number of nodes in the Big Data pool.
- Node
Size string | NodeSize - The level of compute power that each node in the Big Data pool has.
- Node
Size string | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- Provisioning
State string - The state of the Big Data pool.
- Session
Level boolPackages Enabled - Whether session level packages enabled.
- Spark
Config SparkProperties Config Properties Args - Spark configuration file to specify additional properties
- Spark
Events stringFolder - The Spark events folder
- Spark
Version string - The Apache Spark version.
- map[string]string
- Resource tags.
- resource
Group StringName - The name of the resource group. The name is case insensitive.
- workspace
Name String - The name of the workspace.
- auto
Pause AutoPause Properties - Auto-pausing properties
- auto
Scale AutoScale Properties - Auto-scaling properties
- big
Data StringPool Name - Big Data pool name
- cache
Size Integer - The cache size
- custom
Libraries List<LibraryInfo> - List of custom libraries/packages associated with the spark pool.
- default
Spark StringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor DynamicAllocation Executor Allocation - Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- is
Autotune BooleanEnabled - Whether autotune is required or not.
- is
Compute BooleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements LibraryRequirements - Library version requirements
- location String
- The geo-location where the resource lives
- node
Count Integer - The number of nodes in the Big Data pool.
- node
Size String | NodeSize - The level of compute power that each node in the Big Data pool has.
- node
Size String | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning
State String - The state of the Big Data pool.
- session
Level BooleanPackages Enabled - Whether session level packages enabled.
- spark
Config SparkProperties Config Properties - Spark configuration file to specify additional properties
- spark
Events StringFolder - The Spark events folder
- spark
Version String - The Apache Spark version.
- Map<String,String>
- Resource tags.
- resource
Group stringName - The name of the resource group. The name is case insensitive.
- workspace
Name string - The name of the workspace.
- auto
Pause AutoPause Properties - Auto-pausing properties
- auto
Scale AutoScale Properties - Auto-scaling properties
- big
Data stringPool Name - Big Data pool name
- cache
Size number - The cache size
- custom
Libraries LibraryInfo[] - List of custom libraries/packages associated with the spark pool.
- default
Spark stringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor DynamicAllocation Executor Allocation - Dynamic Executor Allocation
- force boolean
- Whether to stop any running jobs in the Big Data pool
- is
Autotune booleanEnabled - Whether autotune is required or not.
- is
Compute booleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements LibraryRequirements - Library version requirements
- location string
- The geo-location where the resource lives
- node
Count number - The number of nodes in the Big Data pool.
- node
Size string | NodeSize - The level of compute power that each node in the Big Data pool has.
- node
Size string | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning
State string - The state of the Big Data pool.
- session
Level booleanPackages Enabled - Whether session level packages enabled.
- spark
Config SparkProperties Config Properties - Spark configuration file to specify additional properties
- spark
Events stringFolder - The Spark events folder
- spark
Version string - The Apache Spark version.
- {[key: string]: string}
- Resource tags.
- resource_
group_ strname - The name of the resource group. The name is case insensitive.
- workspace_
name str - The name of the workspace.
- auto_
pause AutoPause Properties Args - Auto-pausing properties
- auto_
scale AutoScale Properties Args - Auto-scaling properties
- big_
data_ strpool_ name - Big Data pool name
- cache_
size int - The cache size
- custom_
libraries Sequence[LibraryInfo Args] - List of custom libraries/packages associated with the spark pool.
- default_
spark_ strlog_ folder - The default folder where Spark logs will be written.
- dynamic_
executor_ Dynamicallocation Executor Allocation Args - Dynamic Executor Allocation
- force bool
- Whether to stop any running jobs in the Big Data pool
- is_
autotune_ boolenabled - Whether autotune is required or not.
- is_
compute_ boolisolation_ enabled - Whether compute isolation is required or not.
- library_
requirements LibraryRequirements Args - Library version requirements
- location str
- The geo-location where the resource lives
- node_
count int - The number of nodes in the Big Data pool.
- node_
size str | NodeSize - The level of compute power that each node in the Big Data pool has.
- node_
size_ str | Nodefamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning_
state str - The state of the Big Data pool.
- session_
level_ boolpackages_ enabled - Whether session level packages enabled.
- spark_
config_ Sparkproperties Config Properties Args - Spark configuration file to specify additional properties
- spark_
events_ strfolder - The Spark events folder
- spark_
version str - The Apache Spark version.
- Mapping[str, str]
- Resource tags.
- resource
Group StringName - The name of the resource group. The name is case insensitive.
- workspace
Name String - The name of the workspace.
- auto
Pause Property Map - Auto-pausing properties
- auto
Scale Property Map - Auto-scaling properties
- big
Data StringPool Name - Big Data pool name
- cache
Size Number - The cache size
- custom
Libraries List<Property Map> - List of custom libraries/packages associated with the spark pool.
- default
Spark StringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor Property MapAllocation - Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- is
Autotune BooleanEnabled - Whether autotune is required or not.
- is
Compute BooleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements Property Map - Library version requirements
- location String
- The geo-location where the resource lives
- node
Count Number - The number of nodes in the Big Data pool.
- node
Size String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge" - The level of compute power that each node in the Big Data pool has.
- nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU" - The kind of nodes that the Big Data pool provides.
- provisioning
State String - The state of the Big Data pool.
- session
Level BooleanPackages Enabled - Whether session level packages enabled.
- spark
Config Property MapProperties - Spark configuration file to specify additional properties
- spark
Events StringFolder - The Spark events folder
- spark
Version String - The Apache Spark version.
- Map<String>
- Resource tags.
Outputs
All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:
- Creation
Date string - The time when the Big Data pool was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- Creation
Date string - The time when the Big Data pool was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creation
Date String - The time when the Big Data pool was created.
- id String
- The provider-assigned unique ID for this managed resource.
- last
Succeeded StringTimestamp - The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creation
Date string - The time when the Big Data pool was created.
- id string
- The provider-assigned unique ID for this managed resource.
- last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- name string
- The name of the resource
- type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creation_
date str - The time when the Big Data pool was created.
- id str
- The provider-assigned unique ID for this managed resource.
- last_
succeeded_ strtimestamp - The time when the Big Data pool was updated successfully.
- name str
- The name of the resource
- type str
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creation
Date String - The time when the Big Data pool was created.
- id String
- The provider-assigned unique ID for this managed resource.
- last
Succeeded StringTimestamp - The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Supporting Types
AutoPauseProperties, AutoPausePropertiesArgs
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In IntegerMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In numberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_
in_ intminutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In NumberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoPausePropertiesResponse, AutoPausePropertiesResponseArgs
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In IntegerMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In numberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_
in_ intminutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In NumberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoScaleProperties, AutoScalePropertiesArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int - The maximum number of nodes the Big Data pool can support.
- MinNodeCount int - The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int - The maximum number of nodes the Big Data pool can support.
- MinNodeCount int - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Integer - The maximum number of nodes the Big Data pool can support.
- minNodeCount Integer - The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount number - The maximum number of nodes the Big Data pool can support.
- minNodeCount number - The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_node_count int - The maximum number of nodes the Big Data pool can support.
- min_node_count int - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Number - The maximum number of nodes the Big Data pool can support.
- minNodeCount Number - The minimum number of nodes the Big Data pool can support.
AutoScalePropertiesResponse, AutoScalePropertiesResponseArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int - The maximum number of nodes the Big Data pool can support.
- MinNodeCount int - The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int - The maximum number of nodes the Big Data pool can support.
- MinNodeCount int - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Integer - The maximum number of nodes the Big Data pool can support.
- minNodeCount Integer - The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount number - The maximum number of nodes the Big Data pool can support.
- minNodeCount number - The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_node_count int - The maximum number of nodes the Big Data pool can support.
- min_node_count int - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Number - The maximum number of nodes the Big Data pool can support.
- minNodeCount Number - The minimum number of nodes the Big Data pool can support.
ConfigurationType, ConfigurationTypeArgs
- File
- File
- Artifact
- Artifact
- ConfigurationTypeFile - File
- ConfigurationTypeArtifact - Artifact
- File
- File
- Artifact
- Artifact
- File
- File
- Artifact
- Artifact
- FILE
- File
- ARTIFACT
- Artifact
- "File"
- File
- "Artifact"
- Artifact
DynamicExecutorAllocation, DynamicExecutorAllocationArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int - The maximum number of executors allotted
- MinExecutors int - The minimum number of executors allotted
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int - The maximum number of executors allotted
- MinExecutors int - The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Integer - The maximum number of executors allotted
- minExecutors Integer - The minimum number of executors allotted
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors number - The maximum number of executors allotted
- minExecutors number - The minimum number of executors allotted
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- max_executors int - The maximum number of executors allotted
- min_executors int - The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Number - The maximum number of executors allotted
- minExecutors Number - The minimum number of executors allotted
DynamicExecutorAllocationResponse, DynamicExecutorAllocationResponseArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int - The maximum number of executors allotted
- MinExecutors int - The minimum number of executors allotted
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int - The maximum number of executors allotted
- MinExecutors int - The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Integer - The maximum number of executors allotted
- minExecutors Integer - The minimum number of executors allotted
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors number - The maximum number of executors allotted
- minExecutors number - The minimum number of executors allotted
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- max_executors int - The maximum number of executors allotted
- min_executors int - The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Number - The maximum number of executors allotted
- minExecutors Number - The minimum number of executors allotted
LibraryInfo, LibraryInfoArgs
- ContainerName string - Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- ContainerName string - Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- containerName String - Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- containerName string - Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- container_name str - Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- containerName String - Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryInfoResponse, LibraryInfoResponseArgs
- CreatorId string - Creator Id of the library/package.
- ProvisioningStatus string - Provisioning status of the library/package.
- UploadedTimestamp string - The last update time of the library.
- ContainerName string - Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- CreatorId string - Creator Id of the library/package.
- ProvisioningStatus string - Provisioning status of the library/package.
- UploadedTimestamp string - The last update time of the library.
- ContainerName string - Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- creatorId String - Creator Id of the library/package.
- provisioningStatus String - Provisioning status of the library/package.
- uploadedTimestamp String - The last update time of the library.
- containerName String - Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- creatorId string - Creator Id of the library/package.
- provisioningStatus string - Provisioning status of the library/package.
- uploadedTimestamp string - The last update time of the library.
- containerName string - Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- creator_id str - Creator Id of the library/package.
- provisioning_status str - Provisioning status of the library/package.
- uploaded_timestamp str - The last update time of the library.
- container_name str - Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- creatorId String - Creator Id of the library/package.
- provisioningStatus String - Provisioning status of the library/package.
- uploadedTimestamp String - The last update time of the library.
- containerName String - Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryRequirements, LibraryRequirementsArgs
LibraryRequirementsResponse, LibraryRequirementsResponseArgs
NodeSize, NodeSizeArgs
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NodeSizeNone - None
- NodeSizeSmall - Small
- NodeSizeMedium - Medium
- NodeSizeLarge - Large
- NodeSizeXLarge - XLarge
- NodeSizeXXLarge - XXLarge
- NodeSizeXXXLarge - XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NONE
- None
- SMALL
- Small
- MEDIUM
- Medium
- LARGE
- Large
- X_LARGE
- XLarge
- XX_LARGE
- XXLarge
- XXX_LARGE
- XXXLarge
- "None"
- None
- "Small"
- Small
- "Medium"
- Medium
- "Large"
- Large
- "XLarge"
- XLarge
- "XXLarge"
- XXLarge
- "XXXLarge"
- XXXLarge
NodeSizeFamily, NodeSizeFamilyArgs
- None
- None
- MemoryOptimized - MemoryOptimized
- HardwareAcceleratedFPGA - HardwareAcceleratedFPGA
- HardwareAcceleratedGPU - HardwareAcceleratedGPU
- NodeSizeFamilyNone - None
- NodeSizeFamilyMemoryOptimized - MemoryOptimized
- NodeSizeFamilyHardwareAcceleratedFPGA - HardwareAcceleratedFPGA
- NodeSizeFamilyHardwareAcceleratedGPU - HardwareAcceleratedGPU
- None
- None
- MemoryOptimized - MemoryOptimized
- HardwareAcceleratedFPGA - HardwareAcceleratedFPGA
- HardwareAcceleratedGPU - HardwareAcceleratedGPU
- None
- None
- MemoryOptimized - MemoryOptimized
- HardwareAcceleratedFPGA - HardwareAcceleratedFPGA
- HardwareAcceleratedGPU - HardwareAcceleratedGPU
- NONE
- None
- MEMORY_OPTIMIZED
- MemoryOptimized
- HARDWARE_ACCELERATED_FPGA
- HardwareAcceleratedFPGA
- HARDWARE_ACCELERATED_GPU
- HardwareAcceleratedGPU
- "None"
- None
- "MemoryOptimized" - MemoryOptimized
- "HardwareAcceleratedFPGA" - HardwareAcceleratedFPGA
- "HardwareAcceleratedGPU" - HardwareAcceleratedGPU
SparkConfigProperties, SparkConfigPropertiesArgs
- ConfigurationType string | Pulumi.AzureNative.Synapse.ConfigurationType - The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- ConfigurationType string | ConfigurationType - The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- configurationType String | ConfigurationType - The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
- configurationType string | ConfigurationType - The type of the spark config properties file.
- content string
- The spark config properties.
- filename string
- The filename of the spark config properties file.
- configuration_type str | ConfigurationType - The type of the spark config properties file.
- content str
- The spark config properties.
- filename str
- The filename of the spark config properties file.
- configurationType String | "File" | "Artifact" - The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
SparkConfigPropertiesResponse, SparkConfigPropertiesResponseArgs
- Time string
- The last update time of the spark config properties file.
- ConfigurationType string - The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- Time string
- The last update time of the spark config properties file.
- ConfigurationType string - The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- time String
- The last update time of the spark config properties file.
- configurationType String - The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
- time string
- The last update time of the spark config properties file.
- configurationType string - The type of the spark config properties file.
- content string
- The spark config properties.
- filename string
- The filename of the spark config properties file.
- time str
- The last update time of the spark config properties file.
- configuration_type str - The type of the spark config properties file.
- content str
- The spark config properties.
- filename str
- The filename of the spark config properties file.
- time String
- The last update time of the spark config properties file.
- configurationType String - The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
Import
An existing resource can be imported using its type token, name, and identifier, e.g.
$ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Native pulumi/pulumi-azure-native
- License
- Apache-2.0