databricks.getClusters
Note: If you have a fully automated setup with workspaces created by databricks.MwsWorkspaces or azurerm_databricks_workspace, please make sure to add a depends_on attribute to prevent "default auth: cannot configure default credentials" errors.
Retrieves a list of databricks.Cluster ids that were created by Pulumi or manually, with or without a databricks.ClusterPolicy attached.
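For the automated setup mentioned in the note above, one way to express that dependency in TypeScript is to construct an explicit databricks.Provider from the workspace's outputs and pass it to the lookup. This is only a sketch: the names workspace, workspaceUrl, and workspaceToken are illustrative and depend on how your workspace and credentials are defined.
import * as databricks from "@pulumi/databricks";

// `workspace` is assumed to be a databricks.MwsWorkspaces (or Azure workspace)
// resource defined elsewhere in the same stack.
const workspaceProvider = new databricks.Provider("workspace-provider", {
    host: workspace.workspaceUrl,  // illustrative output name
    token: workspaceToken,         // supply credentials appropriate to your setup
});

// Because the provider's inputs come from the workspace's outputs, the invoke
// only runs after the workspace exists, avoiding the
// "default auth: cannot configure default credentials" error.
const all = databricks.getClustersOutput({}, { provider: workspaceProvider });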
Example Usage
Retrieve cluster IDs for all clusters:
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const all = databricks.getClusters({});
import pulumi
import pulumi_databricks as databricks
all = databricks.get_clusters()
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.GetClusters(ctx, &databricks.GetClustersArgs{}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
    var all = Databricks.GetClusters.Invoke();
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetClustersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var all = DatabricksFunctions.getClusters();
    }
}
variables:
  all:
    fn::invoke:
      Function: databricks:getClusters
      Arguments: {}
Retrieve cluster IDs for all clusters whose name contains "shared":
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const allShared = databricks.getClusters({
clusterNameContains: "shared",
});
import pulumi
import pulumi_databricks as databricks
all_shared = databricks.get_clusters(cluster_name_contains="shared")
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.GetClusters(ctx, &databricks.GetClustersArgs{
			ClusterNameContains: pulumi.StringRef("shared"),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
    var allShared = Databricks.GetClusters.Invoke(new()
    {
        ClusterNameContains = "shared",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetClustersArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var allShared = DatabricksFunctions.getClusters(GetClustersArgs.builder()
            .clusterNameContains("shared")
            .build());
    }
}
variables:
  allShared:
    fn::invoke:
      Function: databricks:getClusters
      Arguments:
        clusterNameContains: shared
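As a follow-up sketch (TypeScript, output form), the matching ids can be exported as a stack output or fed into other resources:
import * as databricks from "@pulumi/databricks";

// Look up clusters whose name contains "shared".
const allShared = databricks.getClustersOutput({
    clusterNameContains: "shared",
});

// `ids` is lifted to an Output<string[]> on the output-form result.
export const sharedClusterIds = allShared.ids;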
Related Resources
The following resources are used in the same context:
- End-to-end workspace management guide.
- databricks.Cluster to create Databricks Clusters.
- databricks.ClusterPolicy to create a databricks.Cluster policy, which limits the ability to create clusters based on a set of rules.
- databricks.InstancePool to manage instance pools, which reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances.
- databricks.Job to manage Databricks Jobs that run non-interactive code on a databricks.Cluster.
- databricks.Library to install a library on a databricks.Cluster.
- databricks.Pipeline to deploy Delta Live Tables.
Using getClusters
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getClusters(args: GetClustersArgs, opts?: InvokeOptions): Promise<GetClustersResult>
function getClustersOutput(args: GetClustersOutputArgs, opts?: InvokeOptions): Output<GetClustersResult>
def get_clusters(cluster_name_contains: Optional[str] = None,
                 filter_by: Optional[GetClustersFilterBy] = None,
                 id: Optional[str] = None,
                 ids: Optional[Sequence[str]] = None,
                 opts: Optional[InvokeOptions] = None) -> GetClustersResult
def get_clusters_output(cluster_name_contains: Optional[pulumi.Input[str]] = None,
                        filter_by: Optional[pulumi.Input[GetClustersFilterByArgs]] = None,
                        id: Optional[pulumi.Input[str]] = None,
                        ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                        opts: Optional[InvokeOptions] = None) -> Output[GetClustersResult]
func GetClusters(ctx *Context, args *GetClustersArgs, opts ...InvokeOption) (*GetClustersResult, error)
func GetClustersOutput(ctx *Context, args *GetClustersOutputArgs, opts ...InvokeOption) GetClustersResultOutput
> Note: This function is named GetClusters in the Go SDK.
public static class GetClusters
{
    public static Task<GetClustersResult> InvokeAsync(GetClustersArgs args, InvokeOptions? opts = null)
    public static Output<GetClustersResult> Invoke(GetClustersInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetClustersResult> getClusters(GetClustersArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
  function: databricks:index/getClusters:getClusters
  arguments:
    # arguments dictionary
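For example, in TypeScript the direct form returns a Promise, while the output form accepts Input-wrapped arguments and returns an Output whose properties can be used with apply; a minimal sketch:
import * as databricks from "@pulumi/databricks";

// Direct form: plain arguments, Promise-wrapped result.
const direct = databricks.getClusters({ clusterNameContains: "shared" });
direct.then(result => console.log(`found ${result.ids.length} clusters`));

// Output form: Input-wrapped arguments, Output-wrapped result.
const viaOutput = databricks.getClustersOutput({ clusterNameContains: "shared" });
export const sharedClusterCount = viaOutput.ids.apply(ids => ids.length);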
The following arguments are supported (property names are shown in camelCase; other SDKs use the equivalent casing, e.g. cluster_name_contains in Python):
- clusterNameContains (string) - Only return databricks.Cluster ids that match the given name string.
- filterBy (GetClustersFilterBy) - Filters to apply to the listed clusters. See the filter_by Configuration Block below for details.
- id (string)
- ids (list of string) - List of databricks.Cluster ids.
getClusters Result
The following output properties are available (shown in camelCase; other SDKs use the equivalent casing):
- id (string)
- ids (list of string) - List of databricks.Cluster ids.
- clusterNameContains (string)
- filterBy (GetClustersFilterBy)
Supporting Types
GetClustersFilterBy
- clusterSources (list of string) - List of cluster sources to filter by. Possible values are API, JOB, MODELS, PIPELINE, PIPELINE_MAINTENANCE, SQL, and UI.
- clusterStates (list of string) - List of cluster states to filter by. Possible values are RUNNING, PENDING, RESIZING, RESTARTING, TERMINATING, TERMINATED, ERROR, and UNKNOWN.
- isPinned (bool) - Whether to filter by pinned clusters.
- policyId (string) - Filter by databricks.ClusterPolicy id.
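A short TypeScript sketch combining these filters, using the camelCase property names listed above:
import * as databricks from "@pulumi/databricks";

// Only return pinned, running clusters that were created from the UI.
const pinnedUiClusters = databricks.getClustersOutput({
    filterBy: {
        clusterSources: ["UI"],
        clusterStates: ["RUNNING"],
        isPinned: true,
    },
});
export const pinnedUiClusterIds = pinnedUiClusters.ids;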
Package Details
- Repository: databricks pulumi/pulumi-databricks
- License: Apache-2.0
- Notes: This Pulumi package is based on the databricks Terraform Provider.