databricks.getJobs

Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    Note If you have a fully automated setup with workspaces created by databricks.MwsWorkspaces or azurerm_databricks_workspace, make sure to add a depends_on attribute to prevent `default auth: cannot configure default credentials` errors.

    Retrieves a list of databricks.Job IDs, whether they were created by Pulumi or manually, so that special handling can be applied.

    Note This data source will return an error if there are jobs with duplicate names.
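
    Regarding the depends_on note above: one common way to express that dependency in Pulumi is to build an explicit databricks.Provider from the workspace's outputs and pass it to the lookup via invoke options, so the lookup cannot run before the workspace exists. A minimal TypeScript sketch, assuming a databricks.MwsWorkspaces resource named `workspace` already exists elsewhere in the program:

    import * as databricks from "@pulumi/databricks";

    // Assumed to be created elsewhere in this program (hypothetical name).
    declare const workspace: databricks.MwsWorkspaces;

    // Building the provider from workspace outputs makes it (and anything
    // that uses it) wait until the workspace has been created.
    const workspaceProvider = new databricks.Provider("workspace", {
        host: workspace.workspaceUrl,
    });

    // Target the new workspace explicitly instead of relying on default credentials.
    const jobs = databricks.getJobsOutput({}, { provider: workspaceProvider });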

    Example Usage

    Granting view databricks.Permissions to all databricks.Job resources within the workspace:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    export = async () => {
        const _this = await databricks.getJobs({});
        const everyoneCanViewAllJobs: databricks.Permissions[] = [];
        for (const range of Object.entries(_this.ids).map(([k, v]) => ({key: k, value: v}))) {
            everyoneCanViewAllJobs.push(new databricks.Permissions(`everyone_can_view_all_jobs-${range.key}`, {
                jobId: range.value,
                accessControls: [{
                    groupName: "users",
                    permissionLevel: "CAN_VIEW",
                }],
            }));
        }
    }
    
    import pulumi
    import pulumi_databricks as databricks
    
    this = databricks.get_jobs()
    everyone_can_view_all_jobs = []
    for range in [{"key": k, "value": v} for [k, v] in this.ids.items()]:
        everyone_can_view_all_jobs.append(databricks.Permissions(f"everyone_can_view_all_jobs-{range['key']}",
            job_id=range["value"],
            access_controls=[{
                "group_name": "users",
                "permission_level": "CAN_VIEW",
            }]))
    
    package main
    
    import (
    	"fmt"

    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		this, err := databricks.GetJobs(ctx, &databricks.GetJobsArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		var everyoneCanViewAllJobs []*databricks.Permissions
    		for key0, val0 := range this.Ids {
    			__res, err := databricks.NewPermissions(ctx, fmt.Sprintf("everyone_can_view_all_jobs-%v", key0), &databricks.PermissionsArgs{
    				JobId: pulumi.String(val0),
    				AccessControls: databricks.PermissionsAccessControlArray{
    					&databricks.PermissionsAccessControlArgs{
    						GroupName:       pulumi.String("users"),
    						PermissionLevel: pulumi.String("CAN_VIEW"),
    					},
    				},
    			})
    			if err != nil {
    				return err
    			}
    			everyoneCanViewAllJobs = append(everyoneCanViewAllJobs, __res)
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading.Tasks;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(async() => 
    {
        var @this = await Databricks.GetJobs.InvokeAsync();
    
        var everyoneCanViewAllJobs = new List<Databricks.Permissions>();
    foreach (var range in @this.Ids)
        {
            everyoneCanViewAllJobs.Add(new Databricks.Permissions($"everyone_can_view_all_jobs-{range.Key}", new()
            {
                JobId = range.Value,
                AccessControls = new[]
                {
                    new Databricks.Inputs.PermissionsAccessControlArgs
                    {
                        GroupName = "users",
                        PermissionLevel = "CAN_VIEW",
                    },
                },
            }));
        }
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetJobsArgs;
    import com.pulumi.databricks.Permissions;
    import com.pulumi.databricks.PermissionsArgs;
    import com.pulumi.databricks.inputs.PermissionsAccessControlArgs;
    import com.pulumi.codegen.internal.KeyedValue;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
        final var this_ = DatabricksFunctions.getJobs();

        final var everyoneCanViewAllJobs = this_.applyValue(getJobsResult -> {
            final var resources = new ArrayList<Permissions>();
            for (var range : getJobsResult.ids().entrySet()) {
                var resource = new Permissions("everyoneCanViewAllJobs-" + range.getKey(), PermissionsArgs.builder()
                    .jobId(range.getValue())
                        .accessControls(PermissionsAccessControlArgs.builder()
                            .groupName("users")
                            .permissionLevel("CAN_VIEW")
                            .build())
                        .build());
    
                    resources.add(resource);
                }
    
                return resources;
            });
    
        }
    }
    
    resources:
      everyoneCanViewAllJobs:
        type: databricks:Permissions
        name: everyone_can_view_all_jobs
        properties:
          jobId: ${range.value}
          accessControls:
            - groupName: users
              permissionLevel: CAN_VIEW
        options: {}
    variables:
      this:
        fn::invoke:
          Function: databricks:getJobs
          Arguments: {}
    

    Getting the ID of a specific databricks.Job by name:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const _this = databricks.getJobs({});
    export const x = _this.then(_this => `ID of \`x\` job is ${_this.ids?.["x"]}`);
    
    import pulumi
    import pulumi_databricks as databricks
    
    this = databricks.get_jobs()
    pulumi.export("x", f"ID of `x` job is {this.ids['x']}")
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		this, err := databricks.GetJobs(ctx, &databricks.GetJobsArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		ctx.Export("x", pulumi.Sprintf("ID of `x` job is %v", this.Ids["x"]))
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var @this = Databricks.GetJobs.Invoke();
    
        return new Dictionary<string, object?>
        {
            ["x"] = @this.Apply(@this => $"ID of `x` job is {@this.Apply(getJobsResult => getJobsResult.Ids?.X)}"),
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetJobsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
        final var this_ = DatabricksFunctions.getJobs();

        ctx.export("x", this_.applyValue(getJobsResult -> String.format("ID of `x` job is %s", getJobsResult.ids().get("x"))));
        }
    }
    
    variables:
      this:
        fn::invoke:
          Function: databricks:getJobs
          Arguments: {}
    outputs:
      x: ID of `x` job is ${this.ids.x}
    

    The following resources are used in the same context:

    • databricks.Job to manage Databricks Jobs to run non-interactive code in a databricks_cluster.

    Using getJobs

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getJobs(args: GetJobsArgs, opts?: InvokeOptions): Promise<GetJobsResult>
    function getJobsOutput(args: GetJobsOutputArgs, opts?: InvokeOptions): Output<GetJobsResult>
    def get_jobs(ids: Optional[Mapping[str, str]] = None,
                 opts: Optional[InvokeOptions] = None) -> GetJobsResult
    def get_jobs_output(ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 opts: Optional[InvokeOptions] = None) -> Output[GetJobsResult]
    func GetJobs(ctx *Context, args *GetJobsArgs, opts ...InvokeOption) (*GetJobsResult, error)
    func GetJobsOutput(ctx *Context, args *GetJobsOutputArgs, opts ...InvokeOption) GetJobsResultOutput

    > Note: This function is named GetJobs in the Go SDK.

    public static class GetJobs 
    {
        public static Task<GetJobsResult> InvokeAsync(GetJobsArgs args, InvokeOptions? opts = null)
        public static Output<GetJobsResult> Invoke(GetJobsInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetJobsResult> getJobs(GetJobsArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: databricks:index/getJobs:getJobs
      arguments:
        # arguments dictionary
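
    For illustration only, a short TypeScript sketch contrasting the two forms described above; `jobCount` and `jobNames` are just example export names:

    import * as databricks from "@pulumi/databricks";

    export = async () => {
        // Direct form: plain arguments, Promise-wrapped result.
        const direct = await databricks.getJobs({});

        // Output form: Input-wrapped arguments, Output-wrapped result,
        // composable with other resource outputs.
        const output = databricks.getJobsOutput({});

        return {
            jobCount: Object.keys(direct.ids).length,
            jobNames: output.apply(r => Object.keys(r.ids)),
        };
    };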

    The following arguments are supported:

    ids: map of databricks.Job names to ids
        C#:         Ids Dictionary<string, string>
        Go:         Ids map[string]string
        Java:       ids Map<String,String>
        TypeScript: ids {[key: string]: string}
        Python:     ids Mapping[str, str]
        YAML:       ids Map<String>

    getJobs Result

    The following output properties are available:

    id: The provider-assigned unique ID for this managed resource.
        C#:         Id string
        Go:         Id string
        Java:       id String
        TypeScript: id string
        Python:     id str
        YAML:       id String
    ids: map of databricks.Job names to ids
        C#:         Ids Dictionary<string, string>
        Go:         Ids map[string]string
        Java:       ids Map<String,String>
        TypeScript: ids {[key: string]: string}
        Python:     ids Mapping[str, str]
        YAML:       ids Map<String>

    Package Details

    Repository: pulumi/pulumi-databricks
    License: Apache-2.0
    Notes: This Pulumi package is based on the databricks Terraform Provider.