databricks.MlflowWebhook

Databricks v1.56.0 published on Tuesday, Nov 12, 2024 by Pulumi

    This resource allows you to create MLflow Model Registry Webhooks in Databricks. Webhooks enable you to listen for Model Registry events so your integrations can automatically trigger actions. You can use webhooks to automate and integrate your machine learning pipeline with existing CI/CD tools and workflows. Webhooks can trigger execution of a Databricks job or call a web service on specific events generated in the MLflow Model Registry, such as a stage transition, the creation of a registered model, or the creation of a transition request.

    Example Usage

    Triggering a Databricks job

    TypeScript:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    import * as std from "@pulumi/std";
    
    const me = databricks.getCurrentUser({});
    const latest = databricks.getSparkVersion({});
    const smallest = databricks.getNodeType({
        localDisk: true,
    });
    const _this = new databricks.Notebook("this", {
        path: me.then(me => `${me.home}/MLFlowWebhook`),
        language: "PYTHON",
        contentBase64: std.base64encode({
            input: `import json
     
    event_message = dbutils.widgets.get("event_message")
    event_message_dict = json.loads(event_message)
    print(f"event data={event_message_dict}")
    `,
        }).then(invoke => invoke.result),
    });
    const thisJob = new databricks.Job("this", {
        name: me.then(me => `Pulumi MLflowWebhook Demo (${me.alphanumeric})`),
        tasks: [{
            taskKey: "task1",
            newCluster: {
                numWorkers: 1,
                sparkVersion: latest.then(latest => latest.id),
                nodeTypeId: smallest.then(smallest => smallest.id),
            },
            notebookTask: {
                notebookPath: _this.path,
            },
        }],
    });
    const patForWebhook = new databricks.Token("pat_for_webhook", {
        comment: "MLflow Webhook",
        lifetimeSeconds: 86400000,
    });
    const job = new databricks.MlflowWebhook("job", {
        events: ["TRANSITION_REQUEST_CREATED"],
        description: "Databricks Job webhook trigger",
        status: "ACTIVE",
        jobSpec: {
            jobId: thisJob.id,
            workspaceUrl: me.then(me => me.workspaceUrl),
            accessToken: patForWebhook.tokenValue,
        },
    });
    
    Python:

    import pulumi
    import pulumi_databricks as databricks
    import pulumi_std as std
    
    me = databricks.get_current_user()
    latest = databricks.get_spark_version()
    smallest = databricks.get_node_type(local_disk=True)
    this = databricks.Notebook("this",
        path=f"{me.home}/MLFlowWebhook",
        language="PYTHON",
        content_base64=std.base64encode(input="""import json
     
    event_message = dbutils.widgets.get("event_message")
    event_message_dict = json.loads(event_message)
    print(f"event data={event_message_dict}")
    """).result)
    this_job = databricks.Job("this",
        name=f"Pulumi MLflowWebhook Demo ({me.alphanumeric})",
        tasks=[{
            "task_key": "task1",
            "new_cluster": {
                "num_workers": 1,
                "spark_version": latest.id,
                "node_type_id": smallest.id,
            },
            "notebook_task": {
                "notebook_path": this.path,
            },
        }])
    pat_for_webhook = databricks.Token("pat_for_webhook",
        comment="MLflow Webhook",
        lifetime_seconds=86400000)
    job = databricks.MlflowWebhook("job",
        events=["TRANSITION_REQUEST_CREATED"],
        description="Databricks Job webhook trigger",
        status="ACTIVE",
        job_spec={
            "job_id": this_job.id,
            "workspace_url": me.workspace_url,
            "access_token": pat_for_webhook.token_value,
        })
    
    Go:

    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi-std/sdk/go/std"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		me, err := databricks.GetCurrentUser(ctx, map[string]interface{}{}, nil)
    		if err != nil {
    			return err
    		}
    		latest, err := databricks.GetSparkVersion(ctx, &databricks.GetSparkVersionArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		smallest, err := databricks.GetNodeType(ctx, &databricks.GetNodeTypeArgs{
    			LocalDisk: pulumi.BoolRef(true),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		invokeBase64encode, err := std.Base64encode(ctx, &std.Base64encodeArgs{
    			Input: `import json
     
    event_message = dbutils.widgets.get("event_message")
    event_message_dict = json.loads(event_message)
    print(f"event data={event_message_dict}")
    `,
    		}, nil)
    		if err != nil {
    			return err
    		}
    		this, err := databricks.NewNotebook(ctx, "this", &databricks.NotebookArgs{
    			Path:          pulumi.Sprintf("%v/MLFlowWebhook", me.Home),
    			Language:      pulumi.String("PYTHON"),
    			ContentBase64: pulumi.String(invokeBase64encode.Result),
    		})
    		if err != nil {
    			return err
    		}
    		thisJob, err := databricks.NewJob(ctx, "this", &databricks.JobArgs{
    			Name: pulumi.Sprintf("Pulumi MLflowWebhook Demo (%v)", me.Alphanumeric),
    			Tasks: databricks.JobTaskArray{
    				&databricks.JobTaskArgs{
    					TaskKey: pulumi.String("task1"),
    					NewCluster: &databricks.JobTaskNewClusterArgs{
    						NumWorkers:   pulumi.Int(1),
    						SparkVersion: pulumi.String(latest.Id),
    						NodeTypeId:   pulumi.String(smallest.Id),
    					},
    					NotebookTask: &databricks.JobTaskNotebookTaskArgs{
    						NotebookPath: this.Path,
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		patForWebhook, err := databricks.NewToken(ctx, "pat_for_webhook", &databricks.TokenArgs{
    			Comment:         pulumi.String("MLflow Webhook"),
    			LifetimeSeconds: pulumi.Int(86400000),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewMlflowWebhook(ctx, "job", &databricks.MlflowWebhookArgs{
    			Events: pulumi.StringArray{
    				pulumi.String("TRANSITION_REQUEST_CREATED"),
    			},
    			Description: pulumi.String("Databricks Job webhook trigger"),
    			Status:      pulumi.String("ACTIVE"),
    			JobSpec: &databricks.MlflowWebhookJobSpecArgs{
    				JobId:        thisJob.ID(),
    				WorkspaceUrl: pulumi.String(me.WorkspaceUrl),
    				AccessToken:  patForWebhook.TokenValue,
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#:

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    using Std = Pulumi.Std;
    
    return await Deployment.RunAsync(() => 
    {
        var me = Databricks.GetCurrentUser.Invoke();
    
        var latest = Databricks.GetSparkVersion.Invoke();
    
        var smallest = Databricks.GetNodeType.Invoke(new()
        {
            LocalDisk = true,
        });
    
        var @this = new Databricks.Notebook("this", new()
        {
            Path = $"{me.Apply(getCurrentUserResult => getCurrentUserResult.Home)}/MLFlowWebhook",
            Language = "PYTHON",
            ContentBase64 = Std.Base64encode.Invoke(new()
            {
                Input = @"import json
     
    event_message = dbutils.widgets.get(""event_message"")
    event_message_dict = json.loads(event_message)
    print(f""event data={event_message_dict}"")
    ",
            }).Apply(invoke => invoke.Result),
        });
    
        var thisJob = new Databricks.Job("this", new()
        {
            Name = $"Pulumi MLflowWebhook Demo ({me.Apply(getCurrentUserResult => getCurrentUserResult.Alphanumeric)})",
            Tasks = new[]
            {
                new Databricks.Inputs.JobTaskArgs
                {
                    TaskKey = "task1",
                    NewCluster = new Databricks.Inputs.JobTaskNewClusterArgs
                    {
                        NumWorkers = 1,
                        SparkVersion = latest.Apply(getSparkVersionResult => getSparkVersionResult.Id),
                        NodeTypeId = smallest.Apply(getNodeTypeResult => getNodeTypeResult.Id),
                    },
                    NotebookTask = new Databricks.Inputs.JobTaskNotebookTaskArgs
                    {
                        NotebookPath = @this.Path,
                    },
                },
            },
        });
    
        var patForWebhook = new Databricks.Token("pat_for_webhook", new()
        {
            Comment = "MLflow Webhook",
            LifetimeSeconds = 86400000,
        });
    
        var job = new Databricks.MlflowWebhook("job", new()
        {
            Events = new[]
            {
                "TRANSITION_REQUEST_CREATED",
            },
            Description = "Databricks Job webhook trigger",
            Status = "ACTIVE",
            JobSpec = new Databricks.Inputs.MlflowWebhookJobSpecArgs
            {
                JobId = thisJob.Id,
                WorkspaceUrl = me.Apply(getCurrentUserResult => getCurrentUserResult.WorkspaceUrl),
                AccessToken = patForWebhook.TokenValue,
            },
        });
    
    });
    
    Java:

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.DatabricksFunctions;
    import com.pulumi.databricks.inputs.GetSparkVersionArgs;
    import com.pulumi.databricks.inputs.GetNodeTypeArgs;
    import com.pulumi.databricks.Notebook;
    import com.pulumi.databricks.NotebookArgs;
    import com.pulumi.databricks.Job;
    import com.pulumi.databricks.JobArgs;
    import com.pulumi.databricks.inputs.JobTaskArgs;
    import com.pulumi.databricks.inputs.JobTaskNewClusterArgs;
    import com.pulumi.databricks.inputs.JobTaskNotebookTaskArgs;
    import com.pulumi.databricks.Token;
    import com.pulumi.databricks.TokenArgs;
    import com.pulumi.databricks.MlflowWebhook;
    import com.pulumi.databricks.MlflowWebhookArgs;
    import com.pulumi.databricks.inputs.MlflowWebhookJobSpecArgs;
    import com.pulumi.std.StdFunctions;
    import com.pulumi.std.inputs.Base64encodeArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var me = DatabricksFunctions.getCurrentUser();
    
            final var latest = DatabricksFunctions.getSparkVersion();
    
            final var smallest = DatabricksFunctions.getNodeType(GetNodeTypeArgs.builder()
                .localDisk(true)
                .build());
    
            var this_ = new Notebook("this", NotebookArgs.builder()
            .path(me.applyValue(getCurrentUserResult -> String.format("%s/MLFlowWebhook", getCurrentUserResult.home())))
                .language("PYTHON")
                .contentBase64(StdFunctions.base64encode(Base64encodeArgs.builder()
                    .input("""
    import json
     
    event_message = dbutils.widgets.get("event_message")
    event_message_dict = json.loads(event_message)
    print(f"event data={event_message_dict}")
                    """)
                .build()).applyValue(invoke -> invoke.result()))
                .build());
    
            var thisJob = new Job("thisJob", JobArgs.builder()
            .name(me.applyValue(getCurrentUserResult -> String.format("Pulumi MLflowWebhook Demo (%s)", getCurrentUserResult.alphanumeric())))
                .tasks(JobTaskArgs.builder()
                    .taskKey("task1")
                    .newCluster(JobTaskNewClusterArgs.builder()
                        .numWorkers(1)
                        .sparkVersion(latest.applyValue(getSparkVersionResult -> getSparkVersionResult.id()))
                        .nodeTypeId(smallest.applyValue(getNodeTypeResult -> getNodeTypeResult.id()))
                        .build())
                    .notebookTask(JobTaskNotebookTaskArgs.builder()
                        .notebookPath(this_.path())
                        .build())
                    .build())
                .build());
    
            var patForWebhook = new Token("patForWebhook", TokenArgs.builder()
                .comment("MLflow Webhook")
                .lifetimeSeconds(86400000)
                .build());
    
            var job = new MlflowWebhook("job", MlflowWebhookArgs.builder()
                .events("TRANSITION_REQUEST_CREATED")
                .description("Databricks Job webhook trigger")
                .status("ACTIVE")
                .jobSpec(MlflowWebhookJobSpecArgs.builder()
                    .jobId(thisJob.id())
                    .workspaceUrl(me.applyValue(getCurrentUserResult -> getCurrentUserResult.workspaceUrl()))
                    .accessToken(patForWebhook.tokenValue())
                    .build())
                .build());
    
        }
    }
    
    YAML:

    resources:
      this:
        type: databricks:Notebook
        properties:
          path: ${me.home}/MLFlowWebhook
          language: PYTHON
          contentBase64:
            fn::invoke:
              Function: std:base64encode
              Arguments:
                input: "import json\n \nevent_message = dbutils.widgets.get(\"event_message\")\nevent_message_dict = json.loads(event_message)\nprint(f\"event data={event_message_dict}\")\n"
              Return: result
      thisJob:
        type: databricks:Job
        name: this
        properties:
          name: Pulumi MLflowWebhook Demo (${me.alphanumeric})
          tasks:
            - taskKey: task1
              newCluster:
                numWorkers: 1
                sparkVersion: ${latest.id}
                nodeTypeId: ${smallest.id}
              notebookTask:
                notebookPath: ${this.path}
      patForWebhook:
        type: databricks:Token
        name: pat_for_webhook
        properties:
          comment: MLflow Webhook
          lifetimeSeconds: 86400000
      job:
        type: databricks:MlflowWebhook
        properties:
          events:
            - TRANSITION_REQUEST_CREATED
          description: Databricks Job webhook trigger
          status: ACTIVE
          jobSpec:
            jobId: ${thisJob.id}
            workspaceUrl: ${me.workspaceUrl}
            accessToken: ${patForWebhook.tokenValue}
    variables:
      me:
        fn::invoke:
          Function: databricks:getCurrentUser
          Arguments: {}
      latest:
        fn::invoke:
          Function: databricks:getSparkVersion
          Arguments: {}
      smallest:
        fn::invoke:
          Function: databricks:getNodeType
          Arguments:
            localDisk: true
    

    POSTing to a URL

    TypeScript:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    const url = new databricks.MlflowWebhook("url", {
        events: ["TRANSITION_REQUEST_CREATED"],
        description: "URL webhook trigger",
        httpUrlSpec: {
            url: "https://my_cool_host/webhook",
        },
    });
    
    Python:

    import pulumi
    import pulumi_databricks as databricks
    
    url = databricks.MlflowWebhook("url",
        events=["TRANSITION_REQUEST_CREATED"],
        description="URL webhook trigger",
        http_url_spec={
            "url": "https://my_cool_host/webhook",
        })
    
    Go:

    package main
    
    import (
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := databricks.NewMlflowWebhook(ctx, "url", &databricks.MlflowWebhookArgs{
    			Events: pulumi.StringArray{
    				pulumi.String("TRANSITION_REQUEST_CREATED"),
    			},
    			Description: pulumi.String("URL webhook trigger"),
    			HttpUrlSpec: &databricks.MlflowWebhookHttpUrlSpecArgs{
    				Url: pulumi.String("https://my_cool_host/webhook"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#:

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var url = new Databricks.MlflowWebhook("url", new()
        {
            Events = new[]
            {
                "TRANSITION_REQUEST_CREATED",
            },
            Description = "URL webhook trigger",
            HttpUrlSpec = new Databricks.Inputs.MlflowWebhookHttpUrlSpecArgs
            {
                Url = "https://my_cool_host/webhook",
            },
        });
    
    });
    
    Java:

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.MlflowWebhook;
    import com.pulumi.databricks.MlflowWebhookArgs;
    import com.pulumi.databricks.inputs.MlflowWebhookHttpUrlSpecArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var url = new MlflowWebhook("url", MlflowWebhookArgs.builder()
                .events("TRANSITION_REQUEST_CREATED")
                .description("URL webhook trigger")
                .httpUrlSpec(MlflowWebhookHttpUrlSpecArgs.builder()
                    .url("https://my_cool_host/webhook")
                    .build())
                .build());
    
        }
    }
    
    YAML:

    resources:
      url:
        type: databricks:MlflowWebhook
        properties:
          events:
            - TRANSITION_REQUEST_CREATED
          description: URL webhook trigger
          httpUrlSpec:
            url: https://my_cool_host/webhook
    

    Access Control

    • MLflow webhooks can be configured only by workspace admins.

    Create MlflowWebhook Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new MlflowWebhook(name: string, args: MlflowWebhookArgs, opts?: CustomResourceOptions);
    @overload
    def MlflowWebhook(resource_name: str,
                      args: MlflowWebhookArgs,
                      opts: Optional[ResourceOptions] = None)
    
    @overload
    def MlflowWebhook(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      events: Optional[Sequence[str]] = None,
                      description: Optional[str] = None,
                      http_url_spec: Optional[MlflowWebhookHttpUrlSpecArgs] = None,
                      job_spec: Optional[MlflowWebhookJobSpecArgs] = None,
                      model_name: Optional[str] = None,
                      status: Optional[str] = None)
    func NewMlflowWebhook(ctx *Context, name string, args MlflowWebhookArgs, opts ...ResourceOption) (*MlflowWebhook, error)
    public MlflowWebhook(string name, MlflowWebhookArgs args, CustomResourceOptions? opts = null)
    public MlflowWebhook(String name, MlflowWebhookArgs args)
    public MlflowWebhook(String name, MlflowWebhookArgs args, CustomResourceOptions options)
    
    type: databricks:MlflowWebhook
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args MlflowWebhookArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args MlflowWebhookArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args MlflowWebhookArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args MlflowWebhookArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args MlflowWebhookArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    C#:

    var mlflowWebhookResource = new Databricks.MlflowWebhook("mlflowWebhookResource", new()
    {
        Events = new[]
        {
            "string",
        },
        Description = "string",
        HttpUrlSpec = new Databricks.Inputs.MlflowWebhookHttpUrlSpecArgs
        {
            Url = "string",
            Authorization = "string",
            EnableSslVerification = false,
            Secret = "string",
        },
        JobSpec = new Databricks.Inputs.MlflowWebhookJobSpecArgs
        {
            AccessToken = "string",
            JobId = "string",
            WorkspaceUrl = "string",
        },
        ModelName = "string",
        Status = "string",
    });
    
    Go:

    example, err := databricks.NewMlflowWebhook(ctx, "mlflowWebhookResource", &databricks.MlflowWebhookArgs{
    	Events: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	Description: pulumi.String("string"),
    	HttpUrlSpec: &databricks.MlflowWebhookHttpUrlSpecArgs{
    		Url:                   pulumi.String("string"),
    		Authorization:         pulumi.String("string"),
    		EnableSslVerification: pulumi.Bool(false),
    		Secret:                pulumi.String("string"),
    	},
    	JobSpec: &databricks.MlflowWebhookJobSpecArgs{
    		AccessToken:  pulumi.String("string"),
    		JobId:        pulumi.String("string"),
    		WorkspaceUrl: pulumi.String("string"),
    	},
    	ModelName: pulumi.String("string"),
    	Status:    pulumi.String("string"),
    })
    
    Java:

    var mlflowWebhookResource = new MlflowWebhook("mlflowWebhookResource", MlflowWebhookArgs.builder()
        .events("string")
        .description("string")
        .httpUrlSpec(MlflowWebhookHttpUrlSpecArgs.builder()
            .url("string")
            .authorization("string")
            .enableSslVerification(false)
            .secret("string")
            .build())
        .jobSpec(MlflowWebhookJobSpecArgs.builder()
            .accessToken("string")
            .jobId("string")
            .workspaceUrl("string")
            .build())
        .modelName("string")
        .status("string")
        .build());
    
    Python:

    mlflow_webhook_resource = databricks.MlflowWebhook("mlflowWebhookResource",
        events=["string"],
        description="string",
        http_url_spec={
            "url": "string",
            "authorization": "string",
            "enable_ssl_verification": False,
            "secret": "string",
        },
        job_spec={
            "access_token": "string",
            "job_id": "string",
            "workspace_url": "string",
        },
        model_name="string",
        status="string")
    
    TypeScript:

    const mlflowWebhookResource = new databricks.MlflowWebhook("mlflowWebhookResource", {
        events: ["string"],
        description: "string",
        httpUrlSpec: {
            url: "string",
            authorization: "string",
            enableSslVerification: false,
            secret: "string",
        },
        jobSpec: {
            accessToken: "string",
            jobId: "string",
            workspaceUrl: "string",
        },
        modelName: "string",
        status: "string",
    });
    
    YAML:

    type: databricks:MlflowWebhook
    properties:
        description: string
        events:
            - string
        httpUrlSpec:
            authorization: string
            enableSslVerification: false
            secret: string
            url: string
        jobSpec:
            accessToken: string
            jobId: string
            workspaceUrl: string
        modelName: string
        status: string
    

    MlflowWebhook Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
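
    For instance, here is a minimal sketch of the two equivalent forms (the resource names and URL are illustrative):

    import pulumi_databricks as databricks

    # Argument-class form:
    hook_args = databricks.MlflowWebhook("hook-args",
        events=["MODEL_VERSION_CREATED"],
        http_url_spec=databricks.MlflowWebhookHttpUrlSpecArgs(
            url="https://example.com/webhook"))

    # Equivalent dictionary-literal form:
    hook_dict = databricks.MlflowWebhook("hook-dict",
        events=["MODEL_VERSION_CREATED"],
        http_url_spec={"url": "https://example.com/webhook"})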

    The MlflowWebhook resource accepts the following input properties:

    Events List<string>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    Description string
    Optional description of the MLflow webhook.
    HttpUrlSpec MlflowWebhookHttpUrlSpec
    JobSpec MlflowWebhookJobSpec
    ModelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    Status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    Events []string

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    Description string
    Optional description of the MLflow webhook.
    HttpUrlSpec MlflowWebhookHttpUrlSpecArgs
    JobSpec MlflowWebhookJobSpecArgs
    ModelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    Status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    events List<String>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    description String
    Optional description of the MLflow webhook.
    httpUrlSpec MlflowWebhookHttpUrlSpec
    jobSpec MlflowWebhookJobSpec
    modelName String
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status String
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    events string[]

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    description string
    Optional description of the MLflow webhook.
    httpUrlSpec MlflowWebhookHttpUrlSpec
    jobSpec MlflowWebhookJobSpec
    modelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    events Sequence[str]

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    description str
    Optional description of the MLflow webhook.
    http_url_spec MlflowWebhookHttpUrlSpecArgs
    job_spec MlflowWebhookJobSpecArgs
    model_name str
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status str
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    events List<String>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    description String
    Optional description of the MLflow webhook.
    httpUrlSpec Property Map
    jobSpec Property Map
    modelName String
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status String
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the MlflowWebhook resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing MlflowWebhook Resource

    Get an existing MlflowWebhook resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: MlflowWebhookState, opts?: CustomResourceOptions): MlflowWebhook
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            description: Optional[str] = None,
            events: Optional[Sequence[str]] = None,
            http_url_spec: Optional[MlflowWebhookHttpUrlSpecArgs] = None,
            job_spec: Optional[MlflowWebhookJobSpecArgs] = None,
            model_name: Optional[str] = None,
            status: Optional[str] = None) -> MlflowWebhook
    func GetMlflowWebhook(ctx *Context, name string, id IDInput, state *MlflowWebhookState, opts ...ResourceOption) (*MlflowWebhook, error)
    public static MlflowWebhook Get(string name, Input<string> id, MlflowWebhookState? state, CustomResourceOptions? opts = null)
    public static MlflowWebhook get(String name, Output<String> id, MlflowWebhookState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
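
    For example, in Python (the resource ID shown is a placeholder):

    import pulumi_databricks as databricks

    # Look up an existing webhook by its provider-assigned ID so its
    # state can be referenced from this program.
    existing = databricks.MlflowWebhook.get("existing-webhook", "a1b2c3d4e5")
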
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Description string
    Optional description of the MLflow webhook.
    Events List<string>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    HttpUrlSpec MlflowWebhookHttpUrlSpec
    JobSpec MlflowWebhookJobSpec
    ModelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    Status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    Description string
    Optional description of the MLflow webhook.
    Events []string

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    HttpUrlSpec MlflowWebhookHttpUrlSpecArgs
    JobSpec MlflowWebhookJobSpecArgs
    ModelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    Status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    description String
    Optional description of the MLflow webhook.
    events List<String>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    httpUrlSpec MlflowWebhookHttpUrlSpec
    jobSpec MlflowWebhookJobSpec
    modelName String
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status String
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    description string
    Optional description of the MLflow webhook.
    events string[]

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    httpUrlSpec MlflowWebhookHttpUrlSpec
    jobSpec MlflowWebhookJobSpec
    modelName string
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status string
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    description str
    Optional description of the MLflow webhook.
    events Sequence[str]

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    http_url_spec MlflowWebhookHttpUrlSpecArgs
    job_spec MlflowWebhookJobSpecArgs
    model_name str
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status str
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.
    description String
    Optional description of the MLflow webhook.
    events List<String>

    The list of events that will trigger execution of a Databricks job or a POST to a URL, for example MODEL_VERSION_CREATED, MODEL_VERSION_TRANSITIONED_STAGE, or TRANSITION_REQUEST_CREATED. Refer to the Webhooks API documentation for the full list of supported events.

    The configuration must include exactly one of the http_url_spec or job_spec blocks.

    httpUrlSpec Property Map
    jobSpec Property Map
    modelName String
    Name of MLflow model for which webhook will be created. If the model name is not specified, a registry-wide webhook is created that listens for the specified events across all versions of all registered models.
    status String
    Optional status of webhook. Possible values are ACTIVE, TEST_MODE, DISABLED. Default is ACTIVE.

    Supporting Types

    MlflowWebhookHttpUrlSpec, MlflowWebhookHttpUrlSpecArgs

    Url string
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    Authorization string
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    EnableSslVerification bool
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    Secret string
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
    Url string
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    Authorization string
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    EnableSslVerification bool
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    Secret string
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
    url String
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    authorization String
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    enableSslVerification Boolean
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    secret String
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
    url string
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    authorization string
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    enableSslVerification boolean
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    secret string
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
    url str
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    authorization str
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    enable_ssl_verification bool
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    secret str
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
    url String
    External HTTPS URL called on the event trigger (via a POST request). The structure of the payload depends on the event type; refer to the documentation for details.
    authorization String
    Value of the authorization header sent in the webhook's request. It should be of the form <auth type> <credentials>, e.g. Bearer <access_token>. If set to an empty string, no authorization header will be included in the request.
    enableSslVerification Boolean
    Enable/disable SSL certificate validation. Default is true. For self-signed certificates, this field must be false AND the destination server must disable certificate validation as well. If validation is disabled, it is encouraged to validate the shared secret via the HMAC-encoded portion of the payload and to acknowledge the risk that requests may be maliciously routed to an unintended host.
    secret String
    Shared secret required for HMAC encoding of the payload. The HMAC-encoded payload will be sent in the header as X-Databricks-Signature: encoded_payload.
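
    As an illustration of consuming that header, below is a minimal Python sketch of server-side verification. It assumes the header value is the HMAC-SHA256 hex digest of the raw request body; encoding details may differ, so consult the Databricks webhook documentation for your workspace.

    import hashlib
    import hmac

    def is_valid_signature(raw_body: bytes, header_value: str, secret: str) -> bool:
        # Recompute the HMAC-SHA256 digest of the payload using the shared
        # secret and compare it to the X-Databricks-Signature header value
        # in constant time.
        expected = hmac.new(secret.encode("utf-8"), raw_body, hashlib.sha256).hexdigest()
        return hmac.compare_digest(expected, header_value)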

    MlflowWebhookJobSpec, MlflowWebhookJobSpecArgs

    AccessToken string
    The personal access token used to authorize webhook's job runs.
    JobId string
    ID of the Databricks job that the webhook runs.
    WorkspaceUrl string
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.
    AccessToken string
    The personal access token used to authorize webhook's job runs.
    JobId string
    ID of the Databricks job that the webhook runs.
    WorkspaceUrl string
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.
    accessToken String
    The personal access token used to authorize webhook's job runs.
    jobId String
    ID of the Databricks job that the webhook runs.
    workspaceUrl String
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.
    accessToken string
    The personal access token used to authorize webhook's job runs.
    jobId string
    ID of the Databricks job that the webhook runs.
    workspaceUrl string
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.
    access_token str
    The personal access token used to authorize webhook's job runs.
    job_id str
    ID of the Databricks job that the webhook runs.
    workspace_url str
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.
    accessToken String
    The personal access token used to authorize webhook's job runs.
    jobId String
    ID of the Databricks job that the webhook runs.
    workspaceUrl String
    URL of the workspace containing the job that this webhook runs. If not specified, the job’s workspace URL is assumed to be the same as the workspace where the webhook is created.

    Import

    Importing this resource is not currently supported.

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.