databricks.MlflowExperiment
This resource allows you to manage MLflow experiments in Databricks.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
const me = databricks.getCurrentUser({});
const _this = new databricks.MlflowExperiment("this", {
name: me.then(me => `${me.home}/Sample`),
artifactLocation: "dbfs:/tmp/my-experiment",
description: "My MLflow experiment description",
});
import pulumi
import pulumi_databricks as databricks
me = databricks.get_current_user()
this = databricks.MlflowExperiment("this",
name=f"{me.home}/Sample",
artifact_location="dbfs:/tmp/my-experiment",
description="My MLflow experiment description")
package main
import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
me, err := databricks.GetCurrentUser(ctx, map[string]interface{}{}, nil)
if err != nil {
return err
}
_, err = databricks.NewMlflowExperiment(ctx, "this", &databricks.MlflowExperimentArgs{
Name: pulumi.Sprintf("%v/Sample", me.Home),
ArtifactLocation: pulumi.String("dbfs:/tmp/my-experiment"),
Description: pulumi.String("My MLflow experiment description"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(() =>
{
var me = Databricks.GetCurrentUser.Invoke();
var @this = new Databricks.MlflowExperiment("this", new()
{
Name = me.Apply(getCurrentUserResult => $"{getCurrentUserResult.Home}/Sample"),
ArtifactLocation = "dbfs:/tmp/my-experiment",
Description = "My MLflow experiment description",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.MlflowExperiment;
import com.pulumi.databricks.MlflowExperimentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var me = DatabricksFunctions.getCurrentUser();
var this_ = new MlflowExperiment("this", MlflowExperimentArgs.builder()
.name(me.applyValue(getCurrentUserResult -> String.format("%s/Sample", getCurrentUserResult.home())))
.artifactLocation("dbfs:/tmp/my-experiment")
.description("My MLflow experiment description")
.build());
}
}
resources:
  this:
    type: databricks:MlflowExperiment
    properties:
      name: ${me.home}/Sample
      artifactLocation: dbfs:/tmp/my-experiment
      description: My MLflow experiment description
variables:
  me:
    fn::invoke:
      Function: databricks:getCurrentUser
      Arguments: {}
Access Control
- databricks.Permissions can control which groups or individual users can Read, Edit, or Manage individual experiments.
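A minimal TypeScript sketch follows (the data-scientists group, the experiment path, and the CAN_EDIT level are illustrative placeholders, not values from this page):
import * as databricks from "@pulumi/databricks";

// An experiment to protect; the workspace path is a placeholder.
const experiment = new databricks.MlflowExperiment("shared", {
    name: "/Users/someone@example.com/Shared",
});

// Grant a (hypothetical) workspace group edit rights on that experiment.
const experimentAccess = new databricks.Permissions("experiment-access", {
    experimentId: experiment.id,
    accessControls: [{
        groupName: "data-scientists",
        permissionLevel: "CAN_EDIT",
    }],
});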
Related Resources
The following resources are often used in the same context:
- databricks.RegisteredModel to create Models in Unity Catalog in Databricks.
- End to end workspace management guide.
- databricks.Directory to manage directories in Databricks Workspace.
- databricks.MlflowModel to create models in the workspace model registry in Databricks.
- databricks.Notebook to manage Databricks Notebooks.
- databricks.getNotebook data source to export a notebook from Databricks Workspace.
- databricks.Repo to manage Databricks Repos.
Create MlflowExperiment Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new MlflowExperiment(name: string, args?: MlflowExperimentArgs, opts?: CustomResourceOptions);
@overload
def MlflowExperiment(resource_name: str,
args: Optional[MlflowExperimentArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def MlflowExperiment(resource_name: str,
opts: Optional[ResourceOptions] = None,
artifact_location: Optional[str] = None,
creation_time: Optional[int] = None,
description: Optional[str] = None,
experiment_id: Optional[str] = None,
last_update_time: Optional[int] = None,
lifecycle_stage: Optional[str] = None,
name: Optional[str] = None)
func NewMlflowExperiment(ctx *Context, name string, args *MlflowExperimentArgs, opts ...ResourceOption) (*MlflowExperiment, error)
public MlflowExperiment(string name, MlflowExperimentArgs? args = null, CustomResourceOptions? opts = null)
public MlflowExperiment(String name, MlflowExperimentArgs args)
public MlflowExperiment(String name, MlflowExperimentArgs args, CustomResourceOptions options)
type: databricks:MlflowExperiment
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args MlflowExperimentArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args MlflowExperimentArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args MlflowExperimentArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args MlflowExperimentArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args MlflowExperimentArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var mlflowExperimentResource = new Databricks.MlflowExperiment("mlflowExperimentResource", new()
{
ArtifactLocation = "string",
CreationTime = 0,
Description = "string",
ExperimentId = "string",
LastUpdateTime = 0,
LifecycleStage = "string",
Name = "string",
});
example, err := databricks.NewMlflowExperiment(ctx, "mlflowExperimentResource", &databricks.MlflowExperimentArgs{
ArtifactLocation: pulumi.String("string"),
CreationTime: pulumi.Int(0),
Description: pulumi.String("string"),
ExperimentId: pulumi.String("string"),
LastUpdateTime: pulumi.Int(0),
LifecycleStage: pulumi.String("string"),
Name: pulumi.String("string"),
})
var mlflowExperimentResource = new MlflowExperiment("mlflowExperimentResource", MlflowExperimentArgs.builder()
.artifactLocation("string")
.creationTime(0)
.description("string")
.experimentId("string")
.lastUpdateTime(0)
.lifecycleStage("string")
.name("string")
.build());
mlflow_experiment_resource = databricks.MlflowExperiment("mlflowExperimentResource",
artifact_location="string",
creation_time=0,
description="string",
experiment_id="string",
last_update_time=0,
lifecycle_stage="string",
name="string")
const mlflowExperimentResource = new databricks.MlflowExperiment("mlflowExperimentResource", {
artifactLocation: "string",
creationTime: 0,
description: "string",
experimentId: "string",
lastUpdateTime: 0,
lifecycleStage: "string",
name: "string",
});
type: databricks:MlflowExperiment
properties:
  artifactLocation: string
  creationTime: 0
  description: string
  experimentId: string
  lastUpdateTime: 0
  lifecycleStage: string
  name: string
MlflowExperiment Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The MlflowExperiment resource accepts the following input properties:
- ArtifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- CreationTime int
- Description string - The description of the MLflow experiment.
- ExperimentId string
- LastUpdateTime int
- LifecycleStage string
- Name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- ArtifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- CreationTime int
- Description string - The description of the MLflow experiment.
- ExperimentId string
- LastUpdateTime int
- LifecycleStage string
- Name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation String - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime Integer
- description String - The description of the MLflow experiment.
- experimentId String
- lastUpdateTime Integer
- lifecycleStage String
- name String - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime number
- description string - The description of the MLflow experiment.
- experimentId string
- lastUpdateTime number
- lifecycleStage string
- name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifact_location str - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creation_time int
- description str - The description of the MLflow experiment.
- experiment_id str
- last_update_time int
- lifecycle_stage str
- name str - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation String - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime Number
- description String - The description of the MLflow experiment.
- experimentId String
- lastUpdateTime Number
- lifecycleStage String
- name String - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
Outputs
All input properties are implicitly available as output properties. Additionally, the MlflowExperiment resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing MlflowExperiment Resource
Get an existing MlflowExperiment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: MlflowExperimentState, opts?: CustomResourceOptions): MlflowExperiment
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
artifact_location: Optional[str] = None,
creation_time: Optional[int] = None,
description: Optional[str] = None,
experiment_id: Optional[str] = None,
last_update_time: Optional[int] = None,
lifecycle_stage: Optional[str] = None,
name: Optional[str] = None) -> MlflowExperiment
func GetMlflowExperiment(ctx *Context, name string, id IDInput, state *MlflowExperimentState, opts ...ResourceOption) (*MlflowExperiment, error)
public static MlflowExperiment Get(string name, Input<string> id, MlflowExperimentState? state, CustomResourceOptions? opts = null)
public static MlflowExperiment get(String name, Output<String> id, MlflowExperimentState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
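For example, a minimal TypeScript sketch that adopts an existing experiment (the experiment ID shown is a placeholder) might read:
import * as databricks from "@pulumi/databricks";

// Look up an existing experiment by its experiment ID (placeholder value).
const existing = databricks.MlflowExperiment.get("existing", "1234567890");

// The looked-up resource exposes the same properties as one created in code.
export const existingName = existing.name;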
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ArtifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- CreationTime int
- Description string - The description of the MLflow experiment.
- ExperimentId string
- LastUpdateTime int
- LifecycleStage string
- Name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- ArtifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- CreationTime int
- Description string - The description of the MLflow experiment.
- ExperimentId string
- LastUpdateTime int
- LifecycleStage string
- Name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation String - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime Integer
- description String - The description of the MLflow experiment.
- experimentId String
- lastUpdateTime Integer
- lifecycleStage String
- name String - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation string - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime number
- description string - The description of the MLflow experiment.
- experimentId string
- lastUpdateTime number
- lifecycleStage string
- name string - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifact_location str - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creation_time int
- description str - The description of the MLflow experiment.
- experiment_id str
- last_update_time int
- lifecycle_stage str
- name str - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
- artifactLocation String - Path to dbfs:/ or s3:// artifact location of the MLflow experiment.
- creationTime Number
- description String - The description of the MLflow experiment.
- experimentId String
- lastUpdateTime Number
- lifecycleStage String
- name String - Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
Import
The experiment resource can be imported using the ID of the experiment:
$ pulumi import databricks:index/mlflowExperiment:MlflowExperiment this <experiment-id>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the databricks Terraform Provider.