1. Packages
  2. AWS
  3. API Docs
  4. sagemaker
  5. DataQualityJobDefinition
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

aws.sagemaker.DataQualityJobDefinition

Explore with Pulumi AI

aws logo
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Provides a SageMaker data quality job definition resource.

    Example Usage

    Basic usage:

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = new aws.sagemaker.DataQualityJobDefinition("test", {
        name: "my-data-quality-job-definition",
        dataQualityAppSpecification: {
            imageUri: monitor.registryPath,
        },
        dataQualityJobInput: {
            endpointInput: {
                endpointName: myEndpoint.name,
            },
        },
        dataQualityJobOutputConfig: {
            monitoringOutputs: {
                s3Output: {
                    s3Uri: `https://${myBucket.bucketRegionalDomainName}/output`,
                },
            },
        },
        jobResources: {
            clusterConfig: {
                instanceCount: 1,
                instanceType: "ml.t3.medium",
                volumeSizeInGb: 20,
            },
        },
        roleArn: myRole.arn,
    });
    
    import pulumi
    import pulumi_aws as aws
    
    test = aws.sagemaker.DataQualityJobDefinition("test",
        name="my-data-quality-job-definition",
        data_quality_app_specification={
            "image_uri": monitor["registryPath"],
        },
        data_quality_job_input={
            "endpoint_input": {
                "endpoint_name": my_endpoint["name"],
            },
        },
        data_quality_job_output_config={
            "monitoring_outputs": {
                "s3_output": {
                    "s3_uri": f"https://{my_bucket['bucketRegionalDomainName']}/output",
                },
            },
        },
        job_resources={
            "cluster_config": {
                "instance_count": 1,
                "instance_type": "ml.t3.medium",
                "volume_size_in_gb": 20,
            },
        },
        role_arn=my_role["arn"])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sagemaker"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := sagemaker.NewDataQualityJobDefinition(ctx, "test", &sagemaker.DataQualityJobDefinitionArgs{
    			Name: pulumi.String("my-data-quality-job-definition"),
    			DataQualityAppSpecification: &sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs{
    				ImageUri: pulumi.Any(monitor.RegistryPath),
    			},
    			DataQualityJobInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs{
    				EndpointInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs{
    					EndpointName: pulumi.Any(myEndpoint.Name),
    				},
    			},
    			DataQualityJobOutputConfig: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs{
    				MonitoringOutputs: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs{
    					S3Output: sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs{
    						S3Uri: pulumi.Sprintf("https://%v/output", myBucket.BucketRegionalDomainName),
    					},
    				},
    			},
    			JobResources: &sagemaker.DataQualityJobDefinitionJobResourcesArgs{
    				ClusterConfig: &sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs{
    					InstanceCount:  pulumi.Int(1),
    					InstanceType:   pulumi.String("ml.t3.medium"),
    					VolumeSizeInGb: pulumi.Int(20),
    				},
    			},
    			RoleArn: pulumi.Any(myRole.Arn),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = new Aws.Sagemaker.DataQualityJobDefinition("test", new()
        {
            Name = "my-data-quality-job-definition",
            DataQualityAppSpecification = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs
            {
                ImageUri = monitor.RegistryPath,
            },
            DataQualityJobInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs
            {
                EndpointInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs
                {
                    EndpointName = myEndpoint.Name,
                },
            },
            DataQualityJobOutputConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs
            {
                MonitoringOutputs = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs
                {
                    S3Output = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs
                    {
                        S3Uri = $"https://{myBucket.BucketRegionalDomainName}/output",
                    },
                },
            },
            JobResources = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs
            {
                ClusterConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs
                {
                    InstanceCount = 1,
                    InstanceType = "ml.t3.medium",
                    VolumeSizeInGb = 20,
                },
            },
            RoleArn = myRole.Arn,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.sagemaker.DataQualityJobDefinition;
    import com.pulumi.aws.sagemaker.DataQualityJobDefinitionArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesArgs;
    import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var test = new DataQualityJobDefinition("test", DataQualityJobDefinitionArgs.builder()
                .name("my-data-quality-job-definition")
                .dataQualityAppSpecification(DataQualityJobDefinitionDataQualityAppSpecificationArgs.builder()
                    .imageUri(monitor.registryPath())
                    .build())
                .dataQualityJobInput(DataQualityJobDefinitionDataQualityJobInputArgs.builder()
                    .endpointInput(DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs.builder()
                        .endpointName(myEndpoint.name())
                        .build())
                    .build())
                .dataQualityJobOutputConfig(DataQualityJobDefinitionDataQualityJobOutputConfigArgs.builder()
                    .monitoringOutputs(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs.builder()
                        .s3Output(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs.builder()
                            .s3Uri(String.format("https://%s/output", myBucket.bucketRegionalDomainName()))
                            .build())
                        .build())
                    .build())
                .jobResources(DataQualityJobDefinitionJobResourcesArgs.builder()
                    .clusterConfig(DataQualityJobDefinitionJobResourcesClusterConfigArgs.builder()
                        .instanceCount(1)
                        .instanceType("ml.t3.medium")
                        .volumeSizeInGb(20)
                        .build())
                    .build())
                .roleArn(myRole.arn())
                .build());
    
        }
    }
    
    resources:
      test:
        type: aws:sagemaker:DataQualityJobDefinition
        properties:
          name: my-data-quality-job-definition
          dataQualityAppSpecification:
            imageUri: ${monitor.registryPath}
          dataQualityJobInput:
            endpointInput:
              endpointName: ${myEndpoint.name}
          dataQualityJobOutputConfig:
            monitoringOutputs:
              s3Output:
                s3Uri: https://${myBucket.bucketRegionalDomainName}/output
          jobResources:
            clusterConfig:
              instanceCount: 1
              instanceType: ml.t3.medium
              volumeSizeInGb: 20
          roleArn: ${myRole.arn}
    

    Create DataQualityJobDefinition Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DataQualityJobDefinition(name: string, args: DataQualityJobDefinitionArgs, opts?: CustomResourceOptions);
    @overload
    def DataQualityJobDefinition(resource_name: str,
                                 args: DataQualityJobDefinitionArgs,
                                 opts: Optional[ResourceOptions] = None)
    
    @overload
    def DataQualityJobDefinition(resource_name: str,
                                 opts: Optional[ResourceOptions] = None,
                                 data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
                                 data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
                                 data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
                                 job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
                                 role_arn: Optional[str] = None,
                                 data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
                                 name: Optional[str] = None,
                                 network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
                                 stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
                                 tags: Optional[Mapping[str, str]] = None)
    func NewDataQualityJobDefinition(ctx *Context, name string, args DataQualityJobDefinitionArgs, opts ...ResourceOption) (*DataQualityJobDefinition, error)
    public DataQualityJobDefinition(string name, DataQualityJobDefinitionArgs args, CustomResourceOptions? opts = null)
    public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args)
    public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args, CustomResourceOptions options)
    
    type: aws:sagemaker:DataQualityJobDefinition
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DataQualityJobDefinitionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DataQualityJobDefinitionArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DataQualityJobDefinitionArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DataQualityJobDefinitionArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DataQualityJobDefinitionArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var dataQualityJobDefinitionResource = new Aws.Sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource", new()
    {
        DataQualityAppSpecification = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs
        {
            ImageUri = "string",
            Environment = 
            {
                { "string", "string" },
            },
            PostAnalyticsProcessorSourceUri = "string",
            RecordPreprocessorSourceUri = "string",
        },
        DataQualityJobInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs
        {
            BatchTransformInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs
            {
                DataCapturedDestinationS3Uri = "string",
                DatasetFormat = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs
                {
                    Csv = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs
                    {
                        Header = false,
                    },
                    Json = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs
                    {
                        Line = false,
                    },
                },
                LocalPath = "string",
                S3DataDistributionType = "string",
                S3InputMode = "string",
            },
            EndpointInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs
            {
                EndpointName = "string",
                LocalPath = "string",
                S3DataDistributionType = "string",
                S3InputMode = "string",
            },
        },
        DataQualityJobOutputConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs
        {
            MonitoringOutputs = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs
            {
                S3Output = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs
                {
                    S3Uri = "string",
                    LocalPath = "string",
                    S3UploadMode = "string",
                },
            },
            KmsKeyId = "string",
        },
        JobResources = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs
        {
            ClusterConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs
            {
                InstanceCount = 0,
                InstanceType = "string",
                VolumeSizeInGb = 0,
                VolumeKmsKeyId = "string",
            },
        },
        RoleArn = "string",
        DataQualityBaselineConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigArgs
        {
            ConstraintsResource = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs
            {
                S3Uri = "string",
            },
            StatisticsResource = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs
            {
                S3Uri = "string",
            },
        },
        Name = "string",
        NetworkConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigArgs
        {
            EnableInterContainerTrafficEncryption = false,
            EnableNetworkIsolation = false,
            VpcConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigVpcConfigArgs
            {
                SecurityGroupIds = new[]
                {
                    "string",
                },
                Subnets = new[]
                {
                    "string",
                },
            },
        },
        StoppingCondition = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionStoppingConditionArgs
        {
            MaxRuntimeInSeconds = 0,
        },
        Tags = 
        {
            { "string", "string" },
        },
    });
    
    example, err := sagemaker.NewDataQualityJobDefinition(ctx, "dataQualityJobDefinitionResource", &sagemaker.DataQualityJobDefinitionArgs{
    	DataQualityAppSpecification: &sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs{
    		ImageUri: pulumi.String("string"),
    		Environment: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		PostAnalyticsProcessorSourceUri: pulumi.String("string"),
    		RecordPreprocessorSourceUri:     pulumi.String("string"),
    	},
    	DataQualityJobInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs{
    		BatchTransformInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs{
    			DataCapturedDestinationS3Uri: pulumi.String("string"),
    			DatasetFormat: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs{
    				Csv: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs{
    					Header: pulumi.Bool(false),
    				},
    				Json: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs{
    					Line: pulumi.Bool(false),
    				},
    			},
    			LocalPath:              pulumi.String("string"),
    			S3DataDistributionType: pulumi.String("string"),
    			S3InputMode:            pulumi.String("string"),
    		},
    		EndpointInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs{
    			EndpointName:           pulumi.String("string"),
    			LocalPath:              pulumi.String("string"),
    			S3DataDistributionType: pulumi.String("string"),
    			S3InputMode:            pulumi.String("string"),
    		},
    	},
    	DataQualityJobOutputConfig: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs{
    		MonitoringOutputs: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs{
    			S3Output: sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs{
    				S3Uri:        pulumi.String("string"),
    				LocalPath:    pulumi.String("string"),
    				S3UploadMode: pulumi.String("string"),
    			},
    		},
    		KmsKeyId: pulumi.String("string"),
    	},
    	JobResources: &sagemaker.DataQualityJobDefinitionJobResourcesArgs{
    		ClusterConfig: &sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs{
    			InstanceCount:  pulumi.Int(0),
    			InstanceType:   pulumi.String("string"),
    			VolumeSizeInGb: pulumi.Int(0),
    			VolumeKmsKeyId: pulumi.String("string"),
    		},
    	},
    	RoleArn: pulumi.String("string"),
    	DataQualityBaselineConfig: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigArgs{
    		ConstraintsResource: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs{
    			S3Uri: pulumi.String("string"),
    		},
    		StatisticsResource: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs{
    			S3Uri: pulumi.String("string"),
    		},
    	},
    	Name: pulumi.String("string"),
    	NetworkConfig: &sagemaker.DataQualityJobDefinitionNetworkConfigArgs{
    		EnableInterContainerTrafficEncryption: pulumi.Bool(false),
    		EnableNetworkIsolation:                pulumi.Bool(false),
    		VpcConfig: &sagemaker.DataQualityJobDefinitionNetworkConfigVpcConfigArgs{
    			SecurityGroupIds: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			Subnets: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    		},
    	},
    	StoppingCondition: &sagemaker.DataQualityJobDefinitionStoppingConditionArgs{
    		MaxRuntimeInSeconds: pulumi.Int(0),
    	},
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var dataQualityJobDefinitionResource = new DataQualityJobDefinition("dataQualityJobDefinitionResource", DataQualityJobDefinitionArgs.builder()
        .dataQualityAppSpecification(DataQualityJobDefinitionDataQualityAppSpecificationArgs.builder()
            .imageUri("string")
            .environment(Map.of("string", "string"))
            .postAnalyticsProcessorSourceUri("string")
            .recordPreprocessorSourceUri("string")
            .build())
        .dataQualityJobInput(DataQualityJobDefinitionDataQualityJobInputArgs.builder()
            .batchTransformInput(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs.builder()
                .dataCapturedDestinationS3Uri("string")
                .datasetFormat(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs.builder()
                    .csv(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs.builder()
                        .header(false)
                        .build())
                    .json(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs.builder()
                        .line(false)
                        .build())
                    .build())
                .localPath("string")
                .s3DataDistributionType("string")
                .s3InputMode("string")
                .build())
            .endpointInput(DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs.builder()
                .endpointName("string")
                .localPath("string")
                .s3DataDistributionType("string")
                .s3InputMode("string")
                .build())
            .build())
        .dataQualityJobOutputConfig(DataQualityJobDefinitionDataQualityJobOutputConfigArgs.builder()
            .monitoringOutputs(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs.builder()
                .s3Output(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs.builder()
                    .s3Uri("string")
                    .localPath("string")
                    .s3UploadMode("string")
                    .build())
                .build())
            .kmsKeyId("string")
            .build())
        .jobResources(DataQualityJobDefinitionJobResourcesArgs.builder()
            .clusterConfig(DataQualityJobDefinitionJobResourcesClusterConfigArgs.builder()
                .instanceCount(0)
                .instanceType("string")
                .volumeSizeInGb(0)
                .volumeKmsKeyId("string")
                .build())
            .build())
        .roleArn("string")
        .dataQualityBaselineConfig(DataQualityJobDefinitionDataQualityBaselineConfigArgs.builder()
            .constraintsResource(DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs.builder()
                .s3Uri("string")
                .build())
            .statisticsResource(DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs.builder()
                .s3Uri("string")
                .build())
            .build())
        .name("string")
        .networkConfig(DataQualityJobDefinitionNetworkConfigArgs.builder()
            .enableInterContainerTrafficEncryption(false)
            .enableNetworkIsolation(false)
            .vpcConfig(DataQualityJobDefinitionNetworkConfigVpcConfigArgs.builder()
                .securityGroupIds("string")
                .subnets("string")
                .build())
            .build())
        .stoppingCondition(DataQualityJobDefinitionStoppingConditionArgs.builder()
            .maxRuntimeInSeconds(0)
            .build())
        .tags(Map.of("string", "string"))
        .build());
    
    data_quality_job_definition_resource = aws.sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource",
        data_quality_app_specification={
            "image_uri": "string",
            "environment": {
                "string": "string",
            },
            "post_analytics_processor_source_uri": "string",
            "record_preprocessor_source_uri": "string",
        },
        data_quality_job_input={
            "batch_transform_input": {
                "data_captured_destination_s3_uri": "string",
                "dataset_format": {
                    "csv": {
                        "header": False,
                    },
                    "json": {
                        "line": False,
                    },
                },
                "local_path": "string",
                "s3_data_distribution_type": "string",
                "s3_input_mode": "string",
            },
            "endpoint_input": {
                "endpoint_name": "string",
                "local_path": "string",
                "s3_data_distribution_type": "string",
                "s3_input_mode": "string",
            },
        },
        data_quality_job_output_config={
            "monitoring_outputs": {
                "s3_output": {
                    "s3_uri": "string",
                    "local_path": "string",
                    "s3_upload_mode": "string",
                },
            },
            "kms_key_id": "string",
        },
        job_resources={
            "cluster_config": {
                "instance_count": 0,
                "instance_type": "string",
                "volume_size_in_gb": 0,
                "volume_kms_key_id": "string",
            },
        },
        role_arn="string",
        data_quality_baseline_config={
            "constraints_resource": {
                "s3_uri": "string",
            },
            "statistics_resource": {
                "s3_uri": "string",
            },
        },
        name="string",
        network_config={
            "enable_inter_container_traffic_encryption": False,
            "enable_network_isolation": False,
            "vpc_config": {
                "security_group_ids": ["string"],
                "subnets": ["string"],
            },
        },
        stopping_condition={
            "max_runtime_in_seconds": 0,
        },
        tags={
            "string": "string",
        })
    
    const dataQualityJobDefinitionResource = new aws.sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource", {
        dataQualityAppSpecification: {
            imageUri: "string",
            environment: {
                string: "string",
            },
            postAnalyticsProcessorSourceUri: "string",
            recordPreprocessorSourceUri: "string",
        },
        dataQualityJobInput: {
            batchTransformInput: {
                dataCapturedDestinationS3Uri: "string",
                datasetFormat: {
                    csv: {
                        header: false,
                    },
                    json: {
                        line: false,
                    },
                },
                localPath: "string",
                s3DataDistributionType: "string",
                s3InputMode: "string",
            },
            endpointInput: {
                endpointName: "string",
                localPath: "string",
                s3DataDistributionType: "string",
                s3InputMode: "string",
            },
        },
        dataQualityJobOutputConfig: {
            monitoringOutputs: {
                s3Output: {
                    s3Uri: "string",
                    localPath: "string",
                    s3UploadMode: "string",
                },
            },
            kmsKeyId: "string",
        },
        jobResources: {
            clusterConfig: {
                instanceCount: 0,
                instanceType: "string",
                volumeSizeInGb: 0,
                volumeKmsKeyId: "string",
            },
        },
        roleArn: "string",
        dataQualityBaselineConfig: {
            constraintsResource: {
                s3Uri: "string",
            },
            statisticsResource: {
                s3Uri: "string",
            },
        },
        name: "string",
        networkConfig: {
            enableInterContainerTrafficEncryption: false,
            enableNetworkIsolation: false,
            vpcConfig: {
                securityGroupIds: ["string"],
                subnets: ["string"],
            },
        },
        stoppingCondition: {
            maxRuntimeInSeconds: 0,
        },
        tags: {
            string: "string",
        },
    });
    
    type: aws:sagemaker:DataQualityJobDefinition
    properties:
        dataQualityAppSpecification:
            environment:
                string: string
            imageUri: string
            postAnalyticsProcessorSourceUri: string
            recordPreprocessorSourceUri: string
        dataQualityBaselineConfig:
            constraintsResource:
                s3Uri: string
            statisticsResource:
                s3Uri: string
        dataQualityJobInput:
            batchTransformInput:
                dataCapturedDestinationS3Uri: string
                datasetFormat:
                    csv:
                        header: false
                    json:
                        line: false
                localPath: string
                s3DataDistributionType: string
                s3InputMode: string
            endpointInput:
                endpointName: string
                localPath: string
                s3DataDistributionType: string
                s3InputMode: string
        dataQualityJobOutputConfig:
            kmsKeyId: string
            monitoringOutputs:
                s3Output:
                    localPath: string
                    s3UploadMode: string
                    s3Uri: string
        jobResources:
            clusterConfig:
                instanceCount: 0
                instanceType: string
                volumeKmsKeyId: string
                volumeSizeInGb: 0
        name: string
        networkConfig:
            enableInterContainerTrafficEncryption: false
            enableNetworkIsolation: false
            vpcConfig:
                securityGroupIds:
                    - string
                subnets:
                    - string
        roleArn: string
        stoppingCondition:
            maxRuntimeInSeconds: 0
        tags:
            string: string
    

    DataQualityJobDefinition Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The DataQualityJobDefinition resource accepts the following input properties:

    DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    DataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    JobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    RoleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    Name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    NetworkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    StoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    Tags Dictionary<string, string>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs
    Specifies the container that runs the monitoring job. Fields are documented below.
    DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs
    A list of inputs for the monitoring job. Fields are documented below.
    DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs
    The output configuration for monitoring jobs. Fields are documented below.
    JobResources DataQualityJobDefinitionJobResourcesArgs
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    RoleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    Name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    NetworkConfig DataQualityJobDefinitionNetworkConfigArgs
    Specifies networking configuration for the monitoring job. Fields are documented below.
    StoppingCondition DataQualityJobDefinitionStoppingConditionArgs
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    Tags map[string]string
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    roleArn String
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    name String
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    stoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Map<String,String>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    roleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    stoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags {[key: string]: string}
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs
    Specifies the container that runs the monitoring job. Fields are documented below.
    data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs
    A list of inputs for the monitoring job. Fields are documented below.
    data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs
    The output configuration for monitoring jobs. Fields are documented below.
    job_resources DataQualityJobDefinitionJobResourcesArgs
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    role_arn str
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    name str
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    network_config DataQualityJobDefinitionNetworkConfigArgs
    Specifies networking configuration for the monitoring job. Fields are documented below.
    stopping_condition DataQualityJobDefinitionStoppingConditionArgs
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Mapping[str, str]
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    dataQualityAppSpecification Property Map
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityJobInput Property Map
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig Property Map
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources Property Map
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    roleArn String
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    dataQualityBaselineConfig Property Map
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    name String
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig Property Map
    Specifies networking configuration for the monitoring job. Fields are documented below.
    stoppingCondition Property Map
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Map<String>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DataQualityJobDefinition resource produces the following output properties:

    Arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll Dictionary<string, string>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll map[string]string
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String,String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    id string
    The provider-assigned unique ID for this managed resource.
    tagsAll {[key: string]: string}
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn str
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    id str
    The provider-assigned unique ID for this managed resource.
    tags_all Mapping[str, str]
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Look up Existing DataQualityJobDefinition Resource

    Get an existing DataQualityJobDefinition resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: DataQualityJobDefinitionState, opts?: CustomResourceOptions): DataQualityJobDefinition
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            arn: Optional[str] = None,
            data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
            data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
            data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
            data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
            job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
            name: Optional[str] = None,
            network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
            role_arn: Optional[str] = None,
            stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tags_all: Optional[Mapping[str, str]] = None) -> DataQualityJobDefinition
    func GetDataQualityJobDefinition(ctx *Context, name string, id IDInput, state *DataQualityJobDefinitionState, opts ...ResourceOption) (*DataQualityJobDefinition, error)
    public static DataQualityJobDefinition Get(string name, Input<string> id, DataQualityJobDefinitionState? state, CustomResourceOptions? opts = null)
    public static DataQualityJobDefinition get(String name, Output<String> id, DataQualityJobDefinitionState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    DataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    JobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    Name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    NetworkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    RoleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    StoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    Tags Dictionary<string, string>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll Dictionary<string, string>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs
    Specifies the container that runs the monitoring job. Fields are documented below.
    DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs
    A list of inputs for the monitoring job. Fields are documented below.
    DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs
    The output configuration for monitoring jobs. Fields are documented below.
    JobResources DataQualityJobDefinitionJobResourcesArgs
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    Name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    NetworkConfig DataQualityJobDefinitionNetworkConfigArgs
    Specifies networking configuration for the monitoring job. Fields are documented below.
    RoleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    StoppingCondition DataQualityJobDefinitionStoppingConditionArgs
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    Tags map[string]string
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    TagsAll map[string]string
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    name String
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    roleArn String
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    stoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Map<String,String>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String,String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn string
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources DataQualityJobDefinitionJobResources
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    name string
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig DataQualityJobDefinitionNetworkConfig
    Specifies networking configuration for the monitoring job. Fields are documented below.
    roleArn string
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    stoppingCondition DataQualityJobDefinitionStoppingCondition
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags {[key: string]: string}
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll {[key: string]: string}
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn str
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs
    Specifies the container that runs the monitoring job. Fields are documented below.
    data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs
    A list of inputs for the monitoring job. Fields are documented below.
    data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs
    The output configuration for monitoring jobs. Fields are documented below.
    job_resources DataQualityJobDefinitionJobResourcesArgs
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    name str
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    network_config DataQualityJobDefinitionNetworkConfigArgs
    Specifies networking configuration for the monitoring job. Fields are documented below.
    role_arn str
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    stopping_condition DataQualityJobDefinitionStoppingConditionArgs
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Mapping[str, str]
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tags_all Mapping[str, str]
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
    dataQualityAppSpecification Property Map
    Specifies the container that runs the monitoring job. Fields are documented below.
    dataQualityBaselineConfig Property Map
    Configures the constraints and baselines for the monitoring job. Fields are documented below.
    dataQualityJobInput Property Map
    A list of inputs for the monitoring job. Fields are documented below.
    dataQualityJobOutputConfig Property Map
    The output configuration for monitoring jobs. Fields are documented below.
    jobResources Property Map
    Identifies the resources to deploy for a monitoring job. Fields are documented below.
    name String
    The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
    networkConfig Property Map
    Specifies networking configuration for the monitoring job. Fields are documented below.
    roleArn String
    The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
    stoppingCondition Property Map
    A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
    tags Map<String>
    A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
    tagsAll Map<String>
    A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Supporting Types

    DataQualityJobDefinitionDataQualityAppSpecification, DataQualityJobDefinitionDataQualityAppSpecificationArgs

    ImageUri string
    The container image that the data quality monitoring job runs.
    Environment Dictionary<string, string>
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    PostAnalyticsProcessorSourceUri string
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    RecordPreprocessorSourceUri string
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
    ImageUri string
    The container image that the data quality monitoring job runs.
    Environment map[string]string
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    PostAnalyticsProcessorSourceUri string
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    RecordPreprocessorSourceUri string
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
    imageUri String
    The container image that the data quality monitoring job runs.
    environment Map<String,String>
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    postAnalyticsProcessorSourceUri String
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    recordPreprocessorSourceUri String
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
    imageUri string
    The container image that the data quality monitoring job runs.
    environment {[key: string]: string}
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    postAnalyticsProcessorSourceUri string
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    recordPreprocessorSourceUri string
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
    image_uri str
    The container image that the data quality monitoring job runs.
    environment Mapping[str, str]
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    post_analytics_processor_source_uri str
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    record_preprocessor_source_uri str
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
    imageUri String
    The container image that the data quality monitoring job runs.
    environment Map<String>
    Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
    postAnalyticsProcessorSourceUri String
    An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
    recordPreprocessorSourceUri String
    An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64-decode the payload and convert it into flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.

    DataQualityJobDefinitionDataQualityBaselineConfig, DataQualityJobDefinitionDataQualityBaselineConfigArgs

    ConstraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
    The constraints resource for a monitoring job. Fields are documented below.
    StatisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
    The statistics resource for a monitoring job. Fields are documented below.
    ConstraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
    The constraints resource for a monitoring job. Fields are documented below.
    StatisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
    The statistics resource for a monitoring job. Fields are documented below.
    constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
    The constraints resource for a monitoring job. Fields are documented below.
    statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
    The statistics resource for a monitoring job. Fields are documented below.
    constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
    The constraints resource for a monitoring job. Fields are documented below.
    statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
    The statistics resource for a monitoring job. Fields are documented below.
    constraints_resource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
    The constraints resource for a monitoring job. Fields are documented below.
    statistics_resource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
    The statistics resource for a monitoring job. Fields are documented below.
    constraintsResource Property Map
    The constraints resource for a monitoring job. Fields are documented below.
    statisticsResource Property Map
    The statistics resource for a monitoring job. Fields are documented below.

    DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource, DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs

    S3Uri string
    The Amazon S3 URI for the constraints resource.
    S3Uri string
    The Amazon S3 URI for the constraints resource.
    s3Uri String
    The Amazon S3 URI for the constraints resource.
    s3Uri string
    The Amazon S3 URI for the constraints resource.
    s3_uri str
    The Amazon S3 URI for the constraints resource.
    s3Uri String
    The Amazon S3 URI for the constraints resource.

    DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource, DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs

    S3Uri string
    The Amazon S3 URI for the statistics resource.
    S3Uri string
    The Amazon S3 URI for the statistics resource.
    s3Uri String
    The Amazon S3 URI for the statistics resource.
    s3Uri string
    The Amazon S3 URI for the statistics resource.
    s3_uri str
    The Amazon S3 URI for the statistics resource.
    s3Uri String
    The Amazon S3 URI for the statistics resource.

    DataQualityJobDefinitionDataQualityJobInput, DataQualityJobDefinitionDataQualityJobInputArgs

    BatchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
    Input object for the batch transform job. Fields are documented below.
    EndpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
    Input object for the endpoint. Fields are documented below.
    BatchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
    Input object for the batch transform job. Fields are documented below.
    EndpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
    Input object for the endpoint. Fields are documented below.
    batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
    Input object for the batch transform job. Fields are documented below.
    endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
    Input object for the endpoint. Fields are documented below.
    batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
    Input object for the batch transform job. Fields are documented below.
    endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
    Input object for the endpoint. Fields are documented below.
    batch_transform_input DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
    Input object for the batch transform job. Fields are documented below.
    endpoint_input DataQualityJobDefinitionDataQualityJobInputEndpointInput
    Input object for the endpoint. Fields are documented below.
    batchTransformInput Property Map
    Input object for the batch transform job. Fields are documented below.
    endpointInput Property Map
    Input object for the endpoint. Fields are documented below.

    DataQualityJobDefinitionDataQualityJobInputBatchTransformInput, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs

    DataCapturedDestinationS3Uri string
    The Amazon S3 location being used to capture the data.
    DatasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
    The dataset format for your batch transform job. Fields are documented below.
    LocalPath string
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    S3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    S3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    DataCapturedDestinationS3Uri string
    The Amazon S3 location being used to capture the data.
    DatasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
    The dataset format for your batch transform job. Fields are documented below.
    LocalPath string
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    S3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    S3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    dataCapturedDestinationS3Uri String
    The Amazon S3 location being used to capture the data.
    datasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
    The dataset format for your batch transform job. Fields are documented below.
    localPath String
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType String
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode String
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    dataCapturedDestinationS3Uri string
    The Amazon S3 location being used to capture the data.
    datasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
    The dataset format for your batch transform job. Fields are documented below.
    localPath string
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    data_captured_destination_s3_uri str
    The Amazon S3 location being used to capture the data.
    dataset_format DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
    The dataset format for your batch transform job. Fields are documented below.
    local_path str
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    s3_data_distribution_type str
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3_input_mode str
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    dataCapturedDestinationS3Uri String
    The Amazon S3 location being used to capture the data.
    datasetFormat Property Map
    The dataset format for your batch transform job. Fields are documented below.
    localPath String
    Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType String
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode String
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

    DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs

    Csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv
    The CSV dataset used in the monitoring job. Fields are documented below.
    Json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson
    The JSON dataset used in the monitoring job. Fields are documented below.
    Csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv
    The CSV dataset used in the monitoring job. Fields are documented below.
    Json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson
    The JSON dataset used in the monitoring job. Fields are documented below.
    csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv
    The CSV dataset used in the monitoring job. Fields are documented below.
    json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson
    The JSON dataset used in the monitoring job. Fields are documented below.
    csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv
    The CSV dataset used in the monitoring job. Fields are documented below.
    json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson
    The JSON dataset used in the monitoring job. Fields are documented below.
    csv DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv
    The CSV dataset used in the monitoring job. Fields are documented below.
    json DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson
    The JSON dataset used in the monitoring job. Fields are documented below.
    csv Property Map
    The CSV dataset used in the monitoring job. Fields are documented below.
    json Property Map
    The JSON dataset used in the monitoring job. Fields are documented below.

    DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs

    Header bool
    Indicates if the CSV data has a header.
    Header bool
    Indicates if the CSV data has a header.
    header Boolean
    Indicates if the CSV data has a header.
    header boolean
    Indicates if the CSV data has a header.
    header bool
    Indicates if the CSV data has a header.
    header Boolean
    Indicates if the CSV data has a header.

    DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs

    Line bool
    Indicates if the file should be read as a json object per line.
    Line bool
    Indicates if the file should be read as a json object per line.
    line Boolean
    Indicates if the file should be read as a json object per line.
    line boolean
    Indicates if the file should be read as a json object per line.
    line bool
    Indicates if the file should be read as a json object per line.
    line Boolean
    Indicates if the file should be read as a json object per line.

    DataQualityJobDefinitionDataQualityJobInputEndpointInput, DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs

    EndpointName string
    An endpoint in the customer's account which has data_capture_config enabled.
    LocalPath string
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    S3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    S3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    EndpointName string
    An endpoint in the customer's account which has data_capture_config enabled.
    LocalPath string
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    S3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    S3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    endpointName String
    An endpoint in the customer's account which has data_capture_config enabled.
    localPath String
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType String
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode String
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    endpointName string
    An endpoint in the customer's account which has data_capture_config enabled.
    localPath string
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType string
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode string
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    endpoint_name str
    An endpoint in the customer's account which has data_capture_config enabled.
    local_path str
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    s3_data_distribution_type str
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3_input_mode str
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File
    endpointName String
    An endpoint in the customer's account which has data_capture_config enabled.
    localPath String
    Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
    s3DataDistributionType String
    Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key
    s3InputMode String
    Whether the Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File

    DataQualityJobDefinitionDataQualityJobOutputConfig, DataQualityJobDefinitionDataQualityJobOutputConfigArgs

    MonitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    KmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
    MonitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    KmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
    monitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    kmsKeyId String
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
    monitoringOutputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    kmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
    monitoring_outputs DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    kms_key_id str
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
    monitoringOutputs Property Map
    Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
    kmsKeyId String
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.

    DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs, DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs

    S3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
    S3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
    s3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
    s3Output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
    s3_output DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
    s3Output Property Map
    The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.

    DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output, DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs

    S3Uri string
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    LocalPath string
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    S3UploadMode string
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob
    S3Uri string
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    LocalPath string
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    S3UploadMode string
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob
    s3Uri String
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    localPath String
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    s3UploadMode String
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob
    s3Uri string
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    localPath string
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    s3UploadMode string
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob
    s3_uri str
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    local_path str
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    s3_upload_mode str
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob
    s3Uri String
    A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
    localPath String
    The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
    s3UploadMode String
    Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob

    DataQualityJobDefinitionJobResources, DataQualityJobDefinitionJobResourcesArgs

    ClusterConfig DataQualityJobDefinitionJobResourcesClusterConfig
    The configuration for the cluster resources used to run the processing job. Fields are documented below.
    ClusterConfig DataQualityJobDefinitionJobResourcesClusterConfig
    The configuration for the cluster resources used to run the processing job. Fields are documented below.
    clusterConfig DataQualityJobDefinitionJobResourcesClusterConfig
    The configuration for the cluster resources used to run the processing job. Fields are documented below.
    clusterConfig DataQualityJobDefinitionJobResourcesClusterConfig
    The configuration for the cluster resources used to run the processing job. Fields are documented below.
    cluster_config DataQualityJobDefinitionJobResourcesClusterConfig
    The configuration for the cluster resources used to run the processing job. Fields are documented below.
    clusterConfig Property Map
    The configuration for the cluster resources used to run the processing job. Fields are documented below.

    DataQualityJobDefinitionJobResourcesClusterConfig, DataQualityJobDefinitionJobResourcesClusterConfigArgs

    InstanceCount int
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    InstanceType string
    The ML compute instance type for the processing job.
    VolumeSizeInGb int
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    VolumeKmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
    InstanceCount int
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    InstanceType string
    The ML compute instance type for the processing job.
    VolumeSizeInGb int
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    VolumeKmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
    instanceCount Integer
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    instanceType String
    The ML compute instance type for the processing job.
    volumeSizeInGb Integer
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    volumeKmsKeyId String
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
    instanceCount number
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    instanceType string
    The ML compute instance type for the processing job.
    volumeSizeInGb number
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    volumeKmsKeyId string
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
    instance_count int
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    instance_type str
    The ML compute instance type for the processing job.
    volume_size_in_gb int
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    volume_kms_key_id str
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
    instanceCount Number
    The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
    instanceType String
    The ML compute instance type for the processing job.
    volumeSizeInGb Number
    The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
    volumeKmsKeyId String
    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.

    DataQualityJobDefinitionNetworkConfig, DataQualityJobDefinitionNetworkConfigArgs

    EnableInterContainerTrafficEncryption bool
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    EnableNetworkIsolation bool
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    VpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
    EnableInterContainerTrafficEncryption bool
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    EnableNetworkIsolation bool
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    VpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
    enableInterContainerTrafficEncryption Boolean
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    enableNetworkIsolation Boolean
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    vpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
    enableInterContainerTrafficEncryption boolean
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    enableNetworkIsolation boolean
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    vpcConfig DataQualityJobDefinitionNetworkConfigVpcConfig
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
    enable_inter_container_traffic_encryption bool
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    enable_network_isolation bool
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    vpc_config DataQualityJobDefinitionNetworkConfigVpcConfig
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
    enableInterContainerTrafficEncryption Boolean
    Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
    enableNetworkIsolation Boolean
    Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
    vpcConfig Property Map
    Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.

    DataQualityJobDefinitionNetworkConfigVpcConfig, DataQualityJobDefinitionNetworkConfigVpcConfigArgs

    SecurityGroupIds List<string>
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    Subnets List<string>
    The IDs of the subnets in the VPC to which you want to connect your training job or model.
    SecurityGroupIds []string
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    Subnets []string
    The IDs of the subnets in the VPC to which you want to connect your training job or model.
    securityGroupIds List<String>
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    subnets List<String>
    The IDs of the subnets in the VPC to which you want to connect your training job or model.
    securityGroupIds string[]
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    subnets string[]
    The IDs of the subnets in the VPC to which you want to connect your training job or model.
    security_group_ids Sequence[str]
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    subnets Sequence[str]
    The IDs of the subnets in the VPC to which you want to connect your training job or model.
    securityGroupIds List<String>
    The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
    subnets List<String>
    The IDs of the subnets in the VPC to which you want to connect your training job or model.

    DataQualityJobDefinitionStoppingCondition, DataQualityJobDefinitionStoppingConditionArgs

    MaxRuntimeInSeconds int
    The maximum runtime allowed in seconds.
    MaxRuntimeInSeconds int
    The maximum runtime allowed in seconds.
    maxRuntimeInSeconds Integer
    The maximum runtime allowed in seconds.
    maxRuntimeInSeconds number
    The maximum runtime allowed in seconds.
    max_runtime_in_seconds int
    The maximum runtime allowed in seconds.
    maxRuntimeInSeconds Number
    The maximum runtime allowed in seconds.

    Import

    Using pulumi import, import data quality job definitions using the name. For example:

    $ pulumi import aws:sagemaker/dataQualityJobDefinition:DataQualityJobDefinition test_data_quality_job_definition data-quality-job-definition-foo
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.
    aws logo
    AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi