1. Packages
  2. Oracle Cloud Infrastructure
  3. API Docs
  4. DataFlow
  5. InvokeRun
Oracle Cloud Infrastructure v2.17.0 published on Friday, Nov 15, 2024 by Pulumi

oci.DataFlow.InvokeRun

Explore with Pulumi AI

oci logo
Oracle Cloud Infrastructure v2.17.0 published on Friday, Nov 15, 2024 by Pulumi

    This resource provides the Invoke Run resource in Oracle Cloud Infrastructure Data Flow service.

    Creates a run for an application.

    Example Usage

    // Example: create a Data Flow run for an existing application (TypeScript).
    // Placeholder identifiers (compartmentId, testApplication, ...) are supplied
    // by the surrounding documentation context.
    import * as pulumi from "@pulumi/pulumi";
    import * as oci from "@pulumi/oci";
    
    const testInvokeRun = new oci.dataflow.InvokeRun("test_invoke_run", {
        compartmentId: compartmentId,
        applicationId: testApplication.id,
        applicationLogConfig: {
            logGroupId: testLogGroup.id,
            logId: testLog.id,
        },
        archiveUri: invokeRunArchiveUri,
        arguments: invokeRunArguments,
        configuration: invokeRunConfiguration,
        definedTags: {
            "Operations.CostCenter": "42",
        },
        displayName: invokeRunDisplayName,
        driverShape: invokeRunDriverShape,
        driverShapeConfig: {
            memoryInGbs: invokeRunDriverShapeConfigMemoryInGbs,
            ocpus: invokeRunDriverShapeConfigOcpus,
        },
        execute: invokeRunExecute,
        executorShape: invokeRunExecutorShape,
        executorShapeConfig: {
            memoryInGbs: invokeRunExecutorShapeConfigMemoryInGbs,
            ocpus: invokeRunExecutorShapeConfigOcpus,
        },
        freeformTags: {
            Department: "Finance",
        },
        idleTimeoutInMinutes: invokeRunIdleTimeoutInMinutes,
        logsBucketUri: invokeRunLogsBucketUri,
        maxDurationInMinutes: invokeRunMaxDurationInMinutes,
        metastoreId: metastoreId,
        numExecutors: invokeRunNumExecutors,
        opcParentRptUrl: invokeRunOpcParentRptUrl,
        parameters: [{
            name: invokeRunParametersName,
            value: invokeRunParametersValue,
        }],
        poolId: testPool.id,
        sparkVersion: invokeRunSparkVersion,
        type: invokeRunType,
        warehouseBucketUri: invokeRunWarehouseBucketUri,
    });
    
    # Example: create a Data Flow run for an existing application (Python).
    # Placeholder identifiers (compartment_id, test_application, ...) are supplied
    # by the surrounding documentation context.
    import pulumi
    import pulumi_oci as oci
    
    test_invoke_run = oci.data_flow.InvokeRun("test_invoke_run",
        compartment_id=compartment_id,
        application_id=test_application["id"],
        application_log_config={
            "log_group_id": test_log_group["id"],
            "log_id": test_log["id"],
        },
        archive_uri=invoke_run_archive_uri,
        arguments=invoke_run_arguments,
        configuration=invoke_run_configuration,
        defined_tags={
            "Operations.CostCenter": "42",
        },
        display_name=invoke_run_display_name,
        driver_shape=invoke_run_driver_shape,
        driver_shape_config={
            "memory_in_gbs": invoke_run_driver_shape_config_memory_in_gbs,
            "ocpus": invoke_run_driver_shape_config_ocpus,
        },
        execute=invoke_run_execute,
        executor_shape=invoke_run_executor_shape,
        executor_shape_config={
            "memory_in_gbs": invoke_run_executor_shape_config_memory_in_gbs,
            "ocpus": invoke_run_executor_shape_config_ocpus,
        },
        freeform_tags={
            "Department": "Finance",
        },
        idle_timeout_in_minutes=invoke_run_idle_timeout_in_minutes,
        logs_bucket_uri=invoke_run_logs_bucket_uri,
        max_duration_in_minutes=invoke_run_max_duration_in_minutes,
        metastore_id=metastore_id,
        num_executors=invoke_run_num_executors,
        opc_parent_rpt_url=invoke_run_opc_parent_rpt_url,
        parameters=[{
            "name": invoke_run_parameters_name,
            "value": invoke_run_parameters_value,
        }],
        pool_id=test_pool["id"],
        spark_version=invoke_run_spark_version,
        type=invoke_run_type,
        warehouse_bucket_uri=invoke_run_warehouse_bucket_uri)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-oci/sdk/v2/go/oci/DataFlow"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := DataFlow.NewInvokeRun(ctx, "test_invoke_run", &DataFlow.InvokeRunArgs{
    			CompartmentId: pulumi.Any(compartmentId),
    			ApplicationId: pulumi.Any(testApplication.Id),
    			ApplicationLogConfig: &dataflow.InvokeRunApplicationLogConfigArgs{
    				LogGroupId: pulumi.Any(testLogGroup.Id),
    				LogId:      pulumi.Any(testLog.Id),
    			},
    			ArchiveUri:    pulumi.Any(invokeRunArchiveUri),
    			Arguments:     pulumi.Any(invokeRunArguments),
    			Configuration: pulumi.Any(invokeRunConfiguration),
    			DefinedTags: pulumi.StringMap{
    				"Operations.CostCenter": pulumi.String("42"),
    			},
    			DisplayName: pulumi.Any(invokeRunDisplayName),
    			DriverShape: pulumi.Any(invokeRunDriverShape),
    			DriverShapeConfig: &dataflow.InvokeRunDriverShapeConfigArgs{
    				MemoryInGbs: pulumi.Any(invokeRunDriverShapeConfigMemoryInGbs),
    				Ocpus:       pulumi.Any(invokeRunDriverShapeConfigOcpus),
    			},
    			Execute:       pulumi.Any(invokeRunExecute),
    			ExecutorShape: pulumi.Any(invokeRunExecutorShape),
    			ExecutorShapeConfig: &dataflow.InvokeRunExecutorShapeConfigArgs{
    				MemoryInGbs: pulumi.Any(invokeRunExecutorShapeConfigMemoryInGbs),
    				Ocpus:       pulumi.Any(invokeRunExecutorShapeConfigOcpus),
    			},
    			FreeformTags: pulumi.StringMap{
    				"Department": pulumi.String("Finance"),
    			},
    			IdleTimeoutInMinutes: pulumi.Any(invokeRunIdleTimeoutInMinutes),
    			LogsBucketUri:        pulumi.Any(invokeRunLogsBucketUri),
    			MaxDurationInMinutes: pulumi.Any(invokeRunMaxDurationInMinutes),
    			MetastoreId:          pulumi.Any(metastoreId),
    			NumExecutors:         pulumi.Any(invokeRunNumExecutors),
    			OpcParentRptUrl:      pulumi.Any(invokeRunOpcParentRptUrl),
    			Parameters: dataflow.InvokeRunParameterArray{
    				&dataflow.InvokeRunParameterArgs{
    					Name:  pulumi.Any(invokeRunParametersName),
    					Value: pulumi.Any(invokeRunParametersValue),
    				},
    			},
    			PoolId:             pulumi.Any(testPool.Id),
    			SparkVersion:       pulumi.Any(invokeRunSparkVersion),
    			Type:               pulumi.Any(invokeRunType),
    			WarehouseBucketUri: pulumi.Any(invokeRunWarehouseBucketUri),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Oci = Pulumi.Oci;
    
    // Example: create a Data Flow run for an existing application (C#).
    // Placeholder identifiers (compartmentId, testApplication, ...) are supplied
    // by the surrounding documentation context.
    return await Deployment.RunAsync(() => 
    {
        var testInvokeRun = new Oci.DataFlow.InvokeRun("test_invoke_run", new()
        {
            CompartmentId = compartmentId,
            ApplicationId = testApplication.Id,
            ApplicationLogConfig = new Oci.DataFlow.Inputs.InvokeRunApplicationLogConfigArgs
            {
                LogGroupId = testLogGroup.Id,
                LogId = testLog.Id,
            },
            ArchiveUri = invokeRunArchiveUri,
            Arguments = invokeRunArguments,
            Configuration = invokeRunConfiguration,
            DefinedTags = 
            {
                { "Operations.CostCenter", "42" },
            },
            DisplayName = invokeRunDisplayName,
            DriverShape = invokeRunDriverShape,
            DriverShapeConfig = new Oci.DataFlow.Inputs.InvokeRunDriverShapeConfigArgs
            {
                MemoryInGbs = invokeRunDriverShapeConfigMemoryInGbs,
                Ocpus = invokeRunDriverShapeConfigOcpus,
            },
            Execute = invokeRunExecute,
            ExecutorShape = invokeRunExecutorShape,
            ExecutorShapeConfig = new Oci.DataFlow.Inputs.InvokeRunExecutorShapeConfigArgs
            {
                MemoryInGbs = invokeRunExecutorShapeConfigMemoryInGbs,
                Ocpus = invokeRunExecutorShapeConfigOcpus,
            },
            FreeformTags = 
            {
                { "Department", "Finance" },
            },
            IdleTimeoutInMinutes = invokeRunIdleTimeoutInMinutes,
            LogsBucketUri = invokeRunLogsBucketUri,
            MaxDurationInMinutes = invokeRunMaxDurationInMinutes,
            MetastoreId = metastoreId,
            NumExecutors = invokeRunNumExecutors,
            OpcParentRptUrl = invokeRunOpcParentRptUrl,
            Parameters = new[]
            {
                new Oci.DataFlow.Inputs.InvokeRunParameterArgs
                {
                    Name = invokeRunParametersName,
                    Value = invokeRunParametersValue,
                },
            },
            PoolId = testPool.Id,
            SparkVersion = invokeRunSparkVersion,
            Type = invokeRunType,
            WarehouseBucketUri = invokeRunWarehouseBucketUri,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.oci.DataFlow.InvokeRun;
    import com.pulumi.oci.DataFlow.InvokeRunArgs;
    import com.pulumi.oci.DataFlow.inputs.InvokeRunApplicationLogConfigArgs;
    import com.pulumi.oci.DataFlow.inputs.InvokeRunDriverShapeConfigArgs;
    import com.pulumi.oci.DataFlow.inputs.InvokeRunExecutorShapeConfigArgs;
    import com.pulumi.oci.DataFlow.inputs.InvokeRunParameterArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    // Example: create a Data Flow run for an existing application (Java).
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            // Placeholder identifiers (compartmentId, testApplication, ...) are
            // supplied by the surrounding documentation context.
            var testInvokeRun = new InvokeRun("testInvokeRun", InvokeRunArgs.builder()
                .compartmentId(compartmentId)
                .applicationId(testApplication.id())
                .applicationLogConfig(InvokeRunApplicationLogConfigArgs.builder()
                    .logGroupId(testLogGroup.id())
                    .logId(testLog.id())
                    .build())
                .archiveUri(invokeRunArchiveUri)
                .arguments(invokeRunArguments)
                .configuration(invokeRunConfiguration)
                .definedTags(Map.of("Operations.CostCenter", "42"))
                .displayName(invokeRunDisplayName)
                .driverShape(invokeRunDriverShape)
                .driverShapeConfig(InvokeRunDriverShapeConfigArgs.builder()
                    .memoryInGbs(invokeRunDriverShapeConfigMemoryInGbs)
                    .ocpus(invokeRunDriverShapeConfigOcpus)
                    .build())
                .execute(invokeRunExecute)
                .executorShape(invokeRunExecutorShape)
                .executorShapeConfig(InvokeRunExecutorShapeConfigArgs.builder()
                    .memoryInGbs(invokeRunExecutorShapeConfigMemoryInGbs)
                    .ocpus(invokeRunExecutorShapeConfigOcpus)
                    .build())
                .freeformTags(Map.of("Department", "Finance"))
                .idleTimeoutInMinutes(invokeRunIdleTimeoutInMinutes)
                .logsBucketUri(invokeRunLogsBucketUri)
                .maxDurationInMinutes(invokeRunMaxDurationInMinutes)
                .metastoreId(metastoreId)
                .numExecutors(invokeRunNumExecutors)
                .opcParentRptUrl(invokeRunOpcParentRptUrl)
                .parameters(InvokeRunParameterArgs.builder()
                    .name(invokeRunParametersName)
                    .value(invokeRunParametersValue)
                    .build())
                .poolId(testPool.id())
                .sparkVersion(invokeRunSparkVersion)
                .type(invokeRunType)
                .warehouseBucketUri(invokeRunWarehouseBucketUri)
                .build());
    
        }
    }
    
    # Example: invoke a Data Flow run for an existing application (YAML).
    # ${...} references are documentation placeholders supplied by the example.
    resources:
      testInvokeRun:
        type: oci:DataFlow:InvokeRun
        name: test_invoke_run
        properties:
          compartmentId: ${compartmentId}
          applicationId: ${testApplication.id}
          applicationLogConfig:
            logGroupId: ${testLogGroup.id}
            logId: ${testLog.id}
          archiveUri: ${invokeRunArchiveUri}
          arguments: ${invokeRunArguments}
          configuration: ${invokeRunConfiguration}
          definedTags:
            Operations.CostCenter: '42'
          displayName: ${invokeRunDisplayName}
          driverShape: ${invokeRunDriverShape}
          driverShapeConfig:
            memoryInGbs: ${invokeRunDriverShapeConfigMemoryInGbs}
            ocpus: ${invokeRunDriverShapeConfigOcpus}
          execute: ${invokeRunExecute}
          executorShape: ${invokeRunExecutorShape}
          executorShapeConfig:
            memoryInGbs: ${invokeRunExecutorShapeConfigMemoryInGbs}
            ocpus: ${invokeRunExecutorShapeConfigOcpus}
          freeformTags:
            Department: Finance
          idleTimeoutInMinutes: ${invokeRunIdleTimeoutInMinutes}
          logsBucketUri: ${invokeRunLogsBucketUri}
          maxDurationInMinutes: ${invokeRunMaxDurationInMinutes}
          metastoreId: ${metastoreId}
          numExecutors: ${invokeRunNumExecutors}
          opcParentRptUrl: ${invokeRunOpcParentRptUrl}
          parameters:
            - name: ${invokeRunParametersName}
              value: ${invokeRunParametersValue}
          poolId: ${testPool.id}
          sparkVersion: ${invokeRunSparkVersion}
          type: ${invokeRunType}
          warehouseBucketUri: ${invokeRunWarehouseBucketUri}
    

    Note

    If a user tries to invoke runs on multiple applications that have Private Endpoints, the service allows only one run at a time to succeed: it will proceed with invoking one run and put the rest of them into a failed state.

    Create InvokeRun Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new InvokeRun(name: string, args: InvokeRunArgs, opts?: CustomResourceOptions);
    @overload
    def InvokeRun(resource_name: str,
                  args: InvokeRunArgs,
                  opts: Optional[ResourceOptions] = None)
    
    @overload
    def InvokeRun(resource_name: str,
                  opts: Optional[ResourceOptions] = None,
                  compartment_id: Optional[str] = None,
                  executor_shape: Optional[str] = None,
                  application_log_config: Optional[_dataflow.InvokeRunApplicationLogConfigArgs] = None,
                  executor_shape_config: Optional[_dataflow.InvokeRunExecutorShapeConfigArgs] = None,
                  asynchronous: Optional[bool] = None,
                  freeform_tags: Optional[Mapping[str, str]] = None,
                  configuration: Optional[Mapping[str, str]] = None,
                  defined_tags: Optional[Mapping[str, str]] = None,
                  idle_timeout_in_minutes: Optional[str] = None,
                  driver_shape: Optional[str] = None,
                  driver_shape_config: Optional[_dataflow.InvokeRunDriverShapeConfigArgs] = None,
                  execute: Optional[str] = None,
                  application_id: Optional[str] = None,
                  arguments: Optional[Sequence[str]] = None,
                  archive_uri: Optional[str] = None,
                  display_name: Optional[str] = None,
                  logs_bucket_uri: Optional[str] = None,
                  max_duration_in_minutes: Optional[str] = None,
                  metastore_id: Optional[str] = None,
                  num_executors: Optional[int] = None,
                  opc_parent_rpt_url: Optional[str] = None,
                  parameters: Optional[Sequence[_dataflow.InvokeRunParameterArgs]] = None,
                  pool_id: Optional[str] = None,
                  spark_version: Optional[str] = None,
                  type: Optional[str] = None,
                  warehouse_bucket_uri: Optional[str] = None)
    func NewInvokeRun(ctx *Context, name string, args InvokeRunArgs, opts ...ResourceOption) (*InvokeRun, error)
    public InvokeRun(string name, InvokeRunArgs args, CustomResourceOptions? opts = null)
    public InvokeRun(String name, InvokeRunArgs args)
    public InvokeRun(String name, InvokeRunArgs args, CustomResourceOptions options)
    
    type: oci:DataFlow:InvokeRun
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args InvokeRunArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args InvokeRunArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args InvokeRunArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args InvokeRunArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args InvokeRunArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    // Reference example: placeholder values for all InvokeRun input properties (C#).
    var invokeRunResource = new Oci.DataFlow.InvokeRun("invokeRunResource", new()
    {
        CompartmentId = "string",
        ExecutorShape = "string",
        ApplicationLogConfig = new Oci.DataFlow.Inputs.InvokeRunApplicationLogConfigArgs
        {
            LogGroupId = "string",
            LogId = "string",
        },
        ExecutorShapeConfig = new Oci.DataFlow.Inputs.InvokeRunExecutorShapeConfigArgs
        {
            MemoryInGbs = 0,
            Ocpus = 0,
        },
        Asynchronous = false,
        FreeformTags = 
        {
            { "string", "string" },
        },
        Configuration = 
        {
            { "string", "string" },
        },
        DefinedTags = 
        {
            { "string", "string" },
        },
        IdleTimeoutInMinutes = "string",
        DriverShape = "string",
        DriverShapeConfig = new Oci.DataFlow.Inputs.InvokeRunDriverShapeConfigArgs
        {
            MemoryInGbs = 0,
            Ocpus = 0,
        },
        Execute = "string",
        ApplicationId = "string",
        Arguments = new[]
        {
            "string",
        },
        ArchiveUri = "string",
        DisplayName = "string",
        LogsBucketUri = "string",
        MaxDurationInMinutes = "string",
        MetastoreId = "string",
        NumExecutors = 0,
        OpcParentRptUrl = "string",
        Parameters = new[]
        {
            new Oci.DataFlow.Inputs.InvokeRunParameterArgs
            {
                Name = "string",
                Value = "string",
            },
        },
        PoolId = "string",
        SparkVersion = "string",
        Type = "string",
        WarehouseBucketUri = "string",
    });
    
    // Reference example: placeholder values for all InvokeRun input properties (Go).
    // The package identifier is normalized to "dataflow" so the constructor call
    // matches the nested *Args types below (the generated snippet mixed the
    // incompatible spellings DataFlow. and dataflow. for the same package).
    example, err := dataflow.NewInvokeRun(ctx, "invokeRunResource", &dataflow.InvokeRunArgs{
    	CompartmentId: pulumi.String("string"),
    	ExecutorShape: pulumi.String("string"),
    	ApplicationLogConfig: &dataflow.InvokeRunApplicationLogConfigArgs{
    		LogGroupId: pulumi.String("string"),
    		LogId:      pulumi.String("string"),
    	},
    	ExecutorShapeConfig: &dataflow.InvokeRunExecutorShapeConfigArgs{
    		MemoryInGbs: pulumi.Float64(0),
    		Ocpus:       pulumi.Float64(0),
    	},
    	Asynchronous: pulumi.Bool(false),
    	FreeformTags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Configuration: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	DefinedTags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	IdleTimeoutInMinutes: pulumi.String("string"),
    	DriverShape:          pulumi.String("string"),
    	DriverShapeConfig: &dataflow.InvokeRunDriverShapeConfigArgs{
    		MemoryInGbs: pulumi.Float64(0),
    		Ocpus:       pulumi.Float64(0),
    	},
    	Execute:       pulumi.String("string"),
    	ApplicationId: pulumi.String("string"),
    	Arguments: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	ArchiveUri:           pulumi.String("string"),
    	DisplayName:          pulumi.String("string"),
    	LogsBucketUri:        pulumi.String("string"),
    	MaxDurationInMinutes: pulumi.String("string"),
    	MetastoreId:          pulumi.String("string"),
    	NumExecutors:         pulumi.Int(0),
    	OpcParentRptUrl:      pulumi.String("string"),
    	Parameters: dataflow.InvokeRunParameterArray{
    		&dataflow.InvokeRunParameterArgs{
    			Name:  pulumi.String("string"),
    			Value: pulumi.String("string"),
    		},
    	},
    	PoolId:             pulumi.String("string"),
    	SparkVersion:       pulumi.String("string"),
    	Type:               pulumi.String("string"),
    	WarehouseBucketUri: pulumi.String("string"),
    })
    
    // Reference example: placeholder values for all InvokeRun input properties (Java).
    var invokeRunResource = new InvokeRun("invokeRunResource", InvokeRunArgs.builder()
        .compartmentId("string")
        .executorShape("string")
        .applicationLogConfig(InvokeRunApplicationLogConfigArgs.builder()
            .logGroupId("string")
            .logId("string")
            .build())
        .executorShapeConfig(InvokeRunExecutorShapeConfigArgs.builder()
            .memoryInGbs(0)
            .ocpus(0)
            .build())
        .asynchronous(false)
        .freeformTags(Map.of("string", "string"))
        .configuration(Map.of("string", "string"))
        .definedTags(Map.of("string", "string"))
        .idleTimeoutInMinutes("string")
        .driverShape("string")
        .driverShapeConfig(InvokeRunDriverShapeConfigArgs.builder()
            .memoryInGbs(0)
            .ocpus(0)
            .build())
        .execute("string")
        .applicationId("string")
        .arguments("string")
        .archiveUri("string")
        .displayName("string")
        .logsBucketUri("string")
        .maxDurationInMinutes("string")
        .metastoreId("string")
        .numExecutors(0)
        .opcParentRptUrl("string")
        .parameters(InvokeRunParameterArgs.builder()
            .name("string")
            .value("string")
            .build())
        .poolId("string")
        .sparkVersion("string")
        .type("string")
        .warehouseBucketUri("string")
        .build());
    
    # Reference example: placeholder values for all InvokeRun input properties (Python).
    invoke_run_resource = oci.data_flow.InvokeRun("invokeRunResource",
        compartment_id="string",
        executor_shape="string",
        application_log_config={
            "log_group_id": "string",
            "log_id": "string",
        },
        executor_shape_config={
            "memory_in_gbs": 0,
            "ocpus": 0,
        },
        asynchronous=False,
        freeform_tags={
            "string": "string",
        },
        configuration={
            "string": "string",
        },
        defined_tags={
            "string": "string",
        },
        idle_timeout_in_minutes="string",
        driver_shape="string",
        driver_shape_config={
            "memory_in_gbs": 0,
            "ocpus": 0,
        },
        execute="string",
        application_id="string",
        arguments=["string"],
        archive_uri="string",
        display_name="string",
        logs_bucket_uri="string",
        max_duration_in_minutes="string",
        metastore_id="string",
        num_executors=0,
        opc_parent_rpt_url="string",
        parameters=[{
            "name": "string",
            "value": "string",
        }],
        pool_id="string",
        spark_version="string",
        type="string",
        warehouse_bucket_uri="string")
    
    // Reference example: placeholder values for all InvokeRun input properties (TypeScript).
    const invokeRunResource = new oci.dataflow.InvokeRun("invokeRunResource", {
        compartmentId: "string",
        executorShape: "string",
        applicationLogConfig: {
            logGroupId: "string",
            logId: "string",
        },
        executorShapeConfig: {
            memoryInGbs: 0,
            ocpus: 0,
        },
        asynchronous: false,
        freeformTags: {
            string: "string",
        },
        configuration: {
            string: "string",
        },
        definedTags: {
            string: "string",
        },
        idleTimeoutInMinutes: "string",
        driverShape: "string",
        driverShapeConfig: {
            memoryInGbs: 0,
            ocpus: 0,
        },
        execute: "string",
        applicationId: "string",
        arguments: ["string"],
        archiveUri: "string",
        displayName: "string",
        logsBucketUri: "string",
        maxDurationInMinutes: "string",
        metastoreId: "string",
        numExecutors: 0,
        opcParentRptUrl: "string",
        parameters: [{
            name: "string",
            value: "string",
        }],
        poolId: "string",
        sparkVersion: "string",
        type: "string",
        warehouseBucketUri: "string",
    });
    
    # Reference example: placeholder values for all InvokeRun input properties (YAML).
    type: oci:DataFlow:InvokeRun
    properties:
        applicationId: string
        applicationLogConfig:
            logGroupId: string
            logId: string
        archiveUri: string
        arguments:
            - string
        asynchronous: false
        compartmentId: string
        configuration:
            string: string
        definedTags:
            string: string
        displayName: string
        driverShape: string
        driverShapeConfig:
            memoryInGbs: 0
            ocpus: 0
        execute: string
        executorShape: string
        executorShapeConfig:
            memoryInGbs: 0
            ocpus: 0
        freeformTags:
            string: string
        idleTimeoutInMinutes: string
        logsBucketUri: string
        maxDurationInMinutes: string
        metastoreId: string
        numExecutors: 0
        opcParentRptUrl: string
        parameters:
            - name: string
              value: string
        poolId: string
        sparkVersion: string
        type: string
        warehouseBucketUri: string
    

    InvokeRun Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The InvokeRun resource accepts the following input properties:

    CompartmentId string
    (Updatable) The OCID of a compartment.
    ApplicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    ApplicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments List<string>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    Asynchronous bool
    Configuration Dictionary<string, string>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DefinedTags Dictionary<string, string>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    DisplayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --files, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FreeformTags Dictionary<string, string>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    IdleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OpcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    Parameters List<InvokeRunParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    SparkVersion string
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    Type string
    The Spark application processing type.
    WarehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    CompartmentId string
    (Updatable) The OCID of a compartment.
    ApplicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    ApplicationLogConfig InvokeRunApplicationLogConfigArgs
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments []string
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    Asynchronous bool
    Configuration map[string]string
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DefinedTags map[string]string
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    DisplayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfig InvokeRunDriverShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfig InvokeRunExecutorShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FreeformTags map[string]string
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    IdleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OpcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    Parameters []InvokeRunParameterArgs
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    SparkVersion string
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    Type string
    The Spark application processing type.
    WarehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    compartmentId String
    (Updatable) The OCID of a compartment.
    applicationId String
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous Boolean
    configuration Map<String,String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags Map<String,String>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName String
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    freeformTags Map<String,String>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes String
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Integer
    The number of executor VMs requested.
    opcParentRptUrl String
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    parameters List<InvokeRunParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    sparkVersion String
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    type String
    The Spark application processing type.
    warehouseBucketUri String

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    compartmentId string
    (Updatable) The OCID of a compartment.
    applicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    archiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments string[]
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous boolean
    configuration {[key: string]: string}
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags {[key: string]: string}
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    freeformTags {[key: string]: string}
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    logsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors number
    The number of executor VMs requested.
    opcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    parameters InvokeRunParameter[]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId string
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    sparkVersion string
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    type string
    The Spark application processing type.
    warehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    compartment_id str
    (Updatable) The OCID of a compartment.
    application_id str
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    application_log_config dataflow.InvokeRunApplicationLogConfigArgs
    Logging details of Application logs for Data Flow Run.
    archive_uri str
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments Sequence[str]
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous bool
    configuration Mapping[str, str]
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    defined_tags Mapping[str, str]
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    display_name str
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driver_shape str
    The VM shape for the driver. Sets the driver cores and memory.
    driver_shape_config dataflow.InvokeRunDriverShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute str
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executor_shape str
    The VM shape for the executors. Sets the executor cores and memory.
    executor_shape_config dataflow.InvokeRunExecutorShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    freeform_tags Mapping[str, str]
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idle_timeout_in_minutes str
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    logs_bucket_uri str
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    max_duration_in_minutes str
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastore_id str
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    num_executors int
    The number of executor VMs requested.
    opc_parent_rpt_url str
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    parameters Sequence[dataflow.InvokeRunParameterArgs]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    pool_id str
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    spark_version str
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    type str
    The Spark application processing type.
    warehouse_bucket_uri str

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    compartmentId String
    (Updatable) The OCID of a compartment.
    applicationId String
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig Property Map
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous Boolean
    configuration Map<String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags Map<String>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName String
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig Property Map
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig Property Map
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    freeformTags Map<String>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes String
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this amount of time period. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Number
    The number of executor VMs requested.
    opcParentRptUrl String
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    parameters List<Property Map>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. Unique ID to identify a Data Flow pool resource.
    sparkVersion String
    The Spark version used to run the application. This value may be set only when applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    type String
    The Spark application processing type.
    warehouseBucketUri String

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    Outputs

    All input properties are implicitly available as output properties. Additionally, the InvokeRun resource produces the following output properties:

    ClassName string
    The class for the application.
    DataReadInBytes string
    The data read by the run in bytes.
    DataWrittenInBytes string
    The data written by the run in bytes.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Id string
    The provider-assigned unique ID for this managed resource.
    Language string
    The Spark language.
    LifecycleDetails string
    The detailed messages about the lifecycle state.
    OpcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    PrivateEndpointDnsZones List<string>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    PrivateEndpointId string
    The OCID of a private endpoint.
    PrivateEndpointMaxHostCount int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    PrivateEndpointNsgIds List<string>
    An array of network security group OCIDs.
    PrivateEndpointSubnetId string
    The OCID of a subnet.
    RunDurationInMilliseconds string
    The duration of the run in milliseconds.
    State string
    The current state of this run.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TotalOcpu int
    The total number of oCPU requested by the run.
    ClassName string
    The class for the application.
    DataReadInBytes string
    The data read by the run in bytes.
    DataWrittenInBytes string
    The data written by the run in bytes.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Id string
    The provider-assigned unique ID for this managed resource.
    Language string
    The Spark language.
    LifecycleDetails string
    The detailed messages about the lifecycle state.
    OpcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    PrivateEndpointDnsZones []string
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    PrivateEndpointId string
    The OCID of a private endpoint.
    PrivateEndpointMaxHostCount int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    PrivateEndpointNsgIds []string
    An array of network security group OCIDs.
    PrivateEndpointSubnetId string
    The OCID of a subnet.
    RunDurationInMilliseconds string
    The duration of the run in milliseconds.
    State string
    The current state of this run.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TotalOcpu int
    The total number of oCPU requested by the run.
    className String
    The class for the application.
    dataReadInBytes String
    The data read by the run in bytes.
    dataWrittenInBytes String
    The data written by the run in bytes.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    id String
    The provider-assigned unique ID for this managed resource.
    language String
    The Spark language.
    lifecycleDetails String
    The detailed messages about the lifecycle state.
    opcRequestId String
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    privateEndpointDnsZones List<String>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId String
    The OCID of a private endpoint.
    privateEndpointMaxHostCount Integer
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds List<String>
    An array of network security group OCIDs.
    privateEndpointSubnetId String
    The OCID of a subnet.
    runDurationInMilliseconds String
    The duration of the run in milliseconds.
    state String
    The current state of this run.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu Integer
    The total number of oCPU requested by the run.
    className string
    The class for the application.
    dataReadInBytes string
    The data read by the run in bytes.
    dataWrittenInBytes string
    The data written by the run in bytes.
    fileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    id string
    The provider-assigned unique ID for this managed resource.
    language string
    The Spark language.
    lifecycleDetails string
    The detailed messages about the lifecycle state.
    opcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId string
    The OCID of the user who created the resource.
    ownerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    privateEndpointDnsZones string[]
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId string
    The OCID of a private endpoint.
    privateEndpointMaxHostCount number
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds string[]
    An array of network security group OCIDs.
    privateEndpointSubnetId string
    The OCID of a subnet.
    runDurationInMilliseconds string
    The duration of the run in milliseconds.
    state string
    The current state of this run.
    timeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu number
    The total number of oCPU requested by the run.
    class_name str
    The class for the application.
    data_read_in_bytes str
    The data read by the run in bytes.
    data_written_in_bytes str
    The data written by the run in bytes.
    file_uri str
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    id str
    The provider-assigned unique ID for this managed resource.
    language str
    The Spark language.
    lifecycle_details str
    The detailed messages about the lifecycle state.
    opc_request_id str
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    owner_principal_id str
    The OCID of the user who created the resource.
    owner_user_name str
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    private_endpoint_dns_zones Sequence[str]
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    private_endpoint_id str
    The OCID of a private endpoint.
    private_endpoint_max_host_count int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    private_endpoint_nsg_ids Sequence[str]
    An array of network security group OCIDs.
    private_endpoint_subnet_id str
    The OCID of a subnet.
    run_duration_in_milliseconds str
    The duration of the run in milliseconds.
    state str
    The current state of this run.
    time_created str
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    time_updated str
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    total_ocpu int
    The total number of oCPU requested by the run.
    className String
    The class for the application.
    dataReadInBytes String
    The data read by the run in bytes.
    dataWrittenInBytes String
    The data written by the run in bytes.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    id String
    The provider-assigned unique ID for this managed resource.
    language String
    The Spark language.
    lifecycleDetails String
    The detailed messages about the lifecycle state.
    opcRequestId String
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    privateEndpointDnsZones List<String>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId String
    The OCID of a private endpoint.
    privateEndpointMaxHostCount Number
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds List<String>
    An array of network security group OCIDs.
    privateEndpointSubnetId String
    The OCID of a subnet.
    runDurationInMilliseconds String
    The duration of the run in milliseconds.
    state String
    The current state of this run.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu Number
    The total number of oCPU requested by the run.

    Look up Existing InvokeRun Resource

    Get an existing InvokeRun resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: InvokeRunState, opts?: CustomResourceOptions): InvokeRun
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            application_id: Optional[str] = None,
            application_log_config: Optional[_dataflow.InvokeRunApplicationLogConfigArgs] = None,
            archive_uri: Optional[str] = None,
            arguments: Optional[Sequence[str]] = None,
            asynchronous: Optional[bool] = None,
            class_name: Optional[str] = None,
            compartment_id: Optional[str] = None,
            configuration: Optional[Mapping[str, str]] = None,
            data_read_in_bytes: Optional[str] = None,
            data_written_in_bytes: Optional[str] = None,
            defined_tags: Optional[Mapping[str, str]] = None,
            display_name: Optional[str] = None,
            driver_shape: Optional[str] = None,
            driver_shape_config: Optional[_dataflow.InvokeRunDriverShapeConfigArgs] = None,
            execute: Optional[str] = None,
            executor_shape: Optional[str] = None,
            executor_shape_config: Optional[_dataflow.InvokeRunExecutorShapeConfigArgs] = None,
            file_uri: Optional[str] = None,
            freeform_tags: Optional[Mapping[str, str]] = None,
            idle_timeout_in_minutes: Optional[str] = None,
            language: Optional[str] = None,
            lifecycle_details: Optional[str] = None,
            logs_bucket_uri: Optional[str] = None,
            max_duration_in_minutes: Optional[str] = None,
            metastore_id: Optional[str] = None,
            num_executors: Optional[int] = None,
            opc_parent_rpt_url: Optional[str] = None,
            opc_request_id: Optional[str] = None,
            owner_principal_id: Optional[str] = None,
            owner_user_name: Optional[str] = None,
            parameters: Optional[Sequence[_dataflow.InvokeRunParameterArgs]] = None,
            pool_id: Optional[str] = None,
            private_endpoint_dns_zones: Optional[Sequence[str]] = None,
            private_endpoint_id: Optional[str] = None,
            private_endpoint_max_host_count: Optional[int] = None,
            private_endpoint_nsg_ids: Optional[Sequence[str]] = None,
            private_endpoint_subnet_id: Optional[str] = None,
            run_duration_in_milliseconds: Optional[str] = None,
            spark_version: Optional[str] = None,
            state: Optional[str] = None,
            time_created: Optional[str] = None,
            time_updated: Optional[str] = None,
            total_ocpu: Optional[int] = None,
            type: Optional[str] = None,
            warehouse_bucket_uri: Optional[str] = None) -> InvokeRun
    func GetInvokeRun(ctx *Context, name string, id IDInput, state *InvokeRunState, opts ...ResourceOption) (*InvokeRun, error)
    public static InvokeRun Get(string name, Input<string> id, InvokeRunState? state, CustomResourceOptions? opts = null)
    public static InvokeRun get(String name, Output<String> id, InvokeRunState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ApplicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    ApplicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments List<string>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    Asynchronous bool
    ClassName string
    The class for the application.
    CompartmentId string
    (Updatable) The OCID of a compartment.
    Configuration Dictionary<string, string>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DataReadInBytes string
    The data read by the run in bytes.
    DataWrittenInBytes string
    The data written by the run in bytes.
    DefinedTags Dictionary<string, string>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    DisplayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    FreeformTags Dictionary<string, string>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    IdleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this period of time. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    Language string
    The Spark language.
    LifecycleDetails string
    The detailed messages about the lifecycle state.
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OpcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    OpcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    Parameters List<InvokeRunParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. Unique Id to identify a dataflow pool resource.
    PrivateEndpointDnsZones List<string>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    PrivateEndpointId string
    The OCID of a private endpoint.
    PrivateEndpointMaxHostCount int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    PrivateEndpointNsgIds List<string>
    An array of network security group OCIDs.
    PrivateEndpointSubnetId string
    The OCID of a subnet.
    RunDurationInMilliseconds string
    The duration of the run in milliseconds.
    SparkVersion string
    The Spark version utilized to run the application. This value may be set only if applicationId is not set, since otherwise the Spark version will be taken from the associated application.
    State string
    The current state of this run.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TotalOcpu int
    The total number of oCPU requested by the run.
    Type string
    The Spark application processing type.
    WarehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    ApplicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    ApplicationLogConfig InvokeRunApplicationLogConfigArgs
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments []string
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    Asynchronous bool
    ClassName string
    The class for the application.
    CompartmentId string
    (Updatable) The OCID of a compartment.
    Configuration map[string]string
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DataReadInBytes string
    The data read by the run in bytes.
    DataWrittenInBytes string
    The data written by the run in bytes.
    DefinedTags map[string]string
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    DisplayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfig InvokeRunDriverShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfig InvokeRunExecutorShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    FreeformTags map[string]string
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    IdleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after inactivity for this period of time. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    Language string
    The Spark language.
    LifecycleDetails string
    The detailed messages about the lifecycle state.
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OpcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    OpcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    Parameters []InvokeRunParameterArgs
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. Unique Id to identify a Data Flow pool resource.
    PrivateEndpointDnsZones []string
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    PrivateEndpointId string
    The OCID of a private endpoint.
    PrivateEndpointMaxHostCount int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    PrivateEndpointNsgIds []string
    An array of network security group OCIDs.
    PrivateEndpointSubnetId string
    The OCID of a subnet.
    RunDurationInMilliseconds string
    The duration of the run in milliseconds.
    SparkVersion string
    The Spark version utilized to run the application. This value may be set only if applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    State string
    The current state of this run.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TotalOcpu int
    The total number of oCPU requested by the run.
    Type string
    The Spark application processing type.
    WarehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    applicationId String
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous Boolean
    className String
    The class for the application.
    compartmentId String
    (Updatable) The OCID of a compartment.
    configuration Map<String,String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    dataReadInBytes String
    The data read by the run in bytes.
    dataWrittenInBytes String
    The data written by the run in bytes.
    definedTags Map<String,String>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName String
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags Map<String,String>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes String
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after this period of inactivity. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    language String
    The Spark language.
    lifecycleDetails String
    The detailed messages about the lifecycle state.
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Integer
    The number of executor VMs requested.
    opcParentRptUrl String
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    opcRequestId String
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters List<InvokeRunParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. Unique Id to identify a Data Flow pool resource.
    privateEndpointDnsZones List<String>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId String
    The OCID of a private endpoint.
    privateEndpointMaxHostCount Integer
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds List<String>
    An array of network security group OCIDs.
    privateEndpointSubnetId String
    The OCID of a subnet.
    runDurationInMilliseconds String
    The duration of the run in milliseconds.
    sparkVersion String
    The Spark version utilized to run the application. This value may be set only if applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    state String
    The current state of this run.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu Integer
    The total number of oCPU requested by the run.
    type String
    The Spark application processing type.
    warehouseBucketUri String

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    applicationId string
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig InvokeRunApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    archiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments string[]
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous boolean
    className string
    The class for the application.
    compartmentId string
    (Updatable) The OCID of a compartment.
    configuration {[key: string]: string}
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    dataReadInBytes string
    The data read by the run in bytes.
    dataWrittenInBytes string
    The data written by the run in bytes.
    definedTags {[key: string]: string}
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName string
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig InvokeRunDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig InvokeRunExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags {[key: string]: string}
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes string
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after this period of inactivity. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    language string
    The Spark language.
    lifecycleDetails string
    The detailed messages about the lifecycle state.
    logsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes string
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors number
    The number of executor VMs requested.
    opcParentRptUrl string
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    opcRequestId string
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId string
    The OCID of the user who created the resource.
    ownerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters InvokeRunParameter[]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId string
    The OCID of a pool. Unique Id to identify a Data Flow pool resource.
    privateEndpointDnsZones string[]
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId string
    The OCID of a private endpoint.
    privateEndpointMaxHostCount number
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds string[]
    An array of network security group OCIDs.
    privateEndpointSubnetId string
    The OCID of a subnet.
    runDurationInMilliseconds string
    The duration of the run in milliseconds.
    sparkVersion string
    The Spark version utilized to run the application. This value may be set only if applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    state string
    The current state of this run.
    timeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu number
    The total number of oCPU requested by the run.
    type string
    The Spark application processing type.
    warehouseBucketUri string

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    application_id str
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    application_log_config dataflow.InvokeRunApplicationLogConfigArgs
    Logging details of Application logs for Data Flow Run.
    archive_uri str
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments Sequence[str]
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous bool
    class_name str
    The class for the application.
    compartment_id str
    (Updatable) The OCID of a compartment.
    configuration Mapping[str, str]
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    data_read_in_bytes str
    The data read by the run in bytes.
    data_written_in_bytes str
    The data written by the run in bytes.
    defined_tags Mapping[str, str]
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    display_name str
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driver_shape str
    The VM shape for the driver. Sets the driver cores and memory.
    driver_shape_config dataflow.InvokeRunDriverShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute str
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executor_shape str
    The VM shape for the executors. Sets the executor cores and memory.
    executor_shape_config dataflow.InvokeRunExecutorShapeConfigArgs
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    file_uri str
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeform_tags Mapping[str, str]
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idle_timeout_in_minutes str
    (Updatable) The timeout value in minutes used to manage Runs. A Run would be stopped after this period of inactivity. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    language str
    The Spark language.
    lifecycle_details str
    The detailed messages about the lifecycle state.
    logs_bucket_uri str
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    max_duration_in_minutes str
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastore_id str
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    num_executors int
    The number of executor VMs requested.
    opc_parent_rpt_url str
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    opc_request_id str
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    owner_principal_id str
    The OCID of the user who created the resource.
    owner_user_name str
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters Sequence[dataflow.InvokeRunParameterArgs]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    pool_id str
    The OCID of a pool. Unique Id to identify a Data Flow pool resource.
    private_endpoint_dns_zones Sequence[str]
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    private_endpoint_id str
    The OCID of a private endpoint.
    private_endpoint_max_host_count int
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    private_endpoint_nsg_ids Sequence[str]
    An array of network security group OCIDs.
    private_endpoint_subnet_id str
    The OCID of a subnet.
    run_duration_in_milliseconds str
    The duration of the run in milliseconds.
    spark_version str
    The Spark version utilized to run the application. This value may be set only if applicationId is not set; otherwise, the Spark version will be taken from the associated application.
    state str
    The current state of this run.
    time_created str
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    time_updated str
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    total_ocpu int
    The total number of oCPU requested by the run.
    type str
    The Spark application processing type.
    warehouse_bucket_uri str

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    applicationId String
    The OCID of the associated application. If this value is set, then no value for the execute parameter is required. If this value is not set, then a value for the execute parameter is required, and a new application is created and associated with the new run.
    applicationLogConfig Property Map
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either a plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map else the request (POST or PUT) will fail with a HTTP 400 status code. Placeholders are specified as Service Api Spec, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    asynchronous Boolean
    className String
    The class for the application.
    compartmentId String
    (Updatable) The OCID of a compartment.
    configuration Map<String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    dataReadInBytes String
    The data read by the run in bytes.
    dataWrittenInBytes String
    The data written by the run in bytes.
    definedTags Map<String>
    (Updatable) Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    displayName String
    A user-friendly name that does not have to be unique. Avoid entering confidential information. If this value is not specified, it will be derived from the associated application's displayName or set by API using fileUri's application file name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfig Property Map
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfig Property Map
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags Map<String>
    (Updatable) Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    idleTimeoutInMinutes String
    (Updatable) The timeout value in minutes used to manage Runs. A Run will be stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable for Runs of type SESSION. Default value is 2880 minutes (2 days)
    language String
    The Spark language.
    lifecycleDetails String
    The detailed messages about the lifecycle state.
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    (Updatable) The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Number
    The number of executor VMs requested.
    opcParentRptUrl String
    (Optional header param, required for Resource Principal version 3.0+) Parent resource control plane endpoint used to exchange for upper level resource principal token.
    opcRequestId String
    Unique Oracle assigned identifier for the request. If you need to contact Oracle about a particular request, please provide the request ID.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters List<Property Map>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. A unique ID used to identify a Data Flow pool resource.
    privateEndpointDnsZones List<String>
    An array of DNS zone names. Example: [ "app.examplecorp.com", "app.examplecorp2.com" ]
    privateEndpointId String
    The OCID of a private endpoint.
    privateEndpointMaxHostCount Number
    The maximum number of hosts to be accessed through the private endpoint. This value is used to calculate the relevant CIDR block and should be a multiple of 256. If the value is not a multiple of 256, it is rounded up to the next multiple of 256. For example, 300 is rounded up to 512.
    privateEndpointNsgIds List<String>
    An array of network security group OCIDs.
    privateEndpointSubnetId String
    The OCID of a subnet.
    runDurationInMilliseconds String
    The duration of the run in milliseconds.
    sparkVersion String
    The Spark version utilized to run the application. This value may be set only if applicationId is not set, since otherwise the Spark version will be taken from the associated application.
    state String
    The current state of this run.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    totalOcpu Number
    The total number of OCPUs requested by the run.
    type String
    The Spark application processing type.
    warehouseBucketUri String

    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    ** IMPORTANT ** Any change to a property that does not support update will force the destruction and recreation of the resource with the new property values

    Supporting Types

    InvokeRunApplicationLogConfig, InvokeRunApplicationLogConfigArgs

    LogGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    LogId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    LogGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    LogId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId String
    The log group id for where log objects will be for Data Flow Runs.
    logId String
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    logId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    log_group_id str
    The log group id for where log objects will be for Data Flow Runs.
    log_id str
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId String
    The log group id for where log objects will be for Data Flow Runs.
    logId String
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.

    InvokeRunDriverShapeConfig, InvokeRunDriverShapeConfigArgs

    MemoryInGbs double
    The amount of memory used for the driver or executors.
    Ocpus double
    The total number of OCPUs used for the driver or executors. See here for details.
    MemoryInGbs float64
    The amount of memory used for the driver or executors.
    Ocpus float64
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Double
    The amount of memory used for the driver or executors.
    ocpus Double
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs number
    The amount of memory used for the driver or executors.
    ocpus number
    The total number of OCPUs used for the driver or executors. See here for details.
    memory_in_gbs float
    The amount of memory used for the driver or executors.
    ocpus float
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Number
    The amount of memory used for the driver or executors.
    ocpus Number
    The total number of OCPUs used for the driver or executors. See here for details.

    InvokeRunExecutorShapeConfig, InvokeRunExecutorShapeConfigArgs

    MemoryInGbs double
    The amount of memory used for the driver or executors.
    Ocpus double
    The total number of OCPUs used for the driver or executors. See here for details.
    MemoryInGbs float64
    The amount of memory used for the driver or executors.
    Ocpus float64
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Double
    The amount of memory used for the driver or executors.
    ocpus Double
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs number
    The amount of memory used for the driver or executors.
    ocpus number
    The total number of OCPUs used for the driver or executors. See here for details.
    memory_in_gbs float
    The amount of memory used for the driver or executors.
    ocpus float
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Number
    The amount of memory used for the driver or executors.
    ocpus Number
    The total number of OCPUs used for the driver or executors. See here for details.

    InvokeRunParameter, InvokeRunParameterArgs

    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value String
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name str
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value str
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value String
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"

    Import

    InvokeRuns can be imported using the id, e.g.

    $ pulumi import oci:DataFlow/invokeRun:InvokeRun test_invoke_run "id"
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    oci pulumi/pulumi-oci
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the oci Terraform Provider.
    oci logo
    Oracle Cloud Infrastructure v2.17.0 published on Friday, Nov 15, 2024 by Pulumi