oci.DataFlow.getApplications

Oracle Cloud Infrastructure v2.17.0 published on Friday, Nov 15, 2024 by Pulumi

    This data source provides the list of Applications in Oracle Cloud Infrastructure Data Flow service.

    Lists all applications in the specified compartment. The query must include compartmentId, and at most one other parameter may be included. If the query does not include compartmentId, or includes compartmentId together with two or more other parameters, an error is returned.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as oci from "@pulumi/oci";
    
    const testApplications = oci.DataFlow.getApplications({
        compartmentId: compartmentId,
        displayName: applicationDisplayName,
        displayNameStartsWith: applicationDisplayNameStartsWith,
        ownerPrincipalId: testOwnerPrincipal.id,
        sparkVersion: applicationSparkVersion,
    });
    
    import pulumi
    import pulumi_oci as oci
    
    test_applications = oci.DataFlow.get_applications(compartment_id=compartment_id,
        display_name=application_display_name,
        display_name_starts_with=application_display_name_starts_with,
        owner_principal_id=test_owner_principal["id"],
        spark_version=application_spark_version)
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-oci/sdk/v2/go/oci/DataFlow"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := DataFlow.GetApplications(ctx, &DataFlow.GetApplicationsArgs{
    			CompartmentId:         compartmentId,
    			DisplayName:           pulumi.StringRef(applicationDisplayName),
    			DisplayNameStartsWith: pulumi.StringRef(applicationDisplayNameStartsWith),
    			OwnerPrincipalId:      pulumi.StringRef(testOwnerPrincipal.Id),
    			SparkVersion:          pulumi.StringRef(applicationSparkVersion),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Oci = Pulumi.Oci;
    
    return await Deployment.RunAsync(() => 
    {
        var testApplications = Oci.DataFlow.GetApplications.Invoke(new()
        {
            CompartmentId = compartmentId,
            DisplayName = applicationDisplayName,
            DisplayNameStartsWith = applicationDisplayNameStartsWith,
            OwnerPrincipalId = testOwnerPrincipal.Id,
            SparkVersion = applicationSparkVersion,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.oci.DataFlow.DataFlowFunctions;
    import com.pulumi.oci.DataFlow.inputs.GetApplicationsArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var testApplications = DataFlowFunctions.getApplications(GetApplicationsArgs.builder()
                .compartmentId(compartmentId)
                .displayName(applicationDisplayName)
                .displayNameStartsWith(applicationDisplayNameStartsWith)
                .ownerPrincipalId(testOwnerPrincipal.id())
                .sparkVersion(applicationSparkVersion)
                .build());
    
        }
    }
    
    variables:
      testApplications:
        fn::invoke:
          Function: oci:DataFlow:getApplications
          Arguments:
            compartmentId: ${compartmentId}
            displayName: ${applicationDisplayName}
            displayNameStartsWith: ${applicationDisplayNameStartsWith}
            ownerPrincipalId: ${testOwnerPrincipal.id}
            sparkVersion: ${applicationSparkVersion}
    

    Using getApplications

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getApplications(args: GetApplicationsArgs, opts?: InvokeOptions): Promise<GetApplicationsResult>
    function getApplicationsOutput(args: GetApplicationsOutputArgs, opts?: InvokeOptions): Output<GetApplicationsResult>
    def get_applications(compartment_id: Optional[str] = None,
                         display_name: Optional[str] = None,
                         display_name_starts_with: Optional[str] = None,
                         filters: Optional[Sequence[_dataflow.GetApplicationsFilter]] = None,
                         owner_principal_id: Optional[str] = None,
                         spark_version: Optional[str] = None,
                         opts: Optional[InvokeOptions] = None) -> GetApplicationsResult
    def get_applications_output(compartment_id: Optional[pulumi.Input[str]] = None,
                         display_name: Optional[pulumi.Input[str]] = None,
                         display_name_starts_with: Optional[pulumi.Input[str]] = None,
                         filters: Optional[pulumi.Input[Sequence[pulumi.Input[_dataflow.GetApplicationsFilterArgs]]]] = None,
                         owner_principal_id: Optional[pulumi.Input[str]] = None,
                         spark_version: Optional[pulumi.Input[str]] = None,
                         opts: Optional[InvokeOptions] = None) -> Output[GetApplicationsResult]
    func GetApplications(ctx *Context, args *GetApplicationsArgs, opts ...InvokeOption) (*GetApplicationsResult, error)
    func GetApplicationsOutput(ctx *Context, args *GetApplicationsOutputArgs, opts ...InvokeOption) GetApplicationsResultOutput

    > Note: This function is named GetApplications in the Go SDK.

    public static class GetApplications 
    {
        public static Task<GetApplicationsResult> InvokeAsync(GetApplicationsArgs args, InvokeOptions? opts = null)
        public static Output<GetApplicationsResult> Invoke(GetApplicationsInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetApplicationsResult> getApplications(GetApplicationsArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: oci:DataFlow/getApplications:getApplications
      arguments:
        # arguments dictionary
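
    As a brief, hedged illustration of the two invocation forms described above, the TypeScript sketch below calls both getApplications and getApplicationsOutput. The compartmentId stack configuration value is an assumption made for the example, not part of the generated documentation.

    import * as pulumi from "@pulumi/pulumi";
    import * as oci from "@pulumi/oci";
    
    const config = new pulumi.Config();
    // Assumed stack config value holding the compartment OCID.
    const compartmentId = config.require("compartmentId");
    
    // Direct form: returns a Promise<GetApplicationsResult> that can be awaited.
    const direct = oci.DataFlow.getApplications({ compartmentId: compartmentId });
    direct.then(result => console.log(`Found ${result.applications.length} applications`));
    
    // Output form: accepts Inputs and returns an Output, so it composes with other resources.
    const viaOutput = oci.DataFlow.getApplicationsOutput({ compartmentId: compartmentId });
    export const applicationCount = viaOutput.applications.apply(apps => apps.length);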

    The following arguments are supported:

    CompartmentId string
    The OCID of the compartment.
    DisplayName string
    The query parameter for the Spark application name.
    DisplayNameStartsWith string
    The displayName prefix.
    Filters List<GetApplicationsFilter>
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    SparkVersion string
    The Spark version utilized to run the application.
    CompartmentId string
    The OCID of the compartment.
    DisplayName string
    The query parameter for the Spark application name.
    DisplayNameStartsWith string
    The displayName prefix.
    Filters []GetApplicationsFilter
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    SparkVersion string
    The Spark version utilized to run the application.
    compartmentId String
    The OCID of the compartment.
    displayName String
    The query parameter for the Spark application name.
    displayNameStartsWith String
    The displayName prefix.
    filters List<GetApplicationsFilter>
    ownerPrincipalId String
    The OCID of the user who created the resource.
    sparkVersion String
    The Spark version utilized to run the application.
    compartmentId string
    The OCID of the compartment.
    displayName string
    The query parameter for the Spark application name.
    displayNameStartsWith string
    The displayName prefix.
    filters GetApplicationsFilter[]
    ownerPrincipalId string
    The OCID of the user who created the resource.
    sparkVersion string
    The Spark version utilized to run the application.
    compartment_id str
    The OCID of the compartment.
    display_name str
    The query parameter for the Spark application name.
    display_name_starts_with str
    The displayName prefix.
    filters Sequence[dataflow.GetApplicationsFilter]
    owner_principal_id str
    The OCID of the user who created the resource.
    spark_version str
    The Spark version utilized to run the application.
    compartmentId String
    The OCID of the compartment.
    displayName String
    The query parameter for the Spark application name.
    displayNameStartsWith String
    The displayName prefix.
    filters List<Property Map>
    ownerPrincipalId String
    The OCID of the user who created the resource.
    sparkVersion String
    The Spark version utilized to run the application.

    getApplications Result

    The following output properties are available:

    Applications List<GetApplicationsApplication>
    The list of applications.
    CompartmentId string
    The OCID of a compartment.
    Id string
    The provider-assigned unique ID for this managed resource.
    DisplayName string
    A user-friendly name. This name is not necessarily unique.
    DisplayNameStartsWith string
    Filters List<GetApplicationsFilter>
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    SparkVersion string
    The Spark version utilized to run the application.
    Applications []GetApplicationsApplication
    The list of applications.
    CompartmentId string
    The OCID of a compartment.
    Id string
    The provider-assigned unique ID for this managed resource.
    DisplayName string
    A user-friendly name. This name is not necessarily unique.
    DisplayNameStartsWith string
    Filters []GetApplicationsFilter
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    SparkVersion string
    The Spark version utilized to run the application.
    applications List<GetApplicationsApplication>
    The list of applications.
    compartmentId String
    The OCID of a compartment.
    id String
    The provider-assigned unique ID for this managed resource.
    displayName String
    A user-friendly name. This name is not necessarily unique.
    displayNameStartsWith String
    filters List<GetApplicationsFilter>
    ownerPrincipalId String
    The OCID of the user who created the resource.
    sparkVersion String
    The Spark version utilized to run the application.
    applications GetApplicationsApplication[]
    The list of applications.
    compartmentId string
    The OCID of a compartment.
    id string
    The provider-assigned unique ID for this managed resource.
    displayName string
    A user-friendly name. This name is not necessarily unique.
    displayNameStartsWith string
    filters GetApplicationsFilter[]
    ownerPrincipalId string
    The OCID of the user who created the resource.
    sparkVersion string
    The Spark version utilized to run the application.
    applications Sequence[dataflow.GetApplicationsApplication]
    The list of applications.
    compartment_id str
    The OCID of a compartment.
    id str
    The provider-assigned unique ID for this managed resource.
    display_name str
    A user-friendly name. This name is not necessarily unique.
    display_name_starts_with str
    filters Sequence[dataflow.GetApplicationsFilter]
    owner_principal_id str
    The OCID of the user who created the resource.
    spark_version str
    The Spark version utilized to run the application.
    applications List<Property Map>
    The list of applications.
    compartmentId String
    The OCID of a compartment.
    id String
    The provider-assigned unique ID for this managed resource.
    displayName String
    A user-friendly name. This name is not necessarily unique.
    displayNameStartsWith String
    filters List<Property Map>
    ownerPrincipalId String
    The OCID of the user who created the resource.
    sparkVersion String
    The Spark version utilized to run the application.
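
    As a hedged example of consuming these output properties, the TypeScript sketch below exports a short summary of each returned application. The compartmentId configuration value is assumed for illustration.

    import * as pulumi from "@pulumi/pulumi";
    import * as oci from "@pulumi/oci";
    
    const config = new pulumi.Config();
    // Assumed stack config value holding the compartment OCID.
    const compartmentId = config.require("compartmentId");
    
    const apps = oci.DataFlow.getApplicationsOutput({ compartmentId: compartmentId });
    
    // Project each application onto a few of the documented output properties.
    export const applicationSummaries = apps.applications.apply(list =>
        list.map(a => ({
            id: a.id,
            displayName: a.displayName,
            sparkVersion: a.sparkVersion,
            state: a.state,
        })));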

    Supporting Types

    GetApplicationsApplication

    ApplicationLogConfigs List<GetApplicationsApplicationApplicationLogConfig>
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments List<string>
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    ClassName string
    The class for the application.
    CompartmentId string
    The OCID of the compartment.
    Configuration Dictionary<string, string>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DefinedTags Dictionary<string, string>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    Description string
    A user-friendly description.
    DisplayName string
    The query parameter for the Spark application name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfigs List<GetApplicationsApplicationDriverShapeConfig>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfigs List<GetApplicationsApplicationExecutorShapeConfig>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    FreeformTags Dictionary<string, string>
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    Id string
    The application ID.
    IdleTimeoutInMinutes string
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    Language string
    The Spark language.
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    Parameters List<GetApplicationsApplicationParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    PrivateEndpointId string
    The OCID of a private endpoint.
    SparkVersion string
    The Spark version utilized to run the application.
    State string
    The current state of this application.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    Type string
    The Spark application processing type.
    WarehouseBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    ApplicationLogConfigs []GetApplicationsApplicationApplicationLogConfig
    Logging details of Application logs for Data Flow Run.
    ArchiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    Arguments []string
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    ClassName string
    The class for the application.
    CompartmentId string
    The OCID of the compartment.
    Configuration map[string]string
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    DefinedTags map[string]string
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    Description string
    A user-friendly description.
    DisplayName string
    The query parameter for the Spark application name.
    DriverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    DriverShapeConfigs []GetApplicationsApplicationDriverShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    Execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    ExecutorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    ExecutorShapeConfigs []GetApplicationsApplicationExecutorShapeConfig
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    FileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    FreeformTags map[string]string
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    Id string
    The application ID.
    IdleTimeoutInMinutes string
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    Language string
    The Spark language.
    LogsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    MaxDurationInMinutes string
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    MetastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    NumExecutors int
    The number of executor VMs requested.
    OwnerPrincipalId string
    The OCID of the user who created the resource.
    OwnerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    Parameters []GetApplicationsApplicationParameter
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    PoolId string
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    PrivateEndpointId string
    The OCID of a private endpoint.
    SparkVersion string
    The Spark version utilized to run the application.
    State string
    The current state of this application.
    TimeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    TimeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    Type string
    The Spark application processing type.
    WarehouseBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    applicationLogConfigs List<GetApplicationsApplicationApplicationLogConfig>
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    className String
    The class for the application.
    compartmentId String
    The OCID of the compartment.
    configuration Map<String,String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags Map<String,String>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description String
    A user-friendly description.
    displayName String
    The query parameter for the Spark application name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfigs List<GetApplicationsApplicationDriverShapeConfig>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfigs List<GetApplicationsApplicationExecutorShapeConfig>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags Map<String,String>
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    id String
    The application ID.
    idleTimeoutInMinutes String
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    language String
    The Spark language.
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Integer
    The number of executor VMs requested.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters List<GetApplicationsApplicationParameter>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    privateEndpointId String
    The OCID of a private endpoint.
    sparkVersion String
    The Spark version utilized to run the application.
    state String
    The current state of this application.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    type String
    The Spark application processing type.
    warehouseBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    applicationLogConfigs GetApplicationsApplicationApplicationLogConfig[]
    Logging details of Application logs for Data Flow Run.
    archiveUri string
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments string[]
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    className string
    The class for the application.
    compartmentId string
    The OCID of the compartment.
    configuration {[key: string]: string}
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags {[key: string]: string}
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description string
    A user-friendly description.
    displayName string
    The query parameter for the Spark application name.
    driverShape string
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfigs GetApplicationsApplicationDriverShapeConfig[]
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute string
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape string
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfigs GetApplicationsApplicationExecutorShapeConfig[]
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri string
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags {[key: string]: string}
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    id string
    The application ID.
    idleTimeoutInMinutes string
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    language string
    The Spark language.
    logsBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes string
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId string
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors number
    The number of executor VMs requested.
    ownerPrincipalId string
    The OCID of the user who created the resource.
    ownerUserName string
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters GetApplicationsApplicationParameter[]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId string
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    privateEndpointId string
    The OCID of a private endpoint.
    sparkVersion string
    The Spark version utilized to run the application.
    state string
    The current state of this application.
    timeCreated string
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated string
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    type string
    The Spark application processing type.
    warehouseBucketUri string
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    application_log_configs Sequence[dataflow.GetApplicationsApplicationApplicationLogConfig]
    Logging details of Application logs for Data Flow Run.
    archive_uri str
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments Sequence[str]
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    class_name str
    The class for the application.
    compartment_id str
    The OCID of the compartment.
    configuration Mapping[str, str]
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    defined_tags Mapping[str, str]
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description str
    A user-friendly description.
    display_name str
    The query parameter for the Spark application name.
    driver_shape str
    The VM shape for the driver. Sets the driver cores and memory.
    driver_shape_configs Sequence[dataflow.GetApplicationsApplicationDriverShapeConfig]
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute str
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executor_shape str
    The VM shape for the executors. Sets the executor cores and memory.
    executor_shape_configs Sequence[dataflow.GetApplicationsApplicationExecutorShapeConfig]
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    file_uri str
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeform_tags Mapping[str, str]
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    id str
    The application ID.
    idle_timeout_in_minutes str
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    language str
    The Spark language.
    logs_bucket_uri str
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    max_duration_in_minutes str
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastore_id str
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    num_executors int
    The number of executor VMs requested.
    owner_principal_id str
    The OCID of the user who created the resource.
    owner_user_name str
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters Sequence[dataflow.GetApplicationsApplicationParameter]
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    pool_id str
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    private_endpoint_id str
    The OCID of a private endpoint.
    spark_version str
    The Spark version utilized to run the application.
    state str
    The current state of this application.
    time_created str
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    time_updated str
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    type str
    The Spark application processing type.
    warehouse_bucket_uri str
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    applicationLogConfigs List<Property Map>
    Logging details of Application logs for Data Flow Run.
    archiveUri String
    A comma separated list of one or more archive files as Oracle Cloud Infrastructure URIs. For example, oci://path/to/a.zip,oci://path/to/b.zip. An Oracle Cloud Infrastructure URI of an archive.zip file containing custom dependencies that may be used to support the execution of a Python, Java, or Scala application. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    arguments List<String>
    The arguments passed to the running application as command line arguments. An argument is either plain text or a placeholder. Placeholders are replaced using values from the parameters map. Each placeholder specified must be represented in the parameters map, else the request (POST or PUT) will fail with an HTTP 400 status code. Placeholders are specified as ${name}, where name is the name of the parameter. Example: [ "--input", "${input_file}", "--name", "John Doe" ] If "input_file" has a value of "mydata.xml", then the value above will be translated to --input mydata.xml --name "John Doe"
    className String
    The class for the application.
    compartmentId String
    The OCID of the compartment.
    configuration Map<String>
    The Spark configuration passed to the running process. See https://spark.apache.org/docs/latest/configuration.html#available-properties. Example: { "spark.app.name" : "My App Name", "spark.shuffle.io.maxRetries" : "4" } Note: Not all Spark properties are permitted to be set. Attempting to set a property that is not allowed to be overwritten will cause a 400 status to be returned.
    definedTags Map<String>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description String
    A user-friendly description.
    displayName String
    The query parameter for the Spark application name.
    driverShape String
    The VM shape for the driver. Sets the driver cores and memory.
    driverShapeConfigs List<Property Map>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    execute String
    The input used for spark-submit command. For more details see https://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit. Supported options include --class, --file, --jars, --conf, --py-files, and main application file with arguments. Example: --jars oci://path/to/a.jar,oci://path/to/b.jar --files oci://path/to/a.json,oci://path/to/b.csv --py-files oci://path/to/a.py,oci://path/to/b.py --conf spark.sql.crossJoin.enabled=true --class org.apache.spark.examples.SparkPi oci://path/to/main.jar 10 Note: If execute is specified together with applicationId, className, configuration, fileUri, language, arguments, parameters during application create/update, or run create/submit, Data Flow service will use derived information from execute input only.
    executorShape String
    The VM shape for the executors. Sets the executor cores and memory.
    executorShapeConfigs List<Property Map>
    This is used to configure the shape of the driver or executor if a flexible shape is used.
    fileUri String
    An Oracle Cloud Infrastructure URI of the file containing the application to execute. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    freeformTags Map<String>
    Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see Resource Tags. Example: {"Department": "Finance"}
    id String
    The application ID.
    idleTimeoutInMinutes String
    The timeout value in minutes used to manage Runs. A Run is stopped after it has been inactive for this period of time. Note: This parameter is currently only applicable to Runs of type SESSION. The default value is 2880 minutes (2 days).
    language String
    The Spark language.
    logsBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket where the Spark job logs are to be uploaded. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.
    maxDurationInMinutes String
    The maximum duration in minutes for which an Application should run. Data Flow Run would be terminated once it reaches this duration from the time it transitions to IN_PROGRESS state.
    metastoreId String
    The OCID of Oracle Cloud Infrastructure Hive Metastore.
    numExecutors Number
    The number of executor VMs requested.
    ownerPrincipalId String
    The OCID of the user who created the resource.
    ownerUserName String
    The username of the user who created the resource. If the username of the owner does not exist, null will be returned and the caller should refer to the ownerPrincipalId value instead.
    parameters List<Property Map>
    An array of name/value pairs used to fill placeholders found in properties like Application.arguments. The name must be a string of one or more word characters (a-z, A-Z, 0-9, _). The value can be a string of 0 or more characters of any kind. Example: [ { name: "iterations", value: "10"}, { name: "input_file", value: "mydata.xml" }, { name: "variable_x", value: "${x}"} ]
    poolId String
    The OCID of a pool. A unique ID to identify a Data Flow pool resource.
    privateEndpointId String
    The OCID of a private endpoint.
    sparkVersion String
    The Spark version utilized to run the application.
    state String
    The current state of this application.
    timeCreated String
    The date and time the resource was created, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    timeUpdated String
    The date and time the resource was updated, expressed in RFC 3339 timestamp format. Example: 2018-04-03T21:10:29.600Z
    type String
    The Spark application processing type.
    warehouseBucketUri String
    An Oracle Cloud Infrastructure URI of the bucket to be used as default warehouse directory for BATCH SQL runs. See https://docs.cloud.oracle.com/iaas/Content/API/SDKDocs/hdfsconnector.htm#uriformat.

    GetApplicationsApplicationApplicationLogConfig

    LogGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    LogId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    LogGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    LogId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId String
    The log group id for where log objects will be for Data Flow Runs.
    logId String
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId string
    The log group id for where log objects will be for Data Flow Runs.
    logId string
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    log_group_id str
    The log group id for where log objects will be for Data Flow Runs.
    log_id str
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.
    logGroupId String
    The log group id for where log objects will be for Data Flow Runs.
    logId String
    The log id of the log object the Application Logs of Data Flow Run will be shipped to.

    GetApplicationsApplicationDriverShapeConfig

    MemoryInGbs double
    The amount of memory used for the driver or executors.
    Ocpus double
    The total number of OCPUs used for the driver or executors. See here for details.
    MemoryInGbs float64
    The amount of memory used for the driver or executors.
    Ocpus float64
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Double
    The amount of memory used for the driver or executors.
    ocpus Double
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs number
    The amount of memory used for the driver or executors.
    ocpus number
    The total number of OCPUs used for the driver or executors. See here for details.
    memory_in_gbs float
    The amount of memory used for the driver or executors.
    ocpus float
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Number
    The amount of memory used for the driver or executors.
    ocpus Number
    The total number of OCPUs used for the driver or executors. See here for details.

    GetApplicationsApplicationExecutorShapeConfig

    MemoryInGbs double
    The amount of memory used for the driver or executors.
    Ocpus double
    The total number of OCPUs used for the driver or executors. See here for details.
    MemoryInGbs float64
    The amount of memory used for the driver or executors.
    Ocpus float64
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Double
    The amount of memory used for the driver or executors.
    ocpus Double
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs number
    The amount of memory used for the driver or executors.
    ocpus number
    The total number of OCPUs used for the driver or executors. See here for details.
    memory_in_gbs float
    The amount of memory used for the driver or executors.
    ocpus float
    The total number of OCPUs used for the driver or executors. See here for details.
    memoryInGbs Number
    The amount of memory used for the driver or executors.
    ocpus Number
    The total number of OCPUs used for the driver or executors. See here for details.

    GetApplicationsApplicationParameter

    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value String
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value string
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name str
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value str
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    value String
    The value of the parameter. It must be a string of 0 or more characters of any kind. Examples: "" (empty string), "10", "mydata.xml", "${x}"

    GetApplicationsFilter

    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Values List<string>
    Regex bool
    Name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    Values []string
    Regex bool
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    values List<String>
    regex Boolean
    name string
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    values string[]
    regex boolean
    name str
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    values Sequence[str]
    regex bool
    name String
    The name of the parameter. It must be a string of one or more word characters (a-z, A-Z, 0-9, _). Examples: "iterations", "input_file"
    values List<String>
    regex Boolean
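
    As an illustrative, hedged use of this filter block, the TypeScript sketch below narrows the results to applications whose language is PYTHON. The filter name "language" and the placeholder compartment OCID are assumptions for the example.

    import * as oci from "@pulumi/oci";
    
    // Placeholder compartment OCID; replace with a real value or a config lookup.
    const compartmentId = "ocid1.compartment.oc1..exampleuniqueid";
    
    // Assumes the filter name matches the language attribute of each returned application.
    const pythonApps = oci.DataFlow.getApplicationsOutput({
        compartmentId: compartmentId,
        filters: [{
            name: "language",
            values: ["PYTHON"],
            regex: false,
        }],
    });
    
    export const pythonApplicationIds = pythonApps.applications.apply(apps => apps.map(a => a.id));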

    Package Details

    Repository
    oci pulumi/pulumi-oci
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the oci Terraform Provider.