We recommend using Azure Native.
azure.datafactory.Pipeline
Manages a Pipeline inside an Azure Data Factory.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
name: "example-resources",
location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
name: "example",
location: example.location,
resourceGroupName: example.name,
});
const examplePipeline = new azure.datafactory.Pipeline("example", {
name: "example",
dataFactoryId: exampleFactory.id,
});
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
name="example-resources",
location="West Europe")
example_factory = azure.datafactory.Factory("example",
name="example",
location=example.location,
resource_group_name=example.name)
example_pipeline = azure.datafactory.Pipeline("example",
name="example",
data_factory_id=example_factory.id)
package main
import (
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
Name: pulumi.String("example-resources"),
Location: pulumi.String("West Europe"),
})
if err != nil {
return err
}
exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
Name: pulumi.String("example"),
Location: example.Location,
ResourceGroupName: example.Name,
})
if err != nil {
return err
}
_, err = datafactory.NewPipeline(ctx, "example", &datafactory.PipelineArgs{
Name: pulumi.String("example"),
DataFactoryId: exampleFactory.ID(),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var example = new Azure.Core.ResourceGroup("example", new()
{
Name = "example-resources",
Location = "West Europe",
});
var exampleFactory = new Azure.DataFactory.Factory("example", new()
{
Name = "example",
Location = example.Location,
ResourceGroupName = example.Name,
});
var examplePipeline = new Azure.DataFactory.Pipeline("example", new()
{
Name = "example",
DataFactoryId = exampleFactory.Id,
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.Pipeline;
import com.pulumi.azure.datafactory.PipelineArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var example = new ResourceGroup("example", ResourceGroupArgs.builder()
.name("example-resources")
.location("West Europe")
.build());
var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
.name("example")
.location(example.location())
.resourceGroupName(example.name())
.build());
var examplePipeline = new Pipeline("examplePipeline", PipelineArgs.builder()
.name("example")
.dataFactoryId(exampleFactory.id())
.build());
}
}
resources:
example:
type: azure:core:ResourceGroup
properties:
name: example-resources
location: West Europe
exampleFactory:
type: azure:datafactory:Factory
name: example
properties:
name: example
location: ${example.location}
resourceGroupName: ${example.name}
examplePipeline:
type: azure:datafactory:Pipeline
name: example
properties:
name: example
dataFactoryId: ${exampleFactory.id}
With Activities
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const test = new azure.datafactory.Pipeline("test", {
name: "example",
dataFactoryId: testAzurermDataFactory.id,
variables: {
bob: "item1",
},
activitiesJson: `[
{
"name": "Append variable1",
"type": "AppendVariable",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"variableName": "bob",
"value": "something"
}
}
]
`,
});
import pulumi
import pulumi_azure as azure
test = azure.datafactory.Pipeline("test",
name="example",
data_factory_id=test_azurerm_data_factory["id"],
variables={
"bob": "item1",
},
activities_json="""[
{
"name": "Append variable1",
"type": "AppendVariable",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"variableName": "bob",
"value": "something"
}
}
]
""")
package main
import (
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := datafactory.NewPipeline(ctx, "test", &datafactory.PipelineArgs{
Name: pulumi.String("example"),
DataFactoryId: pulumi.Any(testAzurermDataFactory.Id),
Variables: pulumi.StringMap{
"bob": pulumi.String("item1"),
},
ActivitiesJson: pulumi.String(`[
{
"name": "Append variable1",
"type": "AppendVariable",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"variableName": "bob",
"value": "something"
}
}
]
`),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var test = new Azure.DataFactory.Pipeline("test", new()
{
Name = "example",
DataFactoryId = testAzurermDataFactory.Id,
Variables =
{
{ "bob", "item1" },
},
ActivitiesJson = @"[
{
""name"": ""Append variable1"",
""type"": ""AppendVariable"",
""dependsOn"": [],
""userProperties"": [],
""typeProperties"": {
""variableName"": ""bob"",
""value"": ""something""
}
}
]
",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.datafactory.Pipeline;
import com.pulumi.azure.datafactory.PipelineArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var test = new Pipeline("test", PipelineArgs.builder()
.name("example")
.dataFactoryId(testAzurermDataFactory.id())
.variables(Map.of("bob", "item1"))
.activitiesJson("""
[
{
"name": "Append variable1",
"type": "AppendVariable",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"variableName": "bob",
"value": "something"
}
}
]
""")
.build());
}
}
resources:
test:
type: azure:datafactory:Pipeline
properties:
name: example
dataFactoryId: ${testAzurermDataFactory.id}
variables:
bob: item1
activitiesJson: |
[
{
"name": "Append variable1",
"type": "AppendVariable",
"dependsOn": [],
"userProperties": [],
"typeProperties": {
"variableName": "bob",
"value": "something"
}
}
]
Create Pipeline Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Pipeline(name: string, args: PipelineArgs, opts?: CustomResourceOptions);
@overload
def Pipeline(resource_name: str,
args: PipelineArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Pipeline(resource_name: str,
opts: Optional[ResourceOptions] = None,
data_factory_id: Optional[str] = None,
activities_json: Optional[str] = None,
annotations: Optional[Sequence[str]] = None,
concurrency: Optional[int] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
moniter_metrics_after_duration: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
variables: Optional[Mapping[str, str]] = None)
func NewPipeline(ctx *Context, name string, args PipelineArgs, opts ...ResourceOption) (*Pipeline, error)
public Pipeline(string name, PipelineArgs args, CustomResourceOptions? opts = null)
public Pipeline(String name, PipelineArgs args)
public Pipeline(String name, PipelineArgs args, CustomResourceOptions options)
type: azure:datafactory:Pipeline
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args PipelineArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var pipelineResource = new Azure.DataFactory.Pipeline("pipelineResource", new()
{
DataFactoryId = "string",
ActivitiesJson = "string",
Annotations = new[]
{
"string",
},
Concurrency = 0,
Description = "string",
Folder = "string",
MoniterMetricsAfterDuration = "string",
Name = "string",
Parameters =
{
{ "string", "string" },
},
Variables =
{
{ "string", "string" },
},
});
example, err := datafactory.NewPipeline(ctx, "pipelineResource", &datafactory.PipelineArgs{
DataFactoryId: pulumi.String("string"),
ActivitiesJson: pulumi.String("string"),
Annotations: pulumi.StringArray{
pulumi.String("string"),
},
Concurrency: pulumi.Int(0),
Description: pulumi.String("string"),
Folder: pulumi.String("string"),
MoniterMetricsAfterDuration: pulumi.String("string"),
Name: pulumi.String("string"),
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
Variables: pulumi.StringMap{
"string": pulumi.String("string"),
},
})
var pipelineResource = new Pipeline("pipelineResource", PipelineArgs.builder()
.dataFactoryId("string")
.activitiesJson("string")
.annotations("string")
.concurrency(0)
.description("string")
.folder("string")
.moniterMetricsAfterDuration("string")
.name("string")
.parameters(Map.of("string", "string"))
.variables(Map.of("string", "string"))
.build());
pipeline_resource = azure.datafactory.Pipeline("pipelineResource",
data_factory_id="string",
activities_json="string",
annotations=["string"],
concurrency=0,
description="string",
folder="string",
moniter_metrics_after_duration="string",
name="string",
parameters={
"string": "string",
},
variables={
"string": "string",
})
const pipelineResource = new azure.datafactory.Pipeline("pipelineResource", {
dataFactoryId: "string",
activitiesJson: "string",
annotations: ["string"],
concurrency: 0,
description: "string",
folder: "string",
moniterMetricsAfterDuration: "string",
name: "string",
parameters: {
string: "string",
},
variables: {
string: "string",
},
});
type: azure:datafactory:Pipeline
properties:
activitiesJson: string
annotations:
- string
concurrency: 0
dataFactoryId: string
description: string
folder: string
moniterMetricsAfterDuration: string
name: string
parameters:
string: string
variables:
string: string
Pipeline Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Pipeline resource accepts the following input properties:
- DataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- ActivitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Pipeline.
- Concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- Description string
- The description for the Data Factory Pipeline.
- Folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- MoniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- Name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Pipeline.
- Variables Dictionary<string, string>
- A map of variables to associate with the Data Factory Pipeline.
- DataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- ActivitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- Annotations []string
- List of tags that can be used for describing the Data Factory Pipeline.
- Concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- Description string
- The description for the Data Factory Pipeline.
- Folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- MoniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- Name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Pipeline.
- Variables map[string]string
- A map of variables to associate with the Data Factory Pipeline.
- dataFactoryId String
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- activitiesJson String
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency Integer
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- description String
- The description for the Data Factory Pipeline.
- folder String
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration String
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name String
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Pipeline.
- variables Map<String,String>
- A map of variables to associate with the Data Factory Pipeline.
- dataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- activitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations string[]
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency number
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- description string
- The description for the Data Factory Pipeline.
- folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Pipeline.
- variables {[key: string]: string}
- A map of variables to associate with the Data Factory Pipeline.
- data_factory_id str
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- activities_json str
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- description str
- The description for the Data Factory Pipeline.
- folder str
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniter_metrics_after_duration str
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name str
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Pipeline.
- variables Mapping[str, str]
- A map of variables to associate with the Data Factory Pipeline.
- dataFactoryId String
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- activitiesJson String
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency Number
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- description String
- The description for the Data Factory Pipeline.
- folder String
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration String
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name String
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Pipeline.
- variables Map<String>
- A map of variables to associate with the Data Factory Pipeline.
Outputs
All input properties are implicitly available as output properties. Additionally, the Pipeline resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing Pipeline Resource
Get an existing Pipeline resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: PipelineState, opts?: CustomResourceOptions): Pipeline
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
activities_json: Optional[str] = None,
annotations: Optional[Sequence[str]] = None,
concurrency: Optional[int] = None,
data_factory_id: Optional[str] = None,
description: Optional[str] = None,
folder: Optional[str] = None,
moniter_metrics_after_duration: Optional[str] = None,
name: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
variables: Optional[Mapping[str, str]] = None) -> Pipeline
func GetPipeline(ctx *Context, name string, id IDInput, state *PipelineState, opts ...ResourceOption) (*Pipeline, error)
public static Pipeline Get(string name, Input<string> id, PipelineState? state, CustomResourceOptions? opts = null)
public static Pipeline get(String name, Output<String> id, PipelineState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
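For illustration, a minimal TypeScript sketch of looking up an existing Pipeline by its Azure resource ID; the ID below is a placeholder that follows the format shown in the Import section:
import * as azure from "@pulumi/azure";
// Look up an existing pipeline without creating or managing it.
const existing = azure.datafactory.Pipeline.get("existing",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/pipelines/example");
// The looked-up resource exposes the same outputs as one created by this program.
export const existingPipelineName = existing.name;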
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ActivitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Pipeline.
- Concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- DataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Pipeline.
- Folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- MoniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- Name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Pipeline.
- Variables Dictionary<string, string>
- A map of variables to associate with the Data Factory Pipeline.
- ActivitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- Annotations []string
- List of tags that can be used for describing the Data Factory Pipeline.
- Concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- DataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Pipeline.
- Folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- MoniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- Name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Pipeline.
- Variables map[string]string
- A map of variables to associate with the Data Factory Pipeline.
- activitiesJson String
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency Integer
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- dataFactoryId String
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- description String
- The description for the Data Factory Pipeline.
- folder String
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration String
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name String
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Pipeline.
- variables Map<String,String>
- A map of variables to associate with the Data Factory Pipeline.
- activitiesJson string
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations string[]
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency number
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- dataFactoryId string
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- description string
- The description for the Data Factory Pipeline.
- folder string
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration string
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name string
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Pipeline.
- variables {[key: string]: string}
- A map of variables to associate with the Data Factory Pipeline.
- activities_json str
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency int
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- data_factory_id str
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- description str
- The description for the Data Factory Pipeline.
- folder str
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniter_metrics_after_duration str
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name str
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Pipeline.
- variables Mapping[str, str]
- A map of variables to associate with the Data Factory Pipeline.
- activitiesJson String
- A JSON object that contains the activities that will be associated with the Data Factory Pipeline.
- annotations List<String>
- List of tags that can be used for describing the Data Factory Pipeline.
- concurrency Number
- The max number of concurrent runs for the Data Factory Pipeline. Must be between 1 and 50.
- dataFactoryId String
- The ID of the Data Factory in which to create the Pipeline. Changing this forces a new resource to be created.
- description String
- The description for the Data Factory Pipeline.
- folder String
- The folder that this Pipeline is in. If not specified, the Pipeline will appear at the root level.
- moniterMetricsAfterDuration String
- The TimeSpan value after which an Azure Monitoring Metric is fired.
- name String
- Specifies the name of the Data Factory Pipeline. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Pipeline.
- variables Map<String>
- A map of variables to associate with the Data Factory Pipeline.
Import
Data Factory Pipelines can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/pipeline:Pipeline example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/pipelines/example
To learn more about importing existing cloud resources, see Importing resources.
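Note that pulumi import only brings the existing resource under management; the program must also contain a matching resource declaration. A minimal TypeScript sketch, assuming the imported pipeline lives in the factory shown in the import ID above:
import * as azure from "@pulumi/azure";
// Declaration corresponding to the imported pipeline. The property values should
// match the existing resource, otherwise the next `pulumi up` will report a diff.
const example = new azure.datafactory.Pipeline("example", {
    name: "example",
    dataFactoryId:
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example",
});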
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.