Note: this page documents the Azure Classic provider; we recommend using the Azure Native provider where possible.
azure.streamanalytics.OutputBlob
Manages a Stream Analytics Output to Blob Storage.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const exampleResourceGroup = new azure.core.ResourceGroup("example", {
name: "rg-example",
location: "West Europe",
});
const example = azure.streamanalytics.getJobOutput({
name: "example-job",
resourceGroupName: exampleResourceGroup.name,
});
const exampleAccount = new azure.storage.Account("example", {
name: "examplesa",
resourceGroupName: exampleResourceGroup.name,
location: exampleResourceGroup.location,
accountTier: "Standard",
accountReplicationType: "LRS",
});
const exampleContainer = new azure.storage.Container("example", {
name: "example",
storageAccountName: exampleAccount.name,
containerAccessType: "private",
});
const exampleOutputBlob = new azure.streamanalytics.OutputBlob("example", {
name: "output-to-blob-storage",
streamAnalyticsJobName: example.apply(example => example.name),
resourceGroupName: example.apply(example => example.resourceGroupName),
storageAccountName: exampleAccount.name,
storageAccountKey: exampleAccount.primaryAccessKey,
storageContainerName: exampleContainer.name,
pathPattern: "some-pattern",
dateFormat: "yyyy-MM-dd",
timeFormat: "HH",
serialization: {
type: "Csv",
encoding: "UTF8",
fieldDelimiter: ",",
},
});
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("example",
name="rg-example",
location="West Europe")
example = azure.streamanalytics.get_job_output(name="example-job",
resource_group_name=example_resource_group.name)
example_account = azure.storage.Account("example",
name="examplesa",
resource_group_name=example_resource_group.name,
location=example_resource_group.location,
account_tier="Standard",
account_replication_type="LRS")
example_container = azure.storage.Container("example",
name="example",
storage_account_name=example_account.name,
container_access_type="private")
example_output_blob = azure.streamanalytics.OutputBlob("example",
name="output-to-blob-storage",
stream_analytics_job_name=example.name,
resource_group_name=example.resource_group_name,
storage_account_name=example_account.name,
storage_account_key=example_account.primary_access_key,
storage_container_name=example_container.name,
path_pattern="some-pattern",
date_format="yyyy-MM-dd",
time_format="HH",
serialization={
"type": "Csv",
"encoding": "UTF8",
"field_delimiter": ",",
})
package main
import (
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/storage"
"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/streamanalytics"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
exampleResourceGroup, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
Name: pulumi.String("rg-example"),
Location: pulumi.String("West Europe"),
})
if err != nil {
return err
}
example := streamanalytics.LookupJobOutput(ctx, streamanalytics.GetJobOutputArgs{
Name: pulumi.String("example-job"),
ResourceGroupName: exampleResourceGroup.Name,
}, nil)
exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
Name: pulumi.String("examplesa"),
ResourceGroupName: exampleResourceGroup.Name,
Location: exampleResourceGroup.Location,
AccountTier: pulumi.String("Standard"),
AccountReplicationType: pulumi.String("LRS"),
})
if err != nil {
return err
}
exampleContainer, err := storage.NewContainer(ctx, "example", &storage.ContainerArgs{
Name: pulumi.String("example"),
StorageAccountName: exampleAccount.Name,
ContainerAccessType: pulumi.String("private"),
})
if err != nil {
return err
}
_, err = streamanalytics.NewOutputBlob(ctx, "example", &streamanalytics.OutputBlobArgs{
Name: pulumi.String("output-to-blob-storage"),
StreamAnalyticsJobName: example.ApplyT(func(example streamanalytics.GetJobResult) (string, error) {
return example.Name, nil
}).(pulumi.StringOutput),
ResourceGroupName: example.ApplyT(func(example streamanalytics.GetJobResult) (string, error) {
return example.ResourceGroupName, nil
}).(pulumi.StringOutput),
StorageAccountName: exampleAccount.Name,
StorageAccountKey: exampleAccount.PrimaryAccessKey,
StorageContainerName: exampleContainer.Name,
PathPattern: pulumi.String("some-pattern"),
DateFormat: pulumi.String("yyyy-MM-dd"),
TimeFormat: pulumi.String("HH"),
Serialization: &streamanalytics.OutputBlobSerializationArgs{
Type: pulumi.String("Csv"),
Encoding: pulumi.String("UTF8"),
FieldDelimiter: pulumi.String(","),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() =>
{
var exampleResourceGroup = new Azure.Core.ResourceGroup("example", new()
{
Name = "rg-example",
Location = "West Europe",
});
var example = Azure.StreamAnalytics.GetJob.Invoke(new()
{
Name = "example-job",
ResourceGroupName = exampleResourceGroup.Name,
});
var exampleAccount = new Azure.Storage.Account("example", new()
{
Name = "examplesa",
ResourceGroupName = exampleResourceGroup.Name,
Location = exampleResourceGroup.Location,
AccountTier = "Standard",
AccountReplicationType = "LRS",
});
var exampleContainer = new Azure.Storage.Container("example", new()
{
Name = "example",
StorageAccountName = exampleAccount.Name,
ContainerAccessType = "private",
});
var exampleOutputBlob = new Azure.StreamAnalytics.OutputBlob("example", new()
{
Name = "output-to-blob-storage",
StreamAnalyticsJobName = example.Apply(getJobResult => getJobResult.Name),
ResourceGroupName = example.Apply(getJobResult => getJobResult.ResourceGroupName),
StorageAccountName = exampleAccount.Name,
StorageAccountKey = exampleAccount.PrimaryAccessKey,
StorageContainerName = exampleContainer.Name,
PathPattern = "some-pattern",
DateFormat = "yyyy-MM-dd",
TimeFormat = "HH",
Serialization = new Azure.StreamAnalytics.Inputs.OutputBlobSerializationArgs
{
Type = "Csv",
Encoding = "UTF8",
FieldDelimiter = ",",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.streamanalytics.StreamanalyticsFunctions;
import com.pulumi.azure.streamanalytics.inputs.GetJobArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.Container;
import com.pulumi.azure.storage.ContainerArgs;
import com.pulumi.azure.streamanalytics.OutputBlob;
import com.pulumi.azure.streamanalytics.OutputBlobArgs;
import com.pulumi.azure.streamanalytics.inputs.OutputBlobSerializationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
.name("rg-example")
.location("West Europe")
.build());
final var example = StreamanalyticsFunctions.getJob(GetJobArgs.builder()
.name("example-job")
.resourceGroupName(exampleResourceGroup.name())
.build());
var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
.name("examplesa")
.resourceGroupName(exampleResourceGroup.name())
.location(exampleResourceGroup.location())
.accountTier("Standard")
.accountReplicationType("LRS")
.build());
var exampleContainer = new Container("exampleContainer", ContainerArgs.builder()
.name("example")
.storageAccountName(exampleAccount.name())
.containerAccessType("private")
.build());
var exampleOutputBlob = new OutputBlob("exampleOutputBlob", OutputBlobArgs.builder()
.name("output-to-blob-storage")
.streamAnalyticsJobName(example.applyValue(getJobResult -> getJobResult.name()))
.resourceGroupName(example.applyValue(getJobResult -> getJobResult.resourceGroupName()))
.storageAccountName(exampleAccount.name())
.storageAccountKey(exampleAccount.primaryAccessKey())
.storageContainerName(exampleContainer.name())
.pathPattern("some-pattern")
.dateFormat("yyyy-MM-dd")
.timeFormat("HH")
.serialization(OutputBlobSerializationArgs.builder()
.type("Csv")
.encoding("UTF8")
.fieldDelimiter(",")
.build())
.build());
}
}
resources:
exampleResourceGroup:
type: azure:core:ResourceGroup
name: example
properties:
name: rg-example
location: West Europe
exampleAccount:
type: azure:storage:Account
name: example
properties:
name: examplesa
resourceGroupName: ${exampleResourceGroup.name}
location: ${exampleResourceGroup.location}
accountTier: Standard
accountReplicationType: LRS
exampleContainer:
type: azure:storage:Container
name: example
properties:
name: example
storageAccountName: ${exampleAccount.name}
containerAccessType: private
exampleOutputBlob:
type: azure:streamanalytics:OutputBlob
name: example
properties:
name: output-to-blob-storage
streamAnalyticsJobName: ${example.name}
resourceGroupName: ${example.resourceGroupName}
storageAccountName: ${exampleAccount.name}
storageAccountKey: ${exampleAccount.primaryAccessKey}
storageContainerName: ${exampleContainer.name}
pathPattern: some-pattern
dateFormat: yyyy-MM-dd
timeFormat: HH
serialization:
type: Csv
encoding: UTF8
fieldDelimiter: ','
variables:
example:
fn::invoke:
Function: azure:streamanalytics:getJob
Arguments:
name: example-job
resourceGroupName: ${exampleResourceGroup.name}
Create OutputBlob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new OutputBlob(name: string, args: OutputBlobArgs, opts?: CustomResourceOptions);
@overload
def OutputBlob(resource_name: str,
args: OutputBlobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def OutputBlob(resource_name: str,
opts: Optional[ResourceOptions] = None,
serialization: Optional[OutputBlobSerializationArgs] = None,
time_format: Optional[str] = None,
stream_analytics_job_name: Optional[str] = None,
storage_container_name: Optional[str] = None,
date_format: Optional[str] = None,
storage_account_name: Optional[str] = None,
path_pattern: Optional[str] = None,
resource_group_name: Optional[str] = None,
blob_write_mode: Optional[str] = None,
storage_account_key: Optional[str] = None,
name: Optional[str] = None,
authentication_mode: Optional[str] = None,
batch_min_rows: Optional[int] = None,
batch_max_wait_time: Optional[str] = None)
func NewOutputBlob(ctx *Context, name string, args OutputBlobArgs, opts ...ResourceOption) (*OutputBlob, error)
public OutputBlob(string name, OutputBlobArgs args, CustomResourceOptions? opts = null)
public OutputBlob(String name, OutputBlobArgs args)
public OutputBlob(String name, OutputBlobArgs args, CustomResourceOptions options)
type: azure:streamanalytics:OutputBlob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args OutputBlobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args OutputBlobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args OutputBlobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args OutputBlobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args OutputBlobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var outputBlobResource = new Azure.StreamAnalytics.OutputBlob("outputBlobResource", new()
{
Serialization = new Azure.StreamAnalytics.Inputs.OutputBlobSerializationArgs
{
Type = "string",
Encoding = "string",
FieldDelimiter = "string",
Format = "string",
},
TimeFormat = "string",
StreamAnalyticsJobName = "string",
StorageContainerName = "string",
DateFormat = "string",
StorageAccountName = "string",
PathPattern = "string",
ResourceGroupName = "string",
BlobWriteMode = "string",
StorageAccountKey = "string",
Name = "string",
AuthenticationMode = "string",
BatchMinRows = 0,
BatchMaxWaitTime = "string",
});
example, err := streamanalytics.NewOutputBlob(ctx, "outputBlobResource", &streamanalytics.OutputBlobArgs{
Serialization: &streamanalytics.OutputBlobSerializationArgs{
Type: pulumi.String("string"),
Encoding: pulumi.String("string"),
FieldDelimiter: pulumi.String("string"),
Format: pulumi.String("string"),
},
TimeFormat: pulumi.String("string"),
StreamAnalyticsJobName: pulumi.String("string"),
StorageContainerName: pulumi.String("string"),
DateFormat: pulumi.String("string"),
StorageAccountName: pulumi.String("string"),
PathPattern: pulumi.String("string"),
ResourceGroupName: pulumi.String("string"),
BlobWriteMode: pulumi.String("string"),
StorageAccountKey: pulumi.String("string"),
Name: pulumi.String("string"),
AuthenticationMode: pulumi.String("string"),
BatchMinRows: pulumi.Int(0),
BatchMaxWaitTime: pulumi.String("string"),
})
var outputBlobResource = new OutputBlob("outputBlobResource", OutputBlobArgs.builder()
.serialization(OutputBlobSerializationArgs.builder()
.type("string")
.encoding("string")
.fieldDelimiter("string")
.format("string")
.build())
.timeFormat("string")
.streamAnalyticsJobName("string")
.storageContainerName("string")
.dateFormat("string")
.storageAccountName("string")
.pathPattern("string")
.resourceGroupName("string")
.blobWriteMode("string")
.storageAccountKey("string")
.name("string")
.authenticationMode("string")
.batchMinRows(0)
.batchMaxWaitTime("string")
.build());
output_blob_resource = azure.streamanalytics.OutputBlob("outputBlobResource",
serialization={
"type": "string",
"encoding": "string",
"field_delimiter": "string",
"format": "string",
},
time_format="string",
stream_analytics_job_name="string",
storage_container_name="string",
date_format="string",
storage_account_name="string",
path_pattern="string",
resource_group_name="string",
blob_write_mode="string",
storage_account_key="string",
name="string",
authentication_mode="string",
batch_min_rows=0,
batch_max_wait_time="string")
const outputBlobResource = new azure.streamanalytics.OutputBlob("outputBlobResource", {
serialization: {
type: "string",
encoding: "string",
fieldDelimiter: "string",
format: "string",
},
timeFormat: "string",
streamAnalyticsJobName: "string",
storageContainerName: "string",
dateFormat: "string",
storageAccountName: "string",
pathPattern: "string",
resourceGroupName: "string",
blobWriteMode: "string",
storageAccountKey: "string",
name: "string",
authenticationMode: "string",
batchMinRows: 0,
batchMaxWaitTime: "string",
});
type: azure:streamanalytics:OutputBlob
properties:
authenticationMode: string
batchMaxWaitTime: string
batchMinRows: 0
blobWriteMode: string
dateFormat: string
name: string
pathPattern: string
resourceGroupName: string
serialization:
encoding: string
fieldDelimiter: string
format: string
type: string
storageAccountKey: string
storageAccountName: string
storageContainerName: string
streamAnalyticsJobName: string
timeFormat: string
OutputBlob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
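For example, the serialization block from the example above can be passed in either form; the two snippets below are equivalent (a small sketch, assuming import pulumi_azure as azure):
# As a typed argument class:
serialization_args = azure.streamanalytics.OutputBlobSerializationArgs(
    type="Csv",
    encoding="UTF8",
    field_delimiter=",",
)
# Or as a plain dictionary literal:
serialization_dict = {
    "type": "Csv",
    "encoding": "UTF8",
    "field_delimiter": ",",
}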
The OutputBlob resource accepts the following input properties:
(Property names below are shown in their TypeScript/camelCase form; Python uses the equivalent snake_case names.)
- dateFormat (string) - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
- pathPattern (string) - The blob path pattern. It is not a regular expression; it represents a pattern against which blob names are matched to determine whether they should be included as input or output to the job.
- resourceGroupName (string) - The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- serialization (OutputBlobSerialization) - A serialization block as defined below.
- storageAccountName (string) - The name of the Storage Account.
- storageContainerName (string) - The name of the Container within the Storage Account.
- streamAnalyticsJobName (string) - The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- timeFormat (string) - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
- authenticationMode (string) - The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
- batchMaxWaitTime (string) - The maximum wait time per batch in hh:mm:ss, e.g. 00:02:00 for two minutes.
- batchMinRows (int) - The minimum number of rows per batch. Must be between 0 and 1000000.
- blobWriteMode (string) - Determines whether blob blocks are committed automatically or appended. Possible values are Append and Once. Defaults to Append.
- name (string) - The name of the Stream Output. Changing this forces a new resource to be created.
- storageAccountKey (string) - The Access Key which should be used to connect to this Storage Account.
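As a sketch of how authenticationMode interacts with the other inputs, the following Python snippet (reusing the resources from the Python example under Example Usage above, and assuming the Stream Analytics job's managed identity has already been granted write access to the container) configures the output to authenticate with a managed identity instead of an access key:
msi_output = azure.streamanalytics.OutputBlob("msi-example",
    name="output-to-blob-msi",
    stream_analytics_job_name=example.name,
    resource_group_name=example.resource_group_name,
    storage_account_name=example_account.name,
    storage_container_name=example_container.name,
    # With Msi authentication the storage account key can be omitted;
    # the job's identity must have write access to the container.
    authentication_mode="Msi",
    path_pattern="some-pattern",
    date_format="yyyy-MM-dd",
    time_format="HH",
    serialization={
        "type": "Csv",
        "encoding": "UTF8",
        "field_delimiter": ",",
    })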
Outputs
All input properties are implicitly available as output properties. Additionally, the OutputBlob resource produces the following output properties:
- id (string) - The provider-assigned unique ID for this managed resource.
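For instance, continuing the Python example above, the generated ID can be exported like any other output:
pulumi.export("output_blob_id", example_output_blob.id)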
Look up Existing OutputBlob Resource
Get an existing OutputBlob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: OutputBlobState, opts?: CustomResourceOptions): OutputBlob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
authentication_mode: Optional[str] = None,
batch_max_wait_time: Optional[str] = None,
batch_min_rows: Optional[int] = None,
blob_write_mode: Optional[str] = None,
date_format: Optional[str] = None,
name: Optional[str] = None,
path_pattern: Optional[str] = None,
resource_group_name: Optional[str] = None,
serialization: Optional[OutputBlobSerializationArgs] = None,
storage_account_key: Optional[str] = None,
storage_account_name: Optional[str] = None,
storage_container_name: Optional[str] = None,
stream_analytics_job_name: Optional[str] = None,
time_format: Optional[str] = None) -> OutputBlob
func GetOutputBlob(ctx *Context, name string, id IDInput, state *OutputBlobState, opts ...ResourceOption) (*OutputBlob, error)
public static OutputBlob Get(string name, Input<string> id, OutputBlobState? state, CustomResourceOptions? opts = null)
public static OutputBlob get(String name, Output<String> id, OutputBlobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
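As an illustrative sketch in Python (assuming the usual import pulumi and import pulumi_azure as azure, and using the placeholder resource ID from the Import section below), an existing output can be brought under management like this:
existing = azure.streamanalytics.OutputBlob.get("existing-output",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1")

# State of the looked-up resource is then available as outputs.
pulumi.export("existing_path_pattern", existing.path_pattern)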
The following state properties are supported; they correspond to the input properties above (names shown in TypeScript/camelCase form):
- authenticationMode (string) - The authentication mode for the Stream Output. Possible values are Msi and ConnectionString. Defaults to ConnectionString.
- batchMaxWaitTime (string) - The maximum wait time per batch in hh:mm:ss, e.g. 00:02:00 for two minutes.
- batchMinRows (int) - The minimum number of rows per batch. Must be between 0 and 1000000.
- blobWriteMode (string) - Determines whether blob blocks are committed automatically or appended. Possible values are Append and Once. Defaults to Append.
- dateFormat (string) - The date format. Wherever {date} appears in pathPattern, the value of this property is used as the date format instead.
- name (string) - The name of the Stream Output. Changing this forces a new resource to be created.
- pathPattern (string) - The blob path pattern. It is not a regular expression; it represents a pattern against which blob names are matched to determine whether they should be included as input or output to the job.
- resourceGroupName (string) - The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- serialization (OutputBlobSerialization) - A serialization block as defined below.
- storageAccountKey (string) - The Access Key which should be used to connect to this Storage Account.
- storageAccountName (string) - The name of the Storage Account.
- storageContainerName (string) - The name of the Container within the Storage Account.
- streamAnalyticsJobName (string) - The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- timeFormat (string) - The time format. Wherever {time} appears in pathPattern, the value of this property is used as the time format instead.
Supporting Types
OutputBlobSerialization, OutputBlobSerializationArgs
- type (string) - The serialization format used for outgoing data streams. Possible values are Avro, Csv, Json and Parquet. NOTE: batchMaxWaitTime and batchMinRows are required when type is set to Parquet.
- encoding (string) - The encoding of the incoming data in the case of input, and the encoding of outgoing data in the case of output. It can currently only be set to UTF8. NOTE: This is required when type is set to Csv or Json.
- fieldDelimiter (string) - The delimiter used to separate comma-separated value (CSV) records. Possible values are a space, "," (comma), a tab, "|" (pipe) and ";" (semicolon). NOTE: This is required when type is set to Csv.
- format (string) - Specifies the format of the JSON the output will be written in. Possible values are Array and LineSeparated. NOTE: This is required and can only be specified when type is set to Json.
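To illustrate the Parquet note above, a minimal Python sketch (job, storage account, container and key values below are placeholders) might look like:
parquet_output = azure.streamanalytics.OutputBlob("parquet-example",
    stream_analytics_job_name="example-job",      # assumed existing job
    resource_group_name="rg-example",
    storage_account_name="examplesa",
    storage_account_key="<storage-account-key>",  # placeholder
    storage_container_name="example",
    path_pattern="some-pattern",
    date_format="yyyy-MM-dd",
    time_format="HH",
    # batch_max_wait_time and batch_min_rows are required when the
    # serialization type is Parquet.
    batch_max_wait_time="00:02:00",
    batch_min_rows=1000,
    serialization={
        "type": "Parquet",
    })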
Import
Stream Analytics Outputs to Blob Storage can be imported using the resource id, e.g.
$ pulumi import azure:streamanalytics/outputBlob:OutputBlob example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.