databricks.getDbfsFilePaths
Explore with Pulumi AI
Note If you have a fully automated setup with workspaces created by databricks.MwsWorkspaces or azurerm_databricks_workspace, please make sure to add the `depends_on` attribute in order to prevent `default auth: cannot configure default credentials` errors.
This data source allows you to get the list of file names from Databricks File System (DBFS).
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// List the files directly under a DBFS directory (recursive: false).
const partitions = databricks.getDbfsFilePaths({
    path: "dbfs:/user/hive/default.db/table",
    recursive: false,
});
import pulumi
import pulumi_databricks as databricks

# List the files directly under a DBFS directory (recursive=False).
partitions = databricks.get_dbfs_file_paths(
    path="dbfs:/user/hive/default.db/table",
    recursive=False,
)
package main
import (
"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := databricks.GetDbfsFilePaths(ctx, &databricks.GetDbfsFilePathsArgs{
Path: "dbfs:/user/hive/default.db/table",
Recursive: false,
}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() =>
{
    // List the files directly under a DBFS directory (Recursive = false).
    var partitions = Databricks.GetDbfsFilePaths.Invoke(new()
    {
        Path = "dbfs:/user/hive/default.db/table",
        Recursive = false,
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetDbfsFilePathsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // List the files directly under a DBFS directory (recursive(false)).
        final var partitions = DatabricksFunctions.getDbfsFilePaths(GetDbfsFilePathsArgs.builder()
            .path("dbfs:/user/hive/default.db/table")
            .recursive(false)
            .build());
    }
}
variables:
  partitions:
    fn::invoke:
      # Lowercase `function`/`arguments` is the current Pulumi YAML invoke
      # form (consistent with the invoke syntax shown later on this page);
      # the capitalized `Function`/`Arguments` keys are deprecated.
      function: databricks:getDbfsFilePaths
      arguments:
        # List the files directly under a DBFS directory (recursive: false).
        path: dbfs:/user/hive/default.db/table
        recursive: false
Related Resources
The following resources are used in the same context:
- End to end workspace management guide.
- databricks.DbfsFile data to get file content from Databricks File System (DBFS).
- databricks.getDbfsFilePaths data to get the list of file names from Databricks File System (DBFS).
- databricks.DbfsFile to manage relatively small files on Databricks File System (DBFS).
- databricks.Library to install a library on databricks_cluster.
- databricks.Mount to mount your cloud storage on
dbfs:/mnt/name
.
Using getDbfsFilePaths
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getDbfsFilePaths(args: GetDbfsFilePathsArgs, opts?: InvokeOptions): Promise<GetDbfsFilePathsResult>
function getDbfsFilePathsOutput(args: GetDbfsFilePathsOutputArgs, opts?: InvokeOptions): Output<GetDbfsFilePathsResult>
def get_dbfs_file_paths(path: Optional[str] = None,
recursive: Optional[bool] = None,
opts: Optional[InvokeOptions] = None) -> GetDbfsFilePathsResult
def get_dbfs_file_paths_output(path: Optional[pulumi.Input[str]] = None,
recursive: Optional[pulumi.Input[bool]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetDbfsFilePathsResult]
func GetDbfsFilePaths(ctx *Context, args *GetDbfsFilePathsArgs, opts ...InvokeOption) (*GetDbfsFilePathsResult, error)
func GetDbfsFilePathsOutput(ctx *Context, args *GetDbfsFilePathsOutputArgs, opts ...InvokeOption) GetDbfsFilePathsResultOutput
> Note: This function is named GetDbfsFilePaths
in the Go SDK.
public static class GetDbfsFilePaths
{
public static Task<GetDbfsFilePathsResult> InvokeAsync(GetDbfsFilePathsArgs args, InvokeOptions? opts = null)
public static Output<GetDbfsFilePathsResult> Invoke(GetDbfsFilePathsInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetDbfsFilePathsResult> getDbfsFilePaths(GetDbfsFilePathsArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
function: databricks:index/getDbfsFilePaths:getDbfsFilePaths
arguments:
# arguments dictionary
The following arguments are supported:
- path - (Required, String) Path on DBFS to list.
- recursive - (Required, Boolean) Whether to recursively list all files and subdirectories.
getDbfsFilePaths Result
The following output properties are available:
- Id string
- The provider-assigned unique ID for this managed resource.
- Path string
- Path
- Lists List&lt;GetDbfsFilePathsPathList&gt;
- returns list of objects with `path` and `file_size` attributes in each
- Recursive bool
- Id string
- The provider-assigned unique ID for this managed resource.
- Path string
- Path
- Lists []GetDbfsFilePathsPathList
- returns list of objects with `path` and `file_size` attributes in each
- Recursive bool
- id String
- The provider-assigned unique ID for this managed resource.
- path String
- path
- lists List&lt;GetDbfsFilePathsPathList&gt;
- returns list of objects with `path` and `file_size` attributes in each
- recursive Boolean
- id string
- The provider-assigned unique ID for this managed resource.
- path string
- path
- lists GetDbfsFilePathsPathList[]
- returns list of objects with `path` and `file_size` attributes in each
- recursive boolean
- id str
- The provider-assigned unique ID for this managed resource.
- path str
- path
- lists Sequence[GetDbfsFilePathsPathList]
- returns list of objects with `path` and `file_size` attributes in each
- recursive bool
- id String
- The provider-assigned unique ID for this managed resource.
- path String
- path
- lists List&lt;Property Map&gt;
- returns list of objects with `path` and `file_size` attributes in each
- recursive Boolean
Supporting Types
GetDbfsFilePathsPathList
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
databricks
Terraform Provider.