AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi
aws.bedrockfoundation.getModel
Data source for managing an AWS Bedrock Foundation Model.
Example Usage
Basic Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

const test = aws.bedrockfoundation.getModels({});
const testGetModel = test.then(test => aws.bedrockfoundation.getModel({
    modelId: test.modelSummaries[0].modelId,
}));
Python
import pulumi
import pulumi_aws as aws

test = aws.bedrockfoundation.get_models()
test_get_model = aws.bedrockfoundation.get_model(model_id=test.model_summaries[0].model_id)
Go
package main

import (
    "github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        test, err := bedrockfoundation.GetModels(ctx, &bedrockfoundation.GetModelsArgs{}, nil)
        if err != nil {
            return err
        }
        _, err = bedrockfoundation.GetModel(ctx, &bedrockfoundation.GetModelArgs{
            ModelId: test.ModelSummaries[0].ModelId,
        }, nil)
        if err != nil {
            return err
        }
        return nil
    })
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

return await Deployment.RunAsync(() =>
{
    var test = Aws.BedrockFoundation.GetModels.Invoke();
    var testGetModel = Aws.BedrockFoundation.GetModel.Invoke(new()
    {
        ModelId = test.Apply(getModelsResult => getModelsResult.ModelSummaries[0]?.ModelId),
    });
});
Java
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var test = BedrockfoundationFunctions.getModels();
        final var testGetModel = BedrockfoundationFunctions.getModel(GetModelArgs.builder()
            .modelId(test.applyValue(getModelsResult -> getModelsResult.modelSummaries().get(0).modelId()))
            .build());
    }
}
YAML
variables:
  test:
    fn::invoke:
      Function: aws:bedrockfoundation:getModels
      Arguments: {}
  testGetModel:
    fn::invoke:
      Function: aws:bedrockfoundation:getModel
      Arguments:
        modelId: ${test.modelSummaries[0].modelId}
Using getModel
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getModel(args: GetModelArgs, opts?: InvokeOptions): Promise<GetModelResult>
function getModelOutput(args: GetModelOutputArgs, opts?: InvokeOptions): Output<GetModelResult>
def get_model(model_id: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetModelResult
def get_model_output(model_id: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetModelResult]
func GetModel(ctx *Context, args *GetModelArgs, opts ...InvokeOption) (*GetModelResult, error)
func GetModelOutput(ctx *Context, args *GetModelOutputArgs, opts ...InvokeOption) GetModelResultOutput
> Note: This function is named GetModel in the Go SDK.
public static class GetModel
{
public static Task<GetModelResult> InvokeAsync(GetModelArgs args, InvokeOptions? opts = null)
public static Output<GetModelResult> Invoke(GetModelInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetModelResult> getModel(GetModelArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
  function: aws:bedrockfoundation/getModel:getModel
  arguments:
    # arguments dictionary
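As a sketch of the difference, the direct form resolves through a Promise, while the output form accepts Input values (such as results of other invokes) directly. The TypeScript example below is illustrative; the literal model ID is an assumption, so substitute one available in your account and Region.

import * as aws from "@pulumi/aws";

// Direct form: plain arguments, Promise-wrapped result.
const direct = aws.bedrockfoundation.getModel({
    modelId: "amazon.titan-text-express-v1", // illustrative model ID
});
export const directModelArn = direct.then(m => m.modelArn);

// Output form: Input-wrapped arguments, Output-wrapped result;
// useful when the model ID itself comes from another Output.
const models = aws.bedrockfoundation.getModelsOutput({});
const viaOutput = aws.bedrockfoundation.getModelOutput({
    modelId: models.apply(ms => ms.modelSummaries[0].modelId),
});
export const outputModelArn = viaOutput.modelArn;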
The following arguments are supported:

- modelId (string) - Model identifier. (ModelId in C# and Go, model_id in Python.)
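In practice the model ID usually comes from aws.bedrockfoundation.getModels, as in Basic Usage. A minimal TypeScript sketch, assuming the getModels byProvider filter and using the provider name "Amazon" purely for illustration:

import * as aws from "@pulumi/aws";

// Narrow the candidate list by provider, then look up one model's details.
const amazonModels = aws.bedrockfoundation.getModels({ byProvider: "Amazon" });
const details = amazonModels.then(ms => aws.bedrockfoundation.getModel({
    modelId: ms.modelSummaries[0].modelId,
}));
export const selectedModelName = details.then(m => m.modelName);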
getModel Result
The following output properties are available (property names shown in TypeScript casing; C# and Go use PascalCase, Python uses snake_case):

- customizationsSupporteds (string[]) - Customizations that the model supports.
- id (string)
- inferenceTypesSupporteds (string[]) - Inference types that the model supports.
- inputModalities (string[]) - Input modalities that the model supports.
- modelArn (string) - Model ARN.
- modelId (string) - Model identifier.
- modelName (string) - Model name.
- outputModalities (string[]) - Output modalities that the model supports.
- providerName (string) - Model provider name.
- responseStreamingSupported (boolean) - Indicates whether the model supports streaming.
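These properties can be surfaced directly as stack outputs. A brief TypeScript sketch using the output form (the literal model ID is an assumption; use one enabled in your Region):

import * as aws from "@pulumi/aws";

const model = aws.bedrockfoundation.getModelOutput({
    modelId: "anthropic.claude-3-sonnet-20240229-v1:0", // illustrative model ID
});

// Expose a few of the result properties as stack outputs.
export const modelArn = model.modelArn;
export const inputModalities = model.inputModalities;
export const streamingSupported = model.responseStreamingSupported;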
Package Details
- Repository: AWS Classic pulumi/pulumi-aws
- License: Apache-2.0
- Notes: This Pulumi package is based on the aws Terraform Provider.