1. Packages
  2. AWS
  3. API Docs
  4. bedrock
  5. getInferenceProfile
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

aws.bedrock.getInferenceProfile

Explore with Pulumi AI

aws logo
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Data source for retrieving information about an AWS Bedrock Inference Profile. (Data sources read existing resources; they do not create or manage them.)

    Example Usage

    Basic Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    // List every inference profile in the current account/region.
    const test = aws.bedrock.getInferenceProfiles({});
    // Once the listing resolves, look up the first profile by its ID.
    const testGetInferenceProfile = test.then(profiles => {
        const firstProfileId = profiles.inferenceProfileSummaries?.[0]?.inferenceProfileId;
        return aws.bedrock.getInferenceProfile({
            inferenceProfileId: firstProfileId,
        });
    });
    
    import pulumi
    import pulumi_aws as aws
    
    # List every inference profile in the current account/region.
    test = aws.bedrock.get_inference_profiles()
    # Look up the first profile from the listing by its ID.
    _first_profile_id = test.inference_profile_summaries[0].inference_profile_id
    test_get_inference_profile = aws.bedrock.get_inference_profile(
        inference_profile_id=_first_profile_id,
    )
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrock"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		test, err := bedrock.GetInferenceProfiles(ctx, map[string]interface{}{}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = bedrock.GetInferenceProfile(ctx, &bedrock.GetInferenceProfileArgs{
    			InferenceProfileId: test.InferenceProfileSummaries[0].InferenceProfileId,
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        // List every inference profile in the current account/region.
        var test = Aws.Bedrock.GetInferenceProfiles.Invoke();
    
        // Project the listing down to the first profile's ID.
        var firstProfileId = test.Apply(result => result.InferenceProfileSummaries[0]?.InferenceProfileId);
    
        // Look up that profile by its ID.
        var testGetInferenceProfile = Aws.Bedrock.GetInferenceProfile.Invoke(new()
        {
            InferenceProfileId = firstProfileId,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrock.BedrockFunctions;
    import com.pulumi.aws.bedrock.inputs.GetInferenceProfileArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            // List every inference profile in the current account/region.
            final var test = BedrockFunctions.getInferenceProfiles();
    
            // inferenceProfileSummaries() returns a List, so the first element is
            // accessed with get(0) — array-style indexing ([0]) is not valid Java.
            final var testGetInferenceProfile = BedrockFunctions.getInferenceProfile(GetInferenceProfileArgs.builder()
                .inferenceProfileId(test.applyValue(getInferenceProfilesResult -> getInferenceProfilesResult.inferenceProfileSummaries().get(0).inferenceProfileId()))
                .build());
    
        }
    }
    
    variables:
      # List every inference profile (the invoke takes no arguments).
      test:
        fn::invoke:
          Function: aws:bedrock:getInferenceProfiles
          Arguments: {}
      # Look up the first profile from the listing by its ID.
      testGetInferenceProfile:
        fn::invoke:
          Function: aws:bedrock:getInferenceProfile
          Arguments:
            inferenceProfileId: ${test.inferenceProfileSummaries[0].inferenceProfileId}

    Using getInferenceProfile

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getInferenceProfile(args: GetInferenceProfileArgs, opts?: InvokeOptions): Promise<GetInferenceProfileResult>
    function getInferenceProfileOutput(args: GetInferenceProfileOutputArgs, opts?: InvokeOptions): Output<GetInferenceProfileResult>
    def get_inference_profile(inference_profile_id: Optional[str] = None,
                              opts: Optional[InvokeOptions] = None) -> GetInferenceProfileResult
    def get_inference_profile_output(inference_profile_id: Optional[pulumi.Input[str]] = None,
                              opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfileResult]
    func GetInferenceProfile(ctx *Context, args *GetInferenceProfileArgs, opts ...InvokeOption) (*GetInferenceProfileResult, error)
    func GetInferenceProfileOutput(ctx *Context, args *GetInferenceProfileOutputArgs, opts ...InvokeOption) GetInferenceProfileResultOutput

    > Note: This function is named GetInferenceProfile in the Go SDK.

    public static class GetInferenceProfile 
    {
        public static Task<GetInferenceProfileResult> InvokeAsync(GetInferenceProfileArgs args, InvokeOptions? opts = null)
        public static Output<GetInferenceProfileResult> Invoke(GetInferenceProfileInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetInferenceProfileResult> getInferenceProfile(GetInferenceProfileArgs args, InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: aws:bedrock/getInferenceProfile:getInferenceProfile
      arguments:
        # arguments dictionary

    The following arguments are supported:

    InferenceProfileId string
    Inference Profile identifier.
    InferenceProfileId string
    Inference Profile identifier.
    inferenceProfileId String
    Inference Profile identifier.
    inferenceProfileId string
    Inference Profile identifier.
    inference_profile_id str
    Inference Profile identifier.
    inferenceProfileId String
    Inference Profile identifier.

    getInferenceProfile Result

    The following output properties are available:

    CreatedAt string
    The time at which the inference profile was created.
    Description string
    The description of the inference profile.
    Id string
    The provider-assigned unique ID for this managed resource.
    InferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    InferenceProfileId string
    The unique identifier of the inference profile.
    InferenceProfileName string
    The name of the inference profile.
    Models List<GetInferenceProfileModel>
    A list of information about each model in the inference profile. See models.
    Status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    Type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    UpdatedAt string
    The time at which the inference profile was last updated.
    CreatedAt string
    The time at which the inference profile was created.
    Description string
    The description of the inference profile.
    Id string
    The provider-assigned unique ID for this managed resource.
    InferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    InferenceProfileId string
    The unique identifier of the inference profile.
    InferenceProfileName string
    The name of the inference profile.
    Models []GetInferenceProfileModel
    A list of information about each model in the inference profile. See models.
    Status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    Type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    UpdatedAt string
    The time at which the inference profile was last updated.
    createdAt String
    The time at which the inference profile was created.
    description String
    The description of the inference profile.
    id String
    The provider-assigned unique ID for this managed resource.
    inferenceProfileArn String
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId String
    The unique identifier of the inference profile.
    inferenceProfileName String
    The name of the inference profile.
    models List<GetInferenceProfileModel>
    A list of information about each model in the inference profile. See models.
    status String
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type String
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt String
    The time at which the inference profile was last updated.
    createdAt string
    The time at which the inference profile was created.
    description string
    The description of the inference profile.
    id string
    The provider-assigned unique ID for this managed resource.
    inferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId string
    The unique identifier of the inference profile.
    inferenceProfileName string
    The name of the inference profile.
    models GetInferenceProfileModel[]
    A list of information about each model in the inference profile. See models.
    status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt string
    The time at which the inference profile was last updated.
    created_at str
    The time at which the inference profile was created.
    description str
    The description of the inference profile.
    id str
    The provider-assigned unique ID for this managed resource.
    inference_profile_arn str
    The Amazon Resource Name (ARN) of the inference profile.
    inference_profile_id str
    The unique identifier of the inference profile.
    inference_profile_name str
    The name of the inference profile.
    models Sequence[GetInferenceProfileModel]
    A list of information about each model in the inference profile. See models.
    status str
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type str
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updated_at str
    The time at which the inference profile was last updated.
    createdAt String
    The time at which the inference profile was created.
    description String
    The description of the inference profile.
    id String
    The provider-assigned unique ID for this managed resource.
    inferenceProfileArn String
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId String
    The unique identifier of the inference profile.
    inferenceProfileName String
    The name of the inference profile.
    models List<Property Map>
    A list of information about each model in the inference profile. See models.
    status String
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type String
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt String
    The time at which the inference profile was last updated.

    Supporting Types

    GetInferenceProfileModel

    ModelArn string
    The Amazon Resource Name (ARN) of the model.
    ModelArn string
    The Amazon Resource Name (ARN) of the model.
    modelArn String
    The Amazon Resource Name (ARN) of the model.
    modelArn string
    The Amazon Resource Name (ARN) of the model.
    model_arn str
    The Amazon Resource Name (ARN) of the model.
    modelArn String
    The Amazon Resource Name (ARN) of the model.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.
    aws logo
    AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi