1. Packages
  2. AWS
  3. API Docs
  4. bedrock
  5. getInferenceProfiles
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

aws.bedrock.getInferenceProfiles

Explore with Pulumi AI

aws logo
AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi

    Data source for managing AWS Bedrock Inference Profiles.

    Example Usage

    Basic Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const test = aws.bedrock.getInferenceProfiles({});
    
    import pulumi
    import pulumi_aws as aws
    
    test = aws.bedrock.get_inference_profiles()
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrock"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := bedrock.GetInferenceProfiles(ctx, map[string]interface{}{}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var test = Aws.Bedrock.GetInferenceProfiles.Invoke();
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.bedrock.BedrockFunctions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var test = BedrockFunctions.getInferenceProfiles();
    
        }
    }
    
    variables:
      test:
        fn::invoke:
          Function: aws:bedrock:getInferenceProfiles
          Arguments: {}
    

    Using getInferenceProfiles

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getInferenceProfiles(opts?: InvokeOptions): Promise<GetInferenceProfilesResult>
    function getInferenceProfilesOutput(opts?: InvokeOptions): Output<GetInferenceProfilesResult>
    def get_inference_profiles(opts: Optional[InvokeOptions] = None) -> GetInferenceProfilesResult
    def get_inference_profiles_output(opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfilesResult]
    func GetInferenceProfiles(ctx *Context, opts ...InvokeOption) (*GetInferenceProfilesResult, error)
    func GetInferenceProfilesOutput(ctx *Context, opts ...InvokeOption) GetInferenceProfilesResultOutput

    > Note: This function is named GetInferenceProfiles in the Go SDK.

    public static class GetInferenceProfiles 
    {
        public static Task<GetInferenceProfilesResult> InvokeAsync(InvokeOptions? opts = null)
        public static Output<GetInferenceProfilesResult> Invoke(InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetInferenceProfilesResult> getInferenceProfiles(InvokeOptions options)
    // Output-based functions aren't available in Java yet
    
    fn::invoke:
      function: aws:bedrock/getInferenceProfiles:getInferenceProfiles
      arguments:
        # arguments dictionary

    getInferenceProfiles Result

    The following output properties are available:

    Id string
    The provider-assigned unique ID for this managed resource.
    InferenceProfileSummaries List<GetInferenceProfilesInferenceProfileSummary>
    List of inference profile summary objects. See inference_profile_summaries.
    Id string
    The provider-assigned unique ID for this managed resource.
    InferenceProfileSummaries []GetInferenceProfilesInferenceProfileSummary
    List of inference profile summary objects. See inference_profile_summaries.
    id String
    The provider-assigned unique ID for this managed resource.
    inferenceProfileSummaries List<GetInferenceProfilesInferenceProfileSummary>
    List of inference profile summary objects. See inference_profile_summaries.
    id string
    The provider-assigned unique ID for this managed resource.
    inferenceProfileSummaries GetInferenceProfilesInferenceProfileSummary[]
    List of inference profile summary objects. See inference_profile_summaries.
    id str
    The provider-assigned unique ID for this managed resource.
    inference_profile_summaries Sequence[GetInferenceProfilesInferenceProfileSummary]
    List of inference profile summary objects. See inference_profile_summaries.
    id String
    The provider-assigned unique ID for this managed resource.
    inferenceProfileSummaries List<Property Map>
    List of inference profile summary objects. See inference_profile_summaries.

    Supporting Types

    GetInferenceProfilesInferenceProfileSummary

    CreatedAt string
    The time at which the inference profile was created.
    Description string
    The description of the inference profile.
    InferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    InferenceProfileId string
    The unique identifier of the inference profile.
    InferenceProfileName string
    The name of the inference profile.
    Models List<GetInferenceProfilesInferenceProfileSummaryModel>
    A list of information about each model in the inference profile. See models.
    Status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    Type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    UpdatedAt string
    The time at which the inference profile was last updated.
    CreatedAt string
    The time at which the inference profile was created.
    Description string
    The description of the inference profile.
    InferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    InferenceProfileId string
    The unique identifier of the inference profile.
    InferenceProfileName string
    The name of the inference profile.
    Models []GetInferenceProfilesInferenceProfileSummaryModel
    A list of information about each model in the inference profile. See models.
    Status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    Type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    UpdatedAt string
    The time at which the inference profile was last updated.
    createdAt String
    The time at which the inference profile was created.
    description String
    The description of the inference profile.
    inferenceProfileArn String
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId String
    The unique identifier of the inference profile.
    inferenceProfileName String
    The name of the inference profile.
    models List<GetInferenceProfilesInferenceProfileSummaryModel>
    A list of information about each model in the inference profile. See models.
    status String
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type String
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt String
    The time at which the inference profile was last updated.
    createdAt string
    The time at which the inference profile was created.
    description string
    The description of the inference profile.
    inferenceProfileArn string
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId string
    The unique identifier of the inference profile.
    inferenceProfileName string
    The name of the inference profile.
    models GetInferenceProfilesInferenceProfileSummaryModel[]
    A list of information about each model in the inference profile. See models.
    status string
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type string
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt string
    The time at which the inference profile was last updated.
    created_at str
    The time at which the inference profile was created.
    description str
    The description of the inference profile.
    inference_profile_arn str
    The Amazon Resource Name (ARN) of the inference profile.
    inference_profile_id str
    The unique identifier of the inference profile.
    inference_profile_name str
    The name of the inference profile.
    models Sequence[GetInferenceProfilesInferenceProfileSummaryModel]
    A list of information about each model in the inference profile. See models.
    status str
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type str
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updated_at str
    The time at which the inference profile was last updated.
    createdAt String
    The time at which the inference profile was created.
    description String
    The description of the inference profile.
    inferenceProfileArn String
    The Amazon Resource Name (ARN) of the inference profile.
    inferenceProfileId String
    The unique identifier of the inference profile.
    inferenceProfileName String
    The name of the inference profile.
    models List<Property Map>
    A list of information about each model in the inference profile. See models.
    status String
    The status of the inference profile. ACTIVE means that the inference profile is available to use.
    type String
    The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock.
    updatedAt String
    The time at which the inference profile was last updated.

    GetInferenceProfilesInferenceProfileSummaryModel

    ModelArn string
    The Amazon Resource Name (ARN) of the model.
    ModelArn string
    The Amazon Resource Name (ARN) of the model.
    modelArn String
    The Amazon Resource Name (ARN) of the model.
    modelArn string
    The Amazon Resource Name (ARN) of the model.
    model_arn str
    The Amazon Resource Name (ARN) of the model.
    modelArn String
    The Amazon Resource Name (ARN) of the model.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.
    aws logo
    AWS v6.60.0 published on Tuesday, Nov 19, 2024 by Pulumi