AWS v6.77.1 published on Friday, Apr 18, 2025 by Pulumi
aws.bedrock.getInferenceProfile
Data source for managing an AWS Bedrock Inference Profile.
Example Usage
Basic Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const test = aws.bedrock.getInferenceProfiles({});
const testGetInferenceProfile = test.then(test => aws.bedrock.getInferenceProfile({
    inferenceProfileId: test.inferenceProfileSummaries?.[0]?.inferenceProfileId,
}));
Python
import pulumi
import pulumi_aws as aws
test = aws.bedrock.get_inference_profiles()
test_get_inference_profile = aws.bedrock.get_inference_profile(inference_profile_id=test.inference_profile_summaries[0].inference_profile_id)
Go
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrock"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		test, err := bedrock.GetInferenceProfiles(ctx, nil, nil)
		if err != nil {
			return err
		}
		_, err = bedrock.LookupInferenceProfile(ctx, &bedrock.LookupInferenceProfileArgs{
			InferenceProfileId: test.InferenceProfileSummaries[0].InferenceProfileId,
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var test = Aws.Bedrock.GetInferenceProfiles.Invoke();
    var testGetInferenceProfile = Aws.Bedrock.GetInferenceProfile.Invoke(new()
    {
        InferenceProfileId = test.Apply(getInferenceProfilesResult => getInferenceProfilesResult.InferenceProfileSummaries[0]?.InferenceProfileId),
    });
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrock.BedrockFunctions;
import com.pulumi.aws.bedrock.inputs.GetInferenceProfileArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        final var test = BedrockFunctions.getInferenceProfiles();

        // Look up the first profile's ID once the summaries are resolved.
        final var testGetInferenceProfile = BedrockFunctions.getInferenceProfile(GetInferenceProfileArgs.builder()
            .inferenceProfileId(test.applyValue(getInferenceProfilesResult -> getInferenceProfilesResult.inferenceProfileSummaries().get(0).inferenceProfileId()))
            .build());
    }
}
YAML
variables:
  test:
    fn::invoke:
      function: aws:bedrock:getInferenceProfiles
      arguments: {}
  testGetInferenceProfile:
    fn::invoke:
      function: aws:bedrock:getInferenceProfile
      arguments:
        inferenceProfileId: ${test.inferenceProfileSummaries[0].inferenceProfileId}
Using getInferenceProfile
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
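As a quick illustration, a minimal TypeScript sketch contrasting the two forms (the profile ID shown is a hypothetical placeholder):

import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Direct form: plain arguments in, Promise-wrapped result out.
const direct = aws.bedrock.getInferenceProfile({
    inferenceProfileId: "example-profile-id", // hypothetical placeholder
});
export const directArn = direct.then(result => result.inferenceProfileArn);

// Output form: Input-wrapped arguments in, Output-wrapped result out,
// so values not known until deployment can be passed through.
const wrapped = aws.bedrock.getInferenceProfileOutput({
    inferenceProfileId: pulumi.output("example-profile-id"), // hypothetical placeholder
});
export const wrappedArn = wrapped.inferenceProfileArn;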
TypeScript
function getInferenceProfile(args: GetInferenceProfileArgs, opts?: InvokeOptions): Promise<GetInferenceProfileResult>
function getInferenceProfileOutput(args: GetInferenceProfileOutputArgs, opts?: InvokeOptions): Output<GetInferenceProfileResult>

Python
def get_inference_profile(inference_profile_id: Optional[str] = None,
                          opts: Optional[InvokeOptions] = None) -> GetInferenceProfileResult
def get_inference_profile_output(inference_profile_id: Optional[pulumi.Input[str]] = None,
                          opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfileResult]

Go
func LookupInferenceProfile(ctx *Context, args *LookupInferenceProfileArgs, opts ...InvokeOption) (*LookupInferenceProfileResult, error)
func LookupInferenceProfileOutput(ctx *Context, args *LookupInferenceProfileOutputArgs, opts ...InvokeOption) LookupInferenceProfileResultOutput

> Note: This function is named LookupInferenceProfile in the Go SDK.

C#
public static class GetInferenceProfile 
{
    public static Task<GetInferenceProfileResult> InvokeAsync(GetInferenceProfileArgs args, InvokeOptions? opts = null)
    public static Output<GetInferenceProfileResult> Invoke(GetInferenceProfileInvokeArgs args, InvokeOptions? opts = null)
}

Java
public static CompletableFuture<GetInferenceProfileResult> getInferenceProfile(GetInferenceProfileArgs args, InvokeOptions options)
public static Output<GetInferenceProfileResult> getInferenceProfile(GetInferenceProfileArgs args, InvokeOptions options)

YAML
fn::invoke:
  function: aws:bedrock/getInferenceProfile:getInferenceProfile
  arguments:
    # arguments dictionary

The following arguments are supported:

- inferenceProfileId (string, required) - Inference Profile identifier. (Python: inference_profile_id; Go and C#: InferenceProfileId)
getInferenceProfile Result
The following output properties are available (property names shown in TypeScript casing; Python uses snake_case, Go and C# use PascalCase):

- createdAt (string) - The time at which the inference profile was created.
- description (string) - The description of the inference profile.
- id (string) - The provider-assigned unique ID for this managed resource.
- inferenceProfileArn (string) - The Amazon Resource Name (ARN) of the inference profile.
- inferenceProfileId (string)
- inferenceProfileName (string) - The unique identifier of the inference profile.
- models (GetInferenceProfileModel[]) - A list of information about each model in the inference profile. See GetInferenceProfileModel in Supporting Types below.
- status (string) - The status of the inference profile. ACTIVE means that the inference profile is available to use.
- type (string) - The type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock; APPLICATION means that the inference profile is defined by the user.
- updatedAt (string) - The time at which the inference profile was last updated.
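For instance, a minimal TypeScript sketch that surfaces the status and type properties (the profile ID is a hypothetical placeholder):

import * as aws from "@pulumi/aws";

const profile = aws.bedrock.getInferenceProfileOutput({
    inferenceProfileId: "example-profile-id", // hypothetical placeholder
});

// ACTIVE means the inference profile is available to use.
export const status = profile.status;
// SYSTEM_DEFINED (defined by Amazon Bedrock) or APPLICATION (defined by the user).
export const profileType = profile.type;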
Supporting Types
GetInferenceProfileModel

- modelArn (string) - The Amazon Resource Name (ARN) of the model. (Python: model_arn; Go and C#: ModelArn)
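A minimal TypeScript sketch of reading this nested type (the profile ID is a hypothetical placeholder):

import * as aws from "@pulumi/aws";

const profile = aws.bedrock.getInferenceProfileOutput({
    inferenceProfileId: "example-profile-id", // hypothetical placeholder
});

// models is a list of GetInferenceProfileModel values; each carries the model's ARN.
export const modelArns = profile.models.apply(models => models.map(m => m.modelArn));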
Package Details
- Repository: AWS Classic pulumi/pulumi-aws
- License: Apache-2.0
- Notes: This Pulumi package is based on the aws Terraform Provider.