AWS v6.77.1 published on Friday, Apr 18, 2025 by Pulumi
aws.bedrockfoundation.getModels
Explore with Pulumi AI
Data source for managing AWS Bedrock Foundation Models.
Example Usage
Basic Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// List all Bedrock foundation models available in the current region (no filters).
const test = aws.bedrockfoundation.getModels({});
import pulumi
import pulumi_aws as aws

# List all Bedrock foundation models available in the current region (no filters).
test = aws.bedrockfoundation.get_models()
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bedrockfoundation.GetModels(ctx, &bedrockfoundation.GetModelsArgs{}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

// List all Bedrock foundation models available in the current region (no filters).
return await Deployment.RunAsync(() => 
{
    var test = Aws.BedrockFoundation.GetModels.Invoke();
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // List all Bedrock foundation models available in the current region (no filters).
        final var test = BedrockfoundationFunctions.getModels(GetModelsArgs.builder()
            .build());
    }
}
# List all Bedrock foundation models available in the current region (no filters).
variables:
  test:
    fn::invoke:
      function: aws:bedrockfoundation:getModels
      arguments: {}
Filter by Inference Type
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";

// Only return models that support on-demand (ON_DEMAND) inference.
const test = aws.bedrockfoundation.getModels({
    byInferenceType: "ON_DEMAND",
});
import pulumi
import pulumi_aws as aws

# Only return models that support on-demand (ON_DEMAND) inference.
test = aws.bedrockfoundation.get_models(by_inference_type="ON_DEMAND")
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/bedrockfoundation"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bedrockfoundation.GetModels(ctx, &bedrockfoundation.GetModelsArgs{
			ByInferenceType: pulumi.StringRef("ON_DEMAND"),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;

// Only return models that support on-demand (ON_DEMAND) inference.
return await Deployment.RunAsync(() => 
{
    var test = Aws.BedrockFoundation.GetModels.Invoke(new()
    {
        ByInferenceType = "ON_DEMAND",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrockfoundation.BedrockfoundationFunctions;
import com.pulumi.aws.bedrockfoundation.inputs.GetModelsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // Only return models that support on-demand (ON_DEMAND) inference.
        final var test = BedrockfoundationFunctions.getModels(GetModelsArgs.builder()
            .byInferenceType("ON_DEMAND")
            .build());
    }
}
# Only return models that support on-demand (ON_DEMAND) inference.
variables:
  test:
    fn::invoke:
      function: aws:bedrockfoundation:getModels
      arguments:
        byInferenceType: ON_DEMAND
Using getModels
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getModels(args: GetModelsArgs, opts?: InvokeOptions): Promise<GetModelsResult>
function getModelsOutput(args: GetModelsOutputArgs, opts?: InvokeOptions): Output<GetModelsResult>
def get_models(by_customization_type: Optional[str] = None,
               by_inference_type: Optional[str] = None,
               by_output_modality: Optional[str] = None,
               by_provider: Optional[str] = None,
               opts: Optional[InvokeOptions] = None) -> GetModelsResult
def get_models_output(by_customization_type: Optional[pulumi.Input[str]] = None,
               by_inference_type: Optional[pulumi.Input[str]] = None,
               by_output_modality: Optional[pulumi.Input[str]] = None,
               by_provider: Optional[pulumi.Input[str]] = None,
               opts: Optional[InvokeOptions] = None) -> Output[GetModelsResult]
func GetModels(ctx *Context, args *GetModelsArgs, opts ...InvokeOption) (*GetModelsResult, error)
func GetModelsOutput(ctx *Context, args *GetModelsOutputArgs, opts ...InvokeOption) GetModelsResultOutput

> Note: This function is named GetModels in the Go SDK.
public static class GetModels 
{
    public static Task<GetModelsResult> InvokeAsync(GetModelsArgs args, InvokeOptions? opts = null)
    public static Output<GetModelsResult> Invoke(GetModelsInvokeArgs args, InvokeOptions? opts = null)
}

public static CompletableFuture<GetModelsResult> getModels(GetModelsArgs args, InvokeOptions options)
public static Output<GetModelsResult> getModels(GetModelsArgs args, InvokeOptions options)
fn::invoke:
  function: aws:bedrockfoundation/getModels:getModels
  arguments:
    # arguments dictionary

The following arguments are supported:
- ByCustomizationType string - Customization type to filter on. Valid values are FINE_TUNING.
- ByInferenceType string - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- ByOutputModality string - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- ByProvider string - Model provider to filter on.
 
- ByCustomizationType string - Customization type to filter on. Valid values are FINE_TUNING.
- ByInferenceType string - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- ByOutputModality string - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- ByProvider string - Model provider to filter on.
 
- byCustomizationType String - Customization type to filter on. Valid values are FINE_TUNING.
- byInferenceType String - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- byOutputModality String - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- byProvider String - Model provider to filter on.
 
- byCustomizationType string - Customization type to filter on. Valid values are FINE_TUNING.
- byInferenceType string - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- byOutputModality string - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- byProvider string - Model provider to filter on.
 
- by_customization_type str - Customization type to filter on. Valid values are FINE_TUNING.
- by_inference_type str - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- by_output_modality str - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- by_provider str - Model provider to filter on.
 
- byCustomizationType String - Customization type to filter on. Valid values are FINE_TUNING.
- byInferenceType String - Inference type to filter on. Valid values are ON_DEMAND and PROVISIONED.
- byOutputModality String - Output modality to filter on. Valid values are TEXT, IMAGE, and EMBEDDING.
- byProvider String - Model provider to filter on.
 
getModels Result
The following output properties are available:
- Id string - AWS region.
- ModelSummaries List<GetModelsModelSummary> - List of model summary objects. See model_summaries.
- ByCustomizationType string
- ByInferenceType string
- ByOutputModality string
- ByProvider string
- Id string - AWS region.
- ModelSummaries []GetModelsModelSummary - List of model summary objects. See model_summaries.
- ByCustomizationType string
- ByInferenceType string
- ByOutputModality string
- ByProvider string
- id String - AWS region.
- modelSummaries List<GetModelsModelSummary> - List of model summary objects. See model_summaries.
- byCustomizationType String
- byInferenceType String
- byOutputModality String
- byProvider String
- id string - AWS region.
- modelSummaries GetModelsModelSummary[] - List of model summary objects. See model_summaries.
- byCustomizationType string
- byInferenceType string
- byOutputModality string
- byProvider string
- id str - AWS region.
- model_summaries Sequence[GetModelsModelSummary] - List of model summary objects. See model_summaries.
- by_customization_type str
- by_inference_type str
- by_output_modality str
- by_provider str
- id String - AWS region.
- modelSummaries List<Property Map> - List of model summary objects. See model_summaries.
- byCustomizationType String
- byInferenceType String
- byOutputModality String
- byProvider String
Supporting Types
GetModelsModelSummary   
- CustomizationsSupporteds List<string> - Customizations that the model supports.
- InferenceTypesSupporteds List<string> - Inference types that the model supports.
- InputModalities List<string> - Input modalities that the model supports.
- ModelArn string - Model ARN.
- ModelId string - Model identifier.
- ModelName string - Model name.
- OutputModalities List<string> - Output modalities that the model supports.
- ProviderName string - Model provider name.
- ResponseStreamingSupported bool - Indicates whether the model supports streaming.
 
- CustomizationsSupporteds []string - Customizations that the model supports.
- InferenceTypesSupporteds []string - Inference types that the model supports.
- InputModalities []string - Input modalities that the model supports.
- ModelArn string - Model ARN.
- ModelId string - Model identifier.
- ModelName string - Model name.
- OutputModalities []string - Output modalities that the model supports.
- ProviderName string - Model provider name.
- ResponseStreamingSupported bool - Indicates whether the model supports streaming.
 
- customizationsSupporteds List<String> - Customizations that the model supports.
- inferenceTypesSupporteds List<String> - Inference types that the model supports.
- inputModalities List<String> - Input modalities that the model supports.
- modelArn String - Model ARN.
- modelId String - Model identifier.
- modelName String - Model name.
- outputModalities List<String> - Output modalities that the model supports.
- providerName String - Model provider name.
- responseStreamingSupported Boolean - Indicates whether the model supports streaming.
 
- customizationsSupporteds string[] - Customizations that the model supports.
- inferenceTypesSupporteds string[] - Inference types that the model supports.
- inputModalities string[] - Input modalities that the model supports.
- modelArn string - Model ARN.
- modelId string - Model identifier.
- modelName string - Model name.
- outputModalities string[] - Output modalities that the model supports.
- providerName string - Model provider name.
- responseStreamingSupported boolean - Indicates whether the model supports streaming.
 
- customizations_supporteds Sequence[str] - Customizations that the model supports.
- inference_types_supporteds Sequence[str] - Inference types that the model supports.
- input_modalities Sequence[str] - Input modalities that the model supports.
- model_arn str - Model ARN.
- model_id str - Model identifier.
- model_name str - Model name.
- output_modalities Sequence[str] - Output modalities that the model supports.
- provider_name str - Model provider name.
- response_streaming_supported bool - Indicates whether the model supports streaming.
 
- customizationsSupporteds List<String> - Customizations that the model supports.
- inferenceTypesSupporteds List<String> - Inference types that the model supports.
- inputModalities List<String> - Input modalities that the model supports.
- modelArn String - Model ARN.
- modelId String - Model identifier.
- modelName String - Model name.
- outputModalities List<String> - Output modalities that the model supports.
- providerName String - Model provider name.
- responseStreamingSupported Boolean - Indicates whether the model supports streaming.
 
Package Details
- Repository
 - AWS Classic pulumi/pulumi-aws
 - License
 - Apache-2.0
 - Notes
 - This Pulumi package is based on the aws Terraform Provider.