hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software
hsdp.getAiInferenceComputeTargets
Explore with Pulumi AI
hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software
Retrieves AI Inference Compute Targets
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as hsdp from "@pulumi/hsdp";

// The tenant organization ID is supplied via stack configuration
// (`pulumi config set inferenceTenantOrgId <org-id>`) rather than the
// undefined Terraform-style `_var` reference left over from conversion.
const config = new pulumi.Config();
const inferenceTenantOrgId = config.require("inferenceTenantOrgId");

// Discover the regional endpoint for the AI Inference service.
const inferenceConfig = hsdp.getConfig({
    service: "inference",
});
// Look up the service instance for the tenant organization.
const inferenceAiInferenceServiceInstance = inferenceConfig.then(inferenceConfig => hsdp.getAiInferenceServiceInstance({
    baseUrl: inferenceConfig.url,
    organizationId: inferenceTenantOrgId,
}));
// Retrieve the available compute targets from the service instance endpoint.
const targets = inferenceAiInferenceServiceInstance.then(inferenceAiInferenceServiceInstance => hsdp.getAiInferenceComputeTargets({
    endpoint: inferenceAiInferenceServiceInstance.endpoint,
}));
import pulumi
import pulumi_hsdp as hsdp

# The tenant organization ID is supplied via stack configuration
# (`pulumi config set inference_tenant_org_id <org-id>`) rather than the
# undefined Terraform-style `var` reference left over from conversion.
config = pulumi.Config()
inference_tenant_org_id = config.require("inference_tenant_org_id")

# Discover the regional endpoint for the AI Inference service.
inference_config = hsdp.get_config(service="inference")
# Look up the service instance for the tenant organization.
inference_ai_inference_service_instance = hsdp.get_ai_inference_service_instance(
    base_url=inference_config.url,
    organization_id=inference_tenant_org_id)
# Retrieve the available compute targets from the service instance endpoint.
targets = hsdp.get_ai_inference_compute_targets(
    endpoint=inference_ai_inference_service_instance.endpoint)
package main
import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/hsdp/hsdp"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		inferenceConfig, err := hsdp.GetConfig(ctx, &hsdp.GetConfigArgs{
			Service: "inference",
		}, nil)
		if err != nil {
			return err
		}
		inferenceAiInferenceServiceInstance, err := hsdp.GetAiInferenceServiceInstance(ctx, &hsdp.GetAiInferenceServiceInstanceArgs{
			BaseUrl:        inferenceConfig.Url,
			OrganizationId: _var.Inference_tenant_org_id,
		}, nil)
		if err != nil {
			return err
		}
		_, err = hsdp.GetAiInferenceComputeTargets(ctx, &hsdp.GetAiInferenceComputeTargetsArgs{
			Endpoint: inferenceAiInferenceServiceInstance.Endpoint,
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Hsdp = Pulumi.Hsdp;

return await Deployment.RunAsync(() => 
{
    // The tenant organization ID is supplied via stack configuration
    // (`pulumi config set inferenceTenantOrgId <org-id>`) rather than the
    // undefined Terraform-style `@var` reference left over from conversion.
    var config = new Config();
    var inferenceTenantOrgId = config.Require("inferenceTenantOrgId");

    // Discover the regional endpoint for the AI Inference service.
    var inferenceConfig = Hsdp.GetConfig.Invoke(new()
    {
        Service = "inference",
    });
    // Look up the service instance for the tenant organization.
    var inferenceAiInferenceServiceInstance = Hsdp.GetAiInferenceServiceInstance.Invoke(new()
    {
        BaseUrl = inferenceConfig.Apply(getConfigResult => getConfigResult.Url),
        OrganizationId = inferenceTenantOrgId,
    });
    // Retrieve the available compute targets from the service instance endpoint.
    var targets = Hsdp.GetAiInferenceComputeTargets.Invoke(new()
    {
        Endpoint = inferenceAiInferenceServiceInstance.Apply(getAiInferenceServiceInstanceResult => getAiInferenceServiceInstanceResult.Endpoint),
    });
});
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.hsdp.HsdpFunctions;
import com.pulumi.hsdp.inputs.GetConfigArgs;
import com.pulumi.hsdp.inputs.GetAiInferenceServiceInstanceArgs;
import com.pulumi.hsdp.inputs.GetAiInferenceComputeTargetsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // The tenant organization ID is supplied via stack configuration
        // (`pulumi config set inferenceTenantOrgId <org-id>`) rather than the
        // undefined Terraform-style `var_` reference left over from conversion.
        final var inferenceTenantOrgId = ctx.config().require("inferenceTenantOrgId");

        // Discover the regional endpoint for the AI Inference service.
        final var inferenceConfig = HsdpFunctions.getConfig(GetConfigArgs.builder()
            .service("inference")
            .build());
        // Look up the service instance for the tenant organization.
        final var inferenceAiInferenceServiceInstance = HsdpFunctions.getAiInferenceServiceInstance(GetAiInferenceServiceInstanceArgs.builder()
            .baseUrl(inferenceConfig.applyValue(getConfigResult -> getConfigResult.url()))
            .organizationId(inferenceTenantOrgId)
            .build());
        // Retrieve the available compute targets from the service instance endpoint.
        final var targets = HsdpFunctions.getAiInferenceComputeTargets(GetAiInferenceComputeTargetsArgs.builder()
            .endpoint(inferenceAiInferenceServiceInstance.applyValue(getAiInferenceServiceInstanceResult -> getAiInferenceServiceInstanceResult.endpoint()))
            .build());
    }
}
# The tenant organization ID is supplied via stack configuration
# (`pulumi config set inferenceTenantOrgId <org-id>`); Terraform-style
# `${var.*}` references are not valid in Pulumi YAML.
configuration:
  inferenceTenantOrgId:
    type: String
variables:
  inferenceConfig:
    fn::invoke:
      function: hsdp:getConfig
      arguments:
        service: inference
  inferenceAiInferenceServiceInstance:
    fn::invoke:
      function: hsdp:getAiInferenceServiceInstance
      arguments:
        baseUrl: ${inferenceConfig.url}
        organizationId: ${inferenceTenantOrgId}
  targets:
    fn::invoke:
      function: hsdp:getAiInferenceComputeTargets
      arguments:
        endpoint: ${inferenceAiInferenceServiceInstance.endpoint}
Using getAiInferenceComputeTargets
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getAiInferenceComputeTargets(args: GetAiInferenceComputeTargetsArgs, opts?: InvokeOptions): Promise<GetAiInferenceComputeTargetsResult>
function getAiInferenceComputeTargetsOutput(args: GetAiInferenceComputeTargetsOutputArgs, opts?: InvokeOptions): Output<GetAiInferenceComputeTargetsResult>

def get_ai_inference_compute_targets(endpoint: Optional[str] = None,
                                     id: Optional[str] = None,
                                     opts: Optional[InvokeOptions] = None) -> GetAiInferenceComputeTargetsResult
def get_ai_inference_compute_targets_output(endpoint: Optional[pulumi.Input[str]] = None,
                                     id: Optional[pulumi.Input[str]] = None,
                                     opts: Optional[InvokeOptions] = None) -> Output[GetAiInferenceComputeTargetsResult]

func GetAiInferenceComputeTargets(ctx *Context, args *GetAiInferenceComputeTargetsArgs, opts ...InvokeOption) (*GetAiInferenceComputeTargetsResult, error)
func GetAiInferenceComputeTargetsOutput(ctx *Context, args *GetAiInferenceComputeTargetsOutputArgs, opts ...InvokeOption) GetAiInferenceComputeTargetsResultOutput

> Note: This function is named GetAiInferenceComputeTargets in the Go SDK.
public static class GetAiInferenceComputeTargets 
{
    public static Task<GetAiInferenceComputeTargetsResult> InvokeAsync(GetAiInferenceComputeTargetsArgs args, InvokeOptions? opts = null)
    public static Output<GetAiInferenceComputeTargetsResult> Invoke(GetAiInferenceComputeTargetsInvokeArgs args, InvokeOptions? opts = null)
}

public static CompletableFuture<GetAiInferenceComputeTargetsResult> getAiInferenceComputeTargets(GetAiInferenceComputeTargetsArgs args, InvokeOptions options)
public static Output<GetAiInferenceComputeTargetsResult> getAiInferenceComputeTargets(GetAiInferenceComputeTargetsArgs args, InvokeOptions options)
fn::invoke:
  function: hsdp:index/getAiInferenceComputeTargets:getAiInferenceComputeTargets
  arguments:
    # arguments dictionary

The following arguments are supported:
getAiInferenceComputeTargets Result
The following output properties are available:
Package Details
- Repository
 - hsdp philips-software/terraform-provider-hsdp
 - License
 - Notes
 - This Pulumi package is based on the hsdp Terraform Provider.
hsdp 0.65.3 published on Tuesday, Apr 15, 2025 by philips-software