1. Packages
  2. Oracle Cloud Infrastructure
  3. API Docs
  4. GenerativeAi
  5. getEndpoint
Oracle Cloud Infrastructure v3.10.0 published on Wednesday, Nov 5, 2025 by Pulumi

oci.GenerativeAi.getEndpoint

Start a Neo task
Explain and create an oci.GenerativeAi.getEndpoint resource
oci logo
Oracle Cloud Infrastructure v3.10.0 published on Wednesday, Nov 5, 2025 by Pulumi

    This data source provides details about a specific Endpoint resource in Oracle Cloud Infrastructure Generative AI service.

    Gets information about an endpoint.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as oci from "@pulumi/oci";
    
    const testEndpoint = oci.GenerativeAi.getEndpoint({
        endpointId: testEndpointOciGenerativeAiEndpoint.id,
    });
    
    import pulumi
    import pulumi_oci as oci
    
    test_endpoint = oci.GenerativeAi.get_endpoint(endpoint_id=test_endpoint_oci_generative_ai_endpoint["id"])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-oci/sdk/v3/go/oci/generativeai"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := generativeai.GetEndpoint(ctx, &generativeai.GetEndpointArgs{
    			EndpointId: testEndpointOciGenerativeAiEndpoint.Id,
    		}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Oci = Pulumi.Oci;
    
    return await Deployment.RunAsync(() => 
    {
        var testEndpoint = Oci.GenerativeAi.GetEndpoint.Invoke(new()
        {
            EndpointId = testEndpointOciGenerativeAiEndpoint.Id,
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.oci.GenerativeAi.GenerativeAiFunctions;
    import com.pulumi.oci.GenerativeAi.inputs.GetEndpointArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var testEndpoint = GenerativeAiFunctions.getEndpoint(GetEndpointArgs.builder()
                .endpointId(testEndpointOciGenerativeAiEndpoint.id())
                .build());
    
        }
    }
    
    variables:
      testEndpoint:
        fn::invoke:
          function: oci:GenerativeAi:getEndpoint
          arguments:
            endpointId: ${testEndpointOciGenerativeAiEndpoint.id}
    

    Using getEndpoint

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getEndpoint(args: GetEndpointArgs, opts?: InvokeOptions): Promise<GetEndpointResult>
    function getEndpointOutput(args: GetEndpointOutputArgs, opts?: InvokeOptions): Output<GetEndpointResult>
    def get_endpoint(endpoint_id: Optional[str] = None,
                     opts: Optional[InvokeOptions] = None) -> GetEndpointResult
    def get_endpoint_output(endpoint_id: Optional[pulumi.Input[str]] = None,
                     opts: Optional[InvokeOptions] = None) -> Output[GetEndpointResult]
    func LookupEndpoint(ctx *Context, args *LookupEndpointArgs, opts ...InvokeOption) (*LookupEndpointResult, error)
    func LookupEndpointOutput(ctx *Context, args *LookupEndpointOutputArgs, opts ...InvokeOption) LookupEndpointResultOutput

    > Note: This function is named LookupEndpoint in the Go SDK.

    public static class GetEndpoint 
    {
        public static Task<GetEndpointResult> InvokeAsync(GetEndpointArgs args, InvokeOptions? opts = null)
        public static Output<GetEndpointResult> Invoke(GetEndpointInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetEndpointResult> getEndpoint(GetEndpointArgs args, InvokeOptions options)
    public static Output<GetEndpointResult> getEndpoint(GetEndpointArgs args, InvokeOptions options)
    
    fn::invoke:
      function: oci:GenerativeAi/getEndpoint:getEndpoint
      arguments:
        # arguments dictionary

    The following arguments are supported:

    EndpointId string
    The OCID of the endpoint.
    EndpointId string
    The OCID of the endpoint.
    endpointId String
    The OCID of the endpoint.
    endpointId string
    The OCID of the endpoint.
    endpoint_id str
    The OCID of the endpoint.
    endpointId String
    The OCID of the endpoint.

    getEndpoint Result

    The following output properties are available:

    CompartmentId string
    ContentModerationConfigs List<GetEndpointContentModerationConfig>
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    DedicatedAiClusterId string
    The OCID of the dedicated AI cluster on which the model is deployed.
    DefinedTags Dictionary<string, string>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    Description string
    DisplayName string
    EndpointId string
    FreeformTags Dictionary<string, string>
    GenerativeAiPrivateEndpointId string
    Id string
    LifecycleDetails string
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    ModelId string
    The OCID of the model used for the feature.
    State string
    The current state of the endpoint.
    SystemTags Dictionary<string, string>
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    TimeCreated string
    TimeUpdated string
    CompartmentId string
    ContentModerationConfigs []GetEndpointContentModerationConfig
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    DedicatedAiClusterId string
    The OCID of the dedicated AI cluster on which the model is deployed.
    DefinedTags map[string]string
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    Description string
    DisplayName string
    EndpointId string
    FreeformTags map[string]string
    GenerativeAiPrivateEndpointId string
    Id string
    LifecycleDetails string
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    ModelId string
    The OCID of the model used for the feature.
    State string
    The current state of the endpoint.
    SystemTags map[string]string
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    TimeCreated string
    TimeUpdated string
    compartmentId String
    contentModerationConfigs List<GetEndpointContentModerationConfig>
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    dedicatedAiClusterId String
    The OCID of the dedicated AI cluster on which the model is deployed.
    definedTags Map<String,String>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description String
    displayName String
    endpointId String
    freeformTags Map<String,String>
    generativeAiPrivateEndpointId String
    id String
    lifecycleDetails String
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    modelId String
    The OCID of the model used for the feature.
    state String
    The current state of the endpoint.
    systemTags Map<String,String>
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    timeCreated String
    timeUpdated String
    compartmentId string
    contentModerationConfigs GetEndpointContentModerationConfig[]
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    dedicatedAiClusterId string
    The OCID of the dedicated AI cluster on which the model is deployed.
    definedTags {[key: string]: string}
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description string
    displayName string
    endpointId string
    freeformTags {[key: string]: string}
    generativeAiPrivateEndpointId string
    id string
    lifecycleDetails string
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    modelId string
    The OCID of the model used for the feature.
    state string
    The current state of the endpoint.
    systemTags {[key: string]: string}
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    timeCreated string
    timeUpdated string
    compartment_id str
    content_moderation_configs Sequence[GetEndpointContentModerationConfig]
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    dedicated_ai_cluster_id str
    The OCID of the dedicated AI cluster on which the model is deployed.
    defined_tags Mapping[str, str]
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description str
    display_name str
    endpoint_id str
    freeform_tags Mapping[str, str]
    generative_ai_private_endpoint_id str
    id str
    lifecycle_details str
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    model_id str
    The OCID of the model used for the feature.
    state str
    The current state of the endpoint.
    system_tags Mapping[str, str]
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    time_created str
    time_updated str
    compartmentId String
    contentModerationConfigs List<Property Map>
    The configuration details specifying whether to add the content moderation feature to the model. Content moderation removes toxic and biased content from responses.
    dedicatedAiClusterId String
    The OCID of the dedicated AI cluster on which the model is deployed.
    definedTags Map<String>
    Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. Example: {"Operations.CostCenter": "42"}
    description String
    displayName String
    endpointId String
    freeformTags Map<String>
    generativeAiPrivateEndpointId String
    id String
    lifecycleDetails String
    A message describing the current state of the endpoint in more detail, which can provide actionable information.
    modelId String
    The OCID of the model used for the feature.
    state String
    The current state of the endpoint.
    systemTags Map<String>
    System tags for this resource. Each key is predefined and scoped to a namespace. Example: {"orcl-cloud.free-tier-retained": "true"}
    timeCreated String
    timeUpdated String

    Supporting Types

    GetEndpointContentModerationConfig

    IsEnabled bool
    Whether to enable the content moderation feature.
    Mode string
    Enum for the modes of operation for inference protection.
    ModelId string
    The OCID of the model used for the feature.
    IsEnabled bool
    Whether to enable the content moderation feature.
    Mode string
    Enum for the modes of operation for inference protection.
    ModelId string
    The OCID of the model used for the feature.
    isEnabled Boolean
    Whether to enable the content moderation feature.
    mode String
    Enum for the modes of operation for inference protection.
    modelId String
    The OCID of the model used for the feature.
    isEnabled boolean
    Whether to enable the content moderation feature.
    mode string
    Enum for the modes of operation for inference protection.
    modelId string
    The OCID of the model used for the feature.
    is_enabled bool
    Whether to enable the content moderation feature.
    mode str
    Enum for the modes of operation for inference protection.
    model_id str
    The OCID of the model used for the feature.
    isEnabled Boolean
    Whether to enable the content moderation feature.
    mode String
    Enum for the modes of operation for inference protection.
    modelId String
    The OCID of the model used for the feature.

    Package Details

    Repository
    oci pulumi/pulumi-oci
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the oci Terraform Provider.
    oci logo
    Oracle Cloud Infrastructure v3.10.0 published on Wednesday, Nov 5, 2025 by Pulumi
      Meet Neo: Your AI Platform Teammate