DataRobot v0.1.44 published on Monday, Sep 23, 2024 by DataRobot, Inc.

datarobot.LlmBlueprint


    LLMBlueprint

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as datarobot from "@datarobot/pulumi-datarobot";
    
    const exampleUseCase = new datarobot.UseCase("exampleUseCase", {});
    const examplePlayground = new datarobot.Playground("examplePlayground", {
        description: "Description for the example playground",
        useCaseId: exampleUseCase.id,
    });
    const exampleLlmBlueprint = new datarobot.LlmBlueprint("exampleLlmBlueprint", {
        description: "Description for the example LLM blueprint",
        playgroundId: examplePlayground.id,
        llmId: "azure-openai-gpt-3.5-turbo",
        promptType: "ONE_TIME_PROMPT",
    });
    // Optional settings, passed inside the LlmBlueprint arguments:
    // llmSettings: {
    //     maxCompletionLength: 1000,
    //     temperature: 0.5,
    //     topP: 0.9,
    //     systemPrompt: "My Prompt:",
    // },
    // vectorDatabaseSettings: {
    //     maxDocumentsRetrievedPerPrompt: 5,
    //     maxTokens: 1000,
    // },
    export const exampleId = exampleLlmBlueprint.id;
    
    import pulumi
    import pulumi_datarobot as datarobot
    
    example_use_case = datarobot.UseCase("exampleUseCase")
    example_playground = datarobot.Playground("examplePlayground",
        description="Description for the example playground",
        use_case_id=example_use_case.id)
    example_llm_blueprint = datarobot.LlmBlueprint("exampleLlmBlueprint",
        description="Description for the example LLM blueprint",
        playground_id=example_playground.id,
        llm_id="azure-openai-gpt-3.5-turbo",
        prompt_type="ONE_TIME_PROMPT")
    # Optional settings, passed as keyword arguments to LlmBlueprint:
    # llm_settings=datarobot.LlmBlueprintLlmSettingsArgs(
    #     max_completion_length=1000,
    #     temperature=0.5,
    #     top_p=0.9,
    #     system_prompt="My Prompt:",
    # ),
    # vector_database_settings=datarobot.LlmBlueprintVectorDatabaseSettingsArgs(
    #     max_documents_retrieved_per_prompt=5,
    #     max_tokens=1000,
    # ),
    pulumi.export("exampleId", example_llm_blueprint.id)
    
    package main
    
    import (
    	"github.com/datarobot-community/pulumi-datarobot/sdk/go/datarobot"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleUseCase, err := datarobot.NewUseCase(ctx, "exampleUseCase", nil)
    		if err != nil {
    			return err
    		}
    		examplePlayground, err := datarobot.NewPlayground(ctx, "examplePlayground", &datarobot.PlaygroundArgs{
    			Description: pulumi.String("Description for the example playground"),
    			UseCaseId:   exampleUseCase.ID(),
    		})
    		if err != nil {
    			return err
    		}
    		exampleLlmBlueprint, err := datarobot.NewLlmBlueprint(ctx, "exampleLlmBlueprint", &datarobot.LlmBlueprintArgs{
    			Description:  pulumi.String("Description for the example LLM blueprint"),
    			PlaygroundId: examplePlayground.ID(),
    			LlmId:        pulumi.String("azure-openai-gpt-3.5-turbo"),
    			PromptType:   pulumi.String("ONE_TIME_PROMPT"),
    		})
    		if err != nil {
    			return err
    		}
    		ctx.Export("exampleId", exampleLlmBlueprint.ID())
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Datarobot = DataRobotPulumi.Datarobot;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleUseCase = new Datarobot.UseCase("exampleUseCase");
    
        var examplePlayground = new Datarobot.Playground("examplePlayground", new()
        {
            Description = "Description for the example playground",
            UseCaseId = exampleUseCase.Id,
        });
    
        var exampleLlmBlueprint = new Datarobot.LlmBlueprint("exampleLlmBlueprint", new()
        {
            Description = "Description for the example LLM blueprint",
            PlaygroundId = examplePlayground.Id,
            LlmId = "azure-openai-gpt-3.5-turbo",
            PromptType = "ONE_TIME_PROMPT",
        });
    
        // Optional settings, added to the LlmBlueprint arguments:
        // LlmSettings = new Datarobot.Inputs.LlmBlueprintLlmSettingsArgs
        // {
        //     MaxCompletionLength = 1000,
        //     Temperature = 0.5,
        //     TopP = 0.9,
        //     SystemPrompt = "My Prompt:",
        // },
        // VectorDatabaseSettings = new Datarobot.Inputs.LlmBlueprintVectorDatabaseSettingsArgs
        // {
        //     MaxDocumentsRetrievedPerPrompt = 5,
        //     MaxTokens = 1000,
        // },
        return new Dictionary<string, object?>
        {
            ["exampleId"] = exampleLlmBlueprint.Id,
        };
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.datarobot.UseCase;
    import com.pulumi.datarobot.Playground;
    import com.pulumi.datarobot.PlaygroundArgs;
    import com.pulumi.datarobot.LlmBlueprint;
    import com.pulumi.datarobot.LlmBlueprintArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleUseCase = new UseCase("exampleUseCase");
    
            var examplePlayground = new Playground("examplePlayground", PlaygroundArgs.builder()
                .description("Description for the example playground")
                .useCaseId(exampleUseCase.id())
                .build());
    
            var exampleLlmBlueprint = new LlmBlueprint("exampleLlmBlueprint", LlmBlueprintArgs.builder()
                .description("Description for the example LLM blueprint")
                .playgroundId(examplePlayground.id())
                .llmId("azure-openai-gpt-3.5-turbo")
                .promptType("ONE_TIME_PROMPT")
                .build());
    
            // Optional settings, added to the LlmBlueprintArgs builder:
            // .llmSettings(LlmBlueprintLlmSettingsArgs.builder()
            //     .maxCompletionLength(1000)
            //     .temperature(0.5)
            //     .topP(0.9)
            //     .systemPrompt("My Prompt:")
            //     .build())
            // .vectorDatabaseSettings(LlmBlueprintVectorDatabaseSettingsArgs.builder()
            //     .maxDocumentsRetrievedPerPrompt(5)
            //     .maxTokens(1000)
            //     .build())
            ctx.export("exampleId", exampleLlmBlueprint.id());
        }
    }
    
    resources:
      exampleUseCase:
        type: datarobot:UseCase
      examplePlayground:
        type: datarobot:Playground
        properties:
          description: Description for the example playground
          useCaseId: ${exampleUseCase.id}
      exampleLlmBlueprint:
        type: datarobot:LlmBlueprint
        properties:
          description: Description for the example LLM blueprint
          playgroundId: ${examplePlayground.id}
          llmId: azure-openai-gpt-3.5-turbo
          promptType: ONE_TIME_PROMPT
    outputs:
      exampleId: ${exampleLlmBlueprint.id}
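
    The optional llmSettings and vectorDatabaseSettings shown in the comments above can be passed directly to the resource. The TypeScript sketch below applies the same values; it is a sketch only, the resource names are illustrative, and the Vector Database ID is a hypothetical placeholder for an existing Vector Database in your account.

    import * as pulumi from "@pulumi/pulumi";
    import * as datarobot from "@datarobot/pulumi-datarobot";
    
    const tunedUseCase = new datarobot.UseCase("tunedUseCase", {});
    const tunedPlayground = new datarobot.Playground("tunedPlayground", {
        description: "Playground for the tuned LLM blueprint",
        useCaseId: tunedUseCase.id,
    });
    
    const tunedLlmBlueprint = new datarobot.LlmBlueprint("tunedLlmBlueprint", {
        playgroundId: tunedPlayground.id,
        llmId: "azure-openai-gpt-3.5-turbo",
        promptType: "ONE_TIME_PROMPT",
        llmSettings: {
            maxCompletionLength: 1000,
            temperature: 0.5,
            topP: 0.9,
            systemPrompt: "My Prompt:",
        },
        // vectorDatabaseId points at an existing Vector Database;
        // the value below is a hypothetical placeholder.
        vectorDatabaseId: "<your-vector-database-id>",
        vectorDatabaseSettings: {
            maxDocumentsRetrievedPerPrompt: 5,
            maxTokens: 1000,
        },
    });
    
    export const tunedLlmBlueprintId = tunedLlmBlueprint.id;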
    

    Create LlmBlueprint Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new LlmBlueprint(name: string, args: LlmBlueprintArgs, opts?: CustomResourceOptions);
    @overload
    def LlmBlueprint(resource_name: str,
                     args: LlmBlueprintArgs,
                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def LlmBlueprint(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     llm_id: Optional[str] = None,
                     playground_id: Optional[str] = None,
                     description: Optional[str] = None,
                     llm_settings: Optional[LlmBlueprintLlmSettingsArgs] = None,
                     name: Optional[str] = None,
                     prompt_type: Optional[str] = None,
                     vector_database_id: Optional[str] = None,
                     vector_database_settings: Optional[LlmBlueprintVectorDatabaseSettingsArgs] = None)
    func NewLlmBlueprint(ctx *Context, name string, args LlmBlueprintArgs, opts ...ResourceOption) (*LlmBlueprint, error)
    public LlmBlueprint(string name, LlmBlueprintArgs args, CustomResourceOptions? opts = null)
    public LlmBlueprint(String name, LlmBlueprintArgs args)
    public LlmBlueprint(String name, LlmBlueprintArgs args, CustomResourceOptions options)
    
    type: datarobot:LlmBlueprint
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args LlmBlueprintArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args LlmBlueprintArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args LlmBlueprintArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args LlmBlueprintArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args LlmBlueprintArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var llmBlueprintResource = new Datarobot.LlmBlueprint("llmBlueprintResource", new()
    {
        LlmId = "string",
        PlaygroundId = "string",
        Description = "string",
        LlmSettings = new Datarobot.Inputs.LlmBlueprintLlmSettingsArgs
        {
            MaxCompletionLength = 0,
            SystemPrompt = "string",
            Temperature = 0,
            TopP = 0,
        },
        Name = "string",
        PromptType = "string",
        VectorDatabaseId = "string",
        VectorDatabaseSettings = new Datarobot.Inputs.LlmBlueprintVectorDatabaseSettingsArgs
        {
            MaxDocumentsRetrievedPerPrompt = 0,
            MaxTokens = 0,
        },
    });
    
    example, err := datarobot.NewLlmBlueprint(ctx, "llmBlueprintResource", &datarobot.LlmBlueprintArgs{
    	LlmId:        pulumi.String("string"),
    	PlaygroundId: pulumi.String("string"),
    	Description:  pulumi.String("string"),
    	LlmSettings: &datarobot.LlmBlueprintLlmSettingsArgs{
    		MaxCompletionLength: pulumi.Int(0),
    		SystemPrompt:        pulumi.String("string"),
    		Temperature:         pulumi.Float64(0),
    		TopP:                pulumi.Float64(0),
    	},
    	Name:             pulumi.String("string"),
    	PromptType:       pulumi.String("string"),
    	VectorDatabaseId: pulumi.String("string"),
    	VectorDatabaseSettings: &datarobot.LlmBlueprintVectorDatabaseSettingsArgs{
    		MaxDocumentsRetrievedPerPrompt: pulumi.Int(0),
    		MaxTokens:                      pulumi.Int(0),
    	},
    })
    
    var llmBlueprintResource = new LlmBlueprint("llmBlueprintResource", LlmBlueprintArgs.builder()
        .llmId("string")
        .playgroundId("string")
        .description("string")
        .llmSettings(LlmBlueprintLlmSettingsArgs.builder()
            .maxCompletionLength(0)
            .systemPrompt("string")
            .temperature(0)
            .topP(0)
            .build())
        .name("string")
        .promptType("string")
        .vectorDatabaseId("string")
        .vectorDatabaseSettings(LlmBlueprintVectorDatabaseSettingsArgs.builder()
            .maxDocumentsRetrievedPerPrompt(0)
            .maxTokens(0)
            .build())
        .build());
    
    llm_blueprint_resource = datarobot.LlmBlueprint("llmBlueprintResource",
        llm_id="string",
        playground_id="string",
        description="string",
        llm_settings=datarobot.LlmBlueprintLlmSettingsArgs(
            max_completion_length=0,
            system_prompt="string",
            temperature=0,
            top_p=0,
        ),
        name="string",
        prompt_type="string",
        vector_database_id="string",
        vector_database_settings=datarobot.LlmBlueprintVectorDatabaseSettingsArgs(
            max_documents_retrieved_per_prompt=0,
            max_tokens=0,
        ))
    
    const llmBlueprintResource = new datarobot.LlmBlueprint("llmBlueprintResource", {
        llmId: "string",
        playgroundId: "string",
        description: "string",
        llmSettings: {
            maxCompletionLength: 0,
            systemPrompt: "string",
            temperature: 0,
            topP: 0,
        },
        name: "string",
        promptType: "string",
        vectorDatabaseId: "string",
        vectorDatabaseSettings: {
            maxDocumentsRetrievedPerPrompt: 0,
            maxTokens: 0,
        },
    });
    
    type: datarobot:LlmBlueprint
    properties:
        description: string
        llmId: string
        llmSettings:
            maxCompletionLength: 0
            systemPrompt: string
            temperature: 0
            topP: 0
        name: string
        playgroundId: string
        promptType: string
        vectorDatabaseId: string
        vectorDatabaseSettings:
            maxDocumentsRetrievedPerPrompt: 0
            maxTokens: 0
    

    LlmBlueprint Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The LlmBlueprint resource accepts the following input properties:

    LlmId string
    The id of the LLM for the LLM Blueprint.
    PlaygroundId string
    The id of the Playground for the LLM Blueprint.
    Description string
    The description of the LLM Blueprint.
    LlmSettings DataRobotLlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    Name string
    The name of the LLM Blueprint.
    PromptType string
    The prompt type for the LLM Blueprint.
    VectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    VectorDatabaseSettings DataRobotLlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    LlmId string
    The id of the LLM for the LLM Blueprint.
    PlaygroundId string
    The id of the Playground for the LLM Blueprint.
    Description string
    The description of the LLM Blueprint.
    LlmSettings LlmBlueprintLlmSettingsArgs
    The LLM settings for the LLM Blueprint.
    Name string
    The name of the LLM Blueprint.
    PromptType string
    The prompt type for the LLM Blueprint.
    VectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    VectorDatabaseSettings LlmBlueprintVectorDatabaseSettingsArgs
    The Vector Database settings for the LLM Blueprint.
    llmId String
    The id of the LLM for the LLM Blueprint.
    playgroundId String
    The id of the Playground for the LLM Blueprint.
    description String
    The description of the LLM Blueprint.
    llmSettings LlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    name String
    The name of the LLM Blueprint.
    promptType String
    The prompt type for the LLM Blueprint.
    vectorDatabaseId String
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings LlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    llmId string
    The id of the LLM for the LLM Blueprint.
    playgroundId string
    The id of the Playground for the LLM Blueprint.
    description string
    The description of the LLM Blueprint.
    llmSettings LlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    name string
    The name of the LLM Blueprint.
    promptType string
    The prompt type for the LLM Blueprint.
    vectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings LlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    llm_id str
    The id of the LLM for the LLM Blueprint.
    playground_id str
    The id of the Playground for the LLM Blueprint.
    description str
    The description of the LLM Blueprint.
    llm_settings LlmBlueprintLlmSettingsArgs
    The LLM settings for the LLM Blueprint.
    name str
    The name of the LLM Blueprint.
    prompt_type str
    The prompt type for the LLM Blueprint.
    vector_database_id str
    The id of the Vector Database for the LLM Blueprint.
    vector_database_settings LlmBlueprintVectorDatabaseSettingsArgs
    The Vector Database settings for the LLM Blueprint.
    llmId String
    The id of the LLM for the LLM Blueprint.
    playgroundId String
    The id of the Playground for the LLM Blueprint.
    description String
    The description of the LLM Blueprint.
    llmSettings Property Map
    The LLM settings for the LLM Blueprint.
    name String
    The name of the LLM Blueprint.
    promptType String
    The prompt type for the LLM Blueprint.
    vectorDatabaseId String
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings Property Map
    The Vector Database settings for the LLM Blueprint.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the LlmBlueprint resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing LlmBlueprint Resource

    Get an existing LlmBlueprint resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: LlmBlueprintState, opts?: CustomResourceOptions): LlmBlueprint
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            description: Optional[str] = None,
            llm_id: Optional[str] = None,
            llm_settings: Optional[LlmBlueprintLlmSettingsArgs] = None,
            name: Optional[str] = None,
            playground_id: Optional[str] = None,
            prompt_type: Optional[str] = None,
            vector_database_id: Optional[str] = None,
            vector_database_settings: Optional[LlmBlueprintVectorDatabaseSettingsArgs] = None) -> LlmBlueprint
    func GetLlmBlueprint(ctx *Context, name string, id IDInput, state *LlmBlueprintState, opts ...ResourceOption) (*LlmBlueprint, error)
    public static LlmBlueprint Get(string name, Input<string> id, LlmBlueprintState? state, CustomResourceOptions? opts = null)
    public static LlmBlueprint get(String name, Output<String> id, LlmBlueprintState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Description string
    The description of the LLM Blueprint.
    LlmId string
    The id of the LLM for the LLM Blueprint.
    LlmSettings DataRobotLlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    Name string
    The name of the LLM Blueprint.
    PlaygroundId string
    The id of the Playground for the LLM Blueprint.
    PromptType string
    The prompt type for the LLM Blueprint.
    VectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    VectorDatabaseSettings DataRobotLlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    Description string
    The description of the LLM Blueprint.
    LlmId string
    The id of the LLM for the LLM Blueprint.
    LlmSettings LlmBlueprintLlmSettingsArgs
    The LLM settings for the LLM Blueprint.
    Name string
    The name of the LLM Blueprint.
    PlaygroundId string
    The id of the Playground for the LLM Blueprint.
    PromptType string
    The prompt type for the LLM Blueprint.
    VectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    VectorDatabaseSettings LlmBlueprintVectorDatabaseSettingsArgs
    The Vector Database settings for the LLM Blueprint.
    description String
    The description of the LLM Blueprint.
    llmId String
    The id of the LLM for the LLM Blueprint.
    llmSettings LlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    name String
    The name of the LLM Blueprint.
    playgroundId String
    The id of the Playground for the LLM Blueprint.
    promptType String
    The prompt type for the LLM Blueprint.
    vectorDatabaseId String
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings LlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    description string
    The description of the LLM Blueprint.
    llmId string
    The id of the LLM for the LLM Blueprint.
    llmSettings LlmBlueprintLlmSettings
    The LLM settings for the LLM Blueprint.
    name string
    The name of the LLM Blueprint.
    playgroundId string
    The id of the Playground for the LLM Blueprint.
    promptType string
    The prompt type for the LLM Blueprint.
    vectorDatabaseId string
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings LlmBlueprintVectorDatabaseSettings
    The Vector Database settings for the LLM Blueprint.
    description str
    The description of the LLM Blueprint.
    llm_id str
    The id of the LLM for the LLM Blueprint.
    llm_settings LlmBlueprintLlmSettingsArgs
    The LLM settings for the LLM Blueprint.
    name str
    The name of the LLM Blueprint.
    playground_id str
    The id of the Playground for the LLM Blueprint.
    prompt_type str
    The prompt type for the LLM Blueprint.
    vector_database_id str
    The id of the Vector Database for the LLM Blueprint.
    vector_database_settings LlmBlueprintVectorDatabaseSettingsArgs
    The Vector Database settings for the LLM Blueprint.
    description String
    The description of the LLM Blueprint.
    llmId String
    The id of the LLM for the LLM Blueprint.
    llmSettings Property Map
    The LLM settings for the LLM Blueprint.
    name String
    The name of the LLM Blueprint.
    playgroundId String
    The id of the Playground for the LLM Blueprint.
    promptType String
    The prompt type for the LLM Blueprint.
    vectorDatabaseId String
    The id of the Vector Database for the LLM Blueprint.
    vectorDatabaseSettings Property Map
    The Vector Database settings for the LLM Blueprint.
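
    As a brief illustration of the lookup signatures above, the TypeScript sketch below adopts an existing LLM Blueprint by its ID. The ID string is a hypothetical placeholder; state and options are omitted since both are optional.

    import * as datarobot from "@datarobot/pulumi-datarobot";
    
    // Look up an LLM Blueprint that already exists in DataRobot by its ID.
    // "<existing-llm-blueprint-id>" is a hypothetical placeholder.
    const existingLlmBlueprint = datarobot.LlmBlueprint.get(
        "existingLlmBlueprint",
        "<existing-llm-blueprint-id>",
    );
    
    export const existingLlmBlueprintName = existingLlmBlueprint.name;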

    Supporting Types

    LlmBlueprintLlmSettings, LlmBlueprintLlmSettingsArgs

    MaxCompletionLength int
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    SystemPrompt string
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    Temperature double
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    TopP double
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
    MaxCompletionLength int
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    SystemPrompt string
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    Temperature float64
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    TopP float64
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
    maxCompletionLength Integer
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    systemPrompt String
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    temperature Double
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    topP Double
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
    maxCompletionLength number
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    systemPrompt string
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    temperature number
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    topP number
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
    max_completion_length int
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    system_prompt str
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    temperature float
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    top_p float
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
    maxCompletionLength Number
    The maximum number of tokens allowed in the completion. The combined count of this value and prompt tokens must be below the model's maximum context size, where prompt token count is comprised of system prompt, user prompt, recent chat history, and vector database citations.
    systemPrompt String
    Guides the style of the LLM response. It is a 'universal' prompt, prepended to all individual prompts.
    temperature Number
    Controls the randomness of model output, where higher values return more diverse output and lower values return more deterministic results.
    topP Number
    Threshold that controls the selection of words included in the response, based on a cumulative probability cutoff for token selection. Higher numbers return more diverse options for outputs.
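
    To make the temperature / topP trade-off concrete, here is a hedged TypeScript sketch of two llmSettings values, one aimed at repeatable answers and one at more varied output. The specific numbers are illustrative assumptions, not recommendations, and valid ranges depend on the chosen LLM.

    // Illustrative presets only; pass either object as llmSettings on an LlmBlueprint.
    const deterministicSettings = {
        maxCompletionLength: 512,
        systemPrompt: "Answer concisely.",
        temperature: 0.0, // low randomness -> more deterministic output
        topP: 1.0,        // keep the full token distribution available
    };
    
    const creativeSettings = {
        maxCompletionLength: 1000,
        systemPrompt: "Brainstorm freely.",
        temperature: 0.9, // high randomness -> more diverse output
        topP: 0.9,        // sample from the top 90% of cumulative probability
    };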

    LlmBlueprintVectorDatabaseSettings, LlmBlueprintVectorDatabaseSettingsArgs

    MaxDocumentsRetrievedPerPrompt int
    The maximum number of documents to retrieve from the Vector Database.
    MaxTokens int
    The maximum number of tokens to retrieve from the Vector Database.
    MaxDocumentsRetrievedPerPrompt int
    The maximum number of documents to retrieve from the Vector Database.
    MaxTokens int
    The maximum number of tokens to retrieve from the Vector Database.
    maxDocumentsRetrievedPerPrompt Integer
    The maximum number of documents to retrieve from the Vector Database.
    maxTokens Integer
    The maximum number of tokens to retrieve from the Vector Database.
    maxDocumentsRetrievedPerPrompt number
    The maximum number of documents to retrieve from the Vector Database.
    maxTokens number
    The maximum number of tokens to retrieve from the Vector Database.
    max_documents_retrieved_per_prompt int
    The maximum number of documents to retrieve from the Vector Database.
    max_tokens int
    The maximum number of tokens to retrieve from the Vector Database.
    maxDocumentsRetrievedPerPrompt Number
    The maximum number of documents to retrieve from the Vector Database.
    maxTokens Number
    The maximum number of tokens to retrieve from the Vector Database.

    Package Details

    Repository
    datarobot-community/pulumi-datarobot
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the datarobot Terraform Provider.