
databricks.Pipeline


Databricks v1.50.2 published on Tuesday, Sep 24, 2024 by Pulumi

    Use the databricks.Pipeline resource to deploy Delta Live Tables (DLT) pipelines.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";
    
    // Notebook and Repo arguments are elided in this example; a real deployment
    // would supply properties such as the notebook path/source and the repo URL.
    const dltDemo = new databricks.Notebook("dlt_demo", {});
    const dltDemoRepo = new databricks.Repo("dlt_demo", {});
    const _this = new databricks.Pipeline("this", {
        name: "Pipeline Name",
        storage: "/test/first-pipeline",
        configuration: {
            key1: "value1",
            key2: "value2",
        },
        clusters: [
            {
                label: "default",
                numWorkers: 2,
                customTags: {
                    cluster_type: "default",
                },
            },
            {
                label: "maintenance",
                numWorkers: 1,
                customTags: {
                    cluster_type: "maintenance",
                },
            },
        ],
        libraries: [
            {
                notebook: {
                    path: dltDemo.id,
                },
            },
            {
                file: {
                    path: pulumi.interpolate`${dltDemoRepo.path}/pipeline.sql`,
                },
            },
        ],
        continuous: false,
        notifications: [{
            emailRecipients: [
                "user@domain.com",
                "user1@domain.com",
            ],
            alerts: [
                "on-update-failure",
                "on-update-fatal-failure",
                "on-update-success",
                "on-flow-failure",
            ],
        }],
    });
    
    import pulumi
    import pulumi_databricks as databricks
    
    dlt_demo = databricks.Notebook("dlt_demo")
    dlt_demo_repo = databricks.Repo("dlt_demo")
    this = databricks.Pipeline("this",
        name="Pipeline Name",
        storage="/test/first-pipeline",
        configuration={
            "key1": "value1",
            "key2": "value2",
        },
        clusters=[
            {
                "label": "default",
                "num_workers": 2,
                "custom_tags": {
                    "cluster_type": "default",
                },
            },
            {
                "label": "maintenance",
                "num_workers": 1,
                "custom_tags": {
                    "cluster_type": "maintenance",
                },
            },
        ],
        libraries=[
            {
                "notebook": {
                    "path": dlt_demo.id,
                },
            },
            {
                "file": {
                    "path": dlt_demo_repo.path.apply(lambda path: f"{path}/pipeline.sql"),
                },
            },
        ],
        continuous=False,
        notifications=[{
            "email_recipients": [
                "user@domain.com",
                "user1@domain.com",
            ],
            "alerts": [
                "on-update-failure",
                "on-update-fatal-failure",
                "on-update-success",
                "on-flow-failure",
            ],
        }])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		dltDemo, err := databricks.NewNotebook(ctx, "dlt_demo", nil)
    		if err != nil {
    			return err
    		}
    		dltDemoRepo, err := databricks.NewRepo(ctx, "dlt_demo", nil)
    		if err != nil {
    			return err
    		}
    		_, err = databricks.NewPipeline(ctx, "this", &databricks.PipelineArgs{
    			Name:    pulumi.String("Pipeline Name"),
    			Storage: pulumi.String("/test/first-pipeline"),
    			Configuration: pulumi.StringMap{
    				"key1": pulumi.String("value1"),
    				"key2": pulumi.String("value2"),
    			},
    			Clusters: databricks.PipelineClusterArray{
    				&databricks.PipelineClusterArgs{
    					Label:      pulumi.String("default"),
    					NumWorkers: pulumi.Int(2),
    					CustomTags: pulumi.StringMap{
    						"cluster_type": pulumi.String("default"),
    					},
    				},
    				&databricks.PipelineClusterArgs{
    					Label:      pulumi.String("maintenance"),
    					NumWorkers: pulumi.Int(1),
    					CustomTags: pulumi.StringMap{
    						"cluster_type": pulumi.String("maintenance"),
    					},
    				},
    			},
    			Libraries: databricks.PipelineLibraryArray{
    				&databricks.PipelineLibraryArgs{
    					Notebook: &databricks.PipelineLibraryNotebookArgs{
    						Path: dltDemo.ID(),
    					},
    				},
    				&databricks.PipelineLibraryArgs{
    					File: &databricks.PipelineLibraryFileArgs{
    						Path: dltDemoRepo.Path.ApplyT(func(path string) (string, error) {
    							return fmt.Sprintf("%v/pipeline.sql", path), nil
    						}).(pulumi.StringOutput),
    					},
    				},
    			},
    			Continuous: pulumi.Bool(false),
    			Notifications: databricks.PipelineNotificationArray{
    				&databricks.PipelineNotificationArgs{
    					EmailRecipients: pulumi.StringArray{
    						pulumi.String("user@domain.com"),
    						pulumi.String("user1@domain.com"),
    					},
    					Alerts: pulumi.StringArray{
    						pulumi.String("on-update-failure"),
    						pulumi.String("on-update-fatal-failure"),
    						pulumi.String("on-update-success"),
    						pulumi.String("on-flow-failure"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Databricks = Pulumi.Databricks;
    
    return await Deployment.RunAsync(() => 
    {
        var dltDemo = new Databricks.Notebook("dlt_demo");
    
        var dltDemoRepo = new Databricks.Repo("dlt_demo");
    
        var @this = new Databricks.Pipeline("this", new()
        {
            Name = "Pipeline Name",
            Storage = "/test/first-pipeline",
            Configuration = 
            {
                { "key1", "value1" },
                { "key2", "value2" },
            },
            Clusters = new[]
            {
                new Databricks.Inputs.PipelineClusterArgs
                {
                    Label = "default",
                    NumWorkers = 2,
                    CustomTags = 
                    {
                        { "cluster_type", "default" },
                    },
                },
                new Databricks.Inputs.PipelineClusterArgs
                {
                    Label = "maintenance",
                    NumWorkers = 1,
                    CustomTags = 
                    {
                        { "cluster_type", "maintenance" },
                    },
                },
            },
            Libraries = new[]
            {
                new Databricks.Inputs.PipelineLibraryArgs
                {
                    Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                    {
                        Path = dltDemo.Id,
                    },
                },
                new Databricks.Inputs.PipelineLibraryArgs
                {
                    File = new Databricks.Inputs.PipelineLibraryFileArgs
                    {
                        Path = dltDemoRepo.Path.Apply(path => $"{path}/pipeline.sql"),
                    },
                },
            },
            Continuous = false,
            Notifications = new[]
            {
                new Databricks.Inputs.PipelineNotificationArgs
                {
                    EmailRecipients = new[]
                    {
                        "user@domain.com",
                        "user1@domain.com",
                    },
                    Alerts = new[]
                    {
                        "on-update-failure",
                        "on-update-fatal-failure",
                        "on-update-success",
                        "on-flow-failure",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.databricks.Notebook;
    import com.pulumi.databricks.Repo;
    import com.pulumi.databricks.Pipeline;
    import com.pulumi.databricks.PipelineArgs;
    import com.pulumi.databricks.inputs.PipelineClusterArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryNotebookArgs;
    import com.pulumi.databricks.inputs.PipelineLibraryFileArgs;
    import com.pulumi.databricks.inputs.PipelineNotificationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var dltDemo = new Notebook("dltDemo");
    
            var dltDemoRepo = new Repo("dltDemoRepo");
    
            var this_ = new Pipeline("this", PipelineArgs.builder()
                .name("Pipeline Name")
                .storage("/test/first-pipeline")
                .configuration(Map.ofEntries(
                    Map.entry("key1", "value1"),
                    Map.entry("key2", "value2")
                ))
                .clusters(            
                    PipelineClusterArgs.builder()
                        .label("default")
                        .numWorkers(2)
                        .customTags(Map.of("cluster_type", "default"))
                        .build(),
                    PipelineClusterArgs.builder()
                        .label("maintenance")
                        .numWorkers(1)
                        .customTags(Map.of("cluster_type", "maintenance"))
                        .build())
                .libraries(            
                    PipelineLibraryArgs.builder()
                        .notebook(PipelineLibraryNotebookArgs.builder()
                            .path(dltDemo.id())
                            .build())
                        .build(),
                    PipelineLibraryArgs.builder()
                        .file(PipelineLibraryFileArgs.builder()
                            .path(dltDemoRepo.path().applyValue(path -> String.format("%s/pipeline.sql", path)))
                            .build())
                        .build())
                .continuous(false)
                .notifications(PipelineNotificationArgs.builder()
                    .emailRecipients(                
                        "user@domain.com",
                        "user1@domain.com")
                    .alerts(                
                        "on-update-failure",
                        "on-update-fatal-failure",
                        "on-update-success",
                        "on-flow-failure")
                    .build())
                .build());
    
        }
    }
    
    resources:
      dltDemo:
        type: databricks:Notebook
        name: dlt_demo
      dltDemoRepo:
        type: databricks:Repo
        name: dlt_demo
      this:
        type: databricks:Pipeline
        properties:
          name: Pipeline Name
          storage: /test/first-pipeline
          configuration:
            key1: value1
            key2: value2
          clusters:
            - label: default
              numWorkers: 2
              customTags:
                cluster_type: default
            - label: maintenance
              numWorkers: 1
              customTags:
                cluster_type: maintenance
          libraries:
            - notebook:
                path: ${dltDemo.id}
            - file:
                path: ${dltDemoRepo.path}/pipeline.sql
          continuous: false
          notifications:
            - emailRecipients:
                - user@domain.com
                - user1@domain.com
              alerts:
                - on-update-failure
                - on-update-fatal-failure
                - on-update-success
                - on-flow-failure
    
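    Once created, the pipeline exposes its id and url as outputs. A minimal TypeScript sketch, building on the first example above and reusing its _this variable, that exports them as stack outputs:

    // Stack outputs referencing the pipeline declared in the example above.
    export const pipelineId = _this.id;
    export const pipelineUrl = _this.url; // URL of the pipeline in the Databricks workspace
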


    Create Pipeline Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Pipeline(name: string, args?: PipelineArgs, opts?: CustomResourceOptions);
    @overload
    def Pipeline(resource_name: str,
                 args: Optional[PipelineArgs] = None,
                 opts: Optional[ResourceOptions] = None)
    
    @overload
    def Pipeline(resource_name: str,
                 opts: Optional[ResourceOptions] = None,
                 allow_duplicate_names: Optional[bool] = None,
                 catalog: Optional[str] = None,
                 cause: Optional[str] = None,
                 channel: Optional[str] = None,
                 cluster_id: Optional[str] = None,
                 clusters: Optional[Sequence[PipelineClusterArgs]] = None,
                 configuration: Optional[Mapping[str, str]] = None,
                 continuous: Optional[bool] = None,
                 creator_user_name: Optional[str] = None,
                 deployment: Optional[PipelineDeploymentArgs] = None,
                 development: Optional[bool] = None,
                 edition: Optional[str] = None,
                 expected_last_modified: Optional[int] = None,
                 filters: Optional[PipelineFiltersArgs] = None,
                 gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
                 health: Optional[str] = None,
                 ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
                 last_modified: Optional[int] = None,
                 latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
                 libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
                 name: Optional[str] = None,
                 notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
                 photon: Optional[bool] = None,
                 run_as_user_name: Optional[str] = None,
                 serverless: Optional[bool] = None,
                 state: Optional[str] = None,
                 storage: Optional[str] = None,
                 target: Optional[str] = None,
                 trigger: Optional[PipelineTriggerArgs] = None,
                 url: Optional[str] = None)
    func NewPipeline(ctx *Context, name string, args *PipelineArgs, opts ...ResourceOption) (*Pipeline, error)
    public Pipeline(string name, PipelineArgs? args = null, CustomResourceOptions? opts = null)
    public Pipeline(String name, PipelineArgs args)
    public Pipeline(String name, PipelineArgs args, CustomResourceOptions options)
    
    type: databricks:Pipeline
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    TypeScript / JavaScript
    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python
    resource_name str
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#
    name string
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java
    name String
    The unique name of the resource.
    args PipelineArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
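
    The opts / options parameter accepts the standard Pulumi resource options (dependsOn, protect, provider, and so on). A hedged TypeScript sketch, using a hypothetical notebook resource, of passing options to the constructor:

    import * as databricks from "@pulumi/databricks";

    // Hypothetical notebook the pipeline should wait for.
    const sourceNotebook = new databricks.Notebook("source-notebook", {});

    const pipeline = new databricks.Pipeline("example-pipeline", {
        name: "Example Pipeline",
    }, {
        dependsOn: [sourceNotebook], // create the notebook before the pipeline
        protect: true,               // require unprotect before the pipeline can be deleted
    });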

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var pipelineResource = new Databricks.Pipeline("pipelineResource", new()
    {
        AllowDuplicateNames = false,
        Catalog = "string",
        Cause = "string",
        Channel = "string",
        ClusterId = "string",
        Clusters = new[]
        {
            new Databricks.Inputs.PipelineClusterArgs
            {
                ApplyPolicyDefaultValues = false,
                Autoscale = new Databricks.Inputs.PipelineClusterAutoscaleArgs
                {
                    MaxWorkers = 0,
                    MinWorkers = 0,
                    Mode = "string",
                },
                AwsAttributes = new Databricks.Inputs.PipelineClusterAwsAttributesArgs
                {
                    Availability = "string",
                    EbsVolumeCount = 0,
                    EbsVolumeIops = 0,
                    EbsVolumeSize = 0,
                    EbsVolumeThroughput = 0,
                    EbsVolumeType = "string",
                    FirstOnDemand = 0,
                    InstanceProfileArn = "string",
                    SpotBidPricePercent = 0,
                    ZoneId = "string",
                },
                AzureAttributes = new Databricks.Inputs.PipelineClusterAzureAttributesArgs
                {
                    Availability = "string",
                    FirstOnDemand = 0,
                    LogAnalyticsInfo = new Databricks.Inputs.PipelineClusterAzureAttributesLogAnalyticsInfoArgs
                    {
                        LogAnalyticsPrimaryKey = "string",
                        LogAnalyticsWorkspaceId = "string",
                    },
                    SpotBidMaxPrice = 0,
                },
                ClusterLogConf = new Databricks.Inputs.PipelineClusterClusterLogConfArgs
                {
                    Dbfs = new Databricks.Inputs.PipelineClusterClusterLogConfDbfsArgs
                    {
                        Destination = "string",
                    },
                    S3 = new Databricks.Inputs.PipelineClusterClusterLogConfS3Args
                    {
                        Destination = "string",
                        CannedAcl = "string",
                        EnableEncryption = false,
                        EncryptionType = "string",
                        Endpoint = "string",
                        KmsKey = "string",
                        Region = "string",
                    },
                },
                CustomTags = 
                {
                    { "string", "string" },
                },
                DriverInstancePoolId = "string",
                DriverNodeTypeId = "string",
                EnableLocalDiskEncryption = false,
                GcpAttributes = new Databricks.Inputs.PipelineClusterGcpAttributesArgs
                {
                    Availability = "string",
                    GoogleServiceAccount = "string",
                    LocalSsdCount = 0,
                    ZoneId = "string",
                },
                InitScripts = new[]
                {
                    new Databricks.Inputs.PipelineClusterInitScriptArgs
                    {
                        Abfss = new Databricks.Inputs.PipelineClusterInitScriptAbfssArgs
                        {
                            Destination = "string",
                        },
                        File = new Databricks.Inputs.PipelineClusterInitScriptFileArgs
                        {
                            Destination = "string",
                        },
                        Gcs = new Databricks.Inputs.PipelineClusterInitScriptGcsArgs
                        {
                            Destination = "string",
                        },
                        S3 = new Databricks.Inputs.PipelineClusterInitScriptS3Args
                        {
                            Destination = "string",
                            CannedAcl = "string",
                            EnableEncryption = false,
                            EncryptionType = "string",
                            Endpoint = "string",
                            KmsKey = "string",
                            Region = "string",
                        },
                        Volumes = new Databricks.Inputs.PipelineClusterInitScriptVolumesArgs
                        {
                            Destination = "string",
                        },
                        Workspace = new Databricks.Inputs.PipelineClusterInitScriptWorkspaceArgs
                        {
                            Destination = "string",
                        },
                    },
                },
                InstancePoolId = "string",
                Label = "string",
                NodeTypeId = "string",
                NumWorkers = 0,
                PolicyId = "string",
                SparkConf = 
                {
                    { "string", "string" },
                },
                SparkEnvVars = 
                {
                    { "string", "string" },
                },
                SshPublicKeys = new[]
                {
                    "string",
                },
            },
        },
        Configuration = 
        {
            { "string", "string" },
        },
        Continuous = false,
        CreatorUserName = "string",
        Deployment = new Databricks.Inputs.PipelineDeploymentArgs
        {
            Kind = "string",
            MetadataFilePath = "string",
        },
        Development = false,
        Edition = "string",
        ExpectedLastModified = 0,
        Filters = new Databricks.Inputs.PipelineFiltersArgs
        {
            Excludes = new[]
            {
                "string",
            },
            Includes = new[]
            {
                "string",
            },
        },
        GatewayDefinition = new Databricks.Inputs.PipelineGatewayDefinitionArgs
        {
            ConnectionId = "string",
            GatewayStorageCatalog = "string",
            GatewayStorageName = "string",
            GatewayStorageSchema = "string",
        },
        Health = "string",
        IngestionDefinition = new Databricks.Inputs.PipelineIngestionDefinitionArgs
        {
            ConnectionName = "string",
            IngestionGatewayId = "string",
            Objects = new[]
            {
                new Databricks.Inputs.PipelineIngestionDefinitionObjectArgs
                {
                    Schema = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaArgs
                    {
                        DestinationCatalog = "string",
                        DestinationSchema = "string",
                        SourceCatalog = "string",
                        SourceSchema = "string",
                        TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs
                        {
                            PrimaryKeys = new[]
                            {
                                "string",
                            },
                            SalesforceIncludeFormulaFields = false,
                            ScdType = "string",
                        },
                    },
                    Table = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableArgs
                    {
                        DestinationCatalog = "string",
                        DestinationSchema = "string",
                        DestinationTable = "string",
                        SourceCatalog = "string",
                        SourceSchema = "string",
                        SourceTable = "string",
                        TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionObjectTableTableConfigurationArgs
                        {
                            PrimaryKeys = new[]
                            {
                                "string",
                            },
                            SalesforceIncludeFormulaFields = false,
                            ScdType = "string",
                        },
                    },
                },
            },
            TableConfiguration = new Databricks.Inputs.PipelineIngestionDefinitionTableConfigurationArgs
            {
                PrimaryKeys = new[]
                {
                    "string",
                },
                SalesforceIncludeFormulaFields = false,
                ScdType = "string",
            },
        },
        LastModified = 0,
        LatestUpdates = new[]
        {
            new Databricks.Inputs.PipelineLatestUpdateArgs
            {
                CreationTime = "string",
                State = "string",
                UpdateId = "string",
            },
        },
        Libraries = new[]
        {
            new Databricks.Inputs.PipelineLibraryArgs
            {
                File = new Databricks.Inputs.PipelineLibraryFileArgs
                {
                    Path = "string",
                },
                Jar = "string",
                Maven = new Databricks.Inputs.PipelineLibraryMavenArgs
                {
                    Coordinates = "string",
                    Exclusions = new[]
                    {
                        "string",
                    },
                    Repo = "string",
                },
                Notebook = new Databricks.Inputs.PipelineLibraryNotebookArgs
                {
                    Path = "string",
                },
            },
        },
        Name = "string",
        Notifications = new[]
        {
            new Databricks.Inputs.PipelineNotificationArgs
            {
                Alerts = new[]
                {
                    "string",
                },
                EmailRecipients = new[]
                {
                    "string",
                },
            },
        },
        Photon = false,
        RunAsUserName = "string",
        Serverless = false,
        State = "string",
        Storage = "string",
        Target = "string",
        Trigger = new Databricks.Inputs.PipelineTriggerArgs
        {
            Cron = new Databricks.Inputs.PipelineTriggerCronArgs
            {
                QuartzCronSchedule = "string",
                TimezoneId = "string",
            },
            Manual = null,
        },
        Url = "string",
    });
    
    example, err := databricks.NewPipeline(ctx, "pipelineResource", &databricks.PipelineArgs{
    	AllowDuplicateNames: pulumi.Bool(false),
    	Catalog:             pulumi.String("string"),
    	Cause:               pulumi.String("string"),
    	Channel:             pulumi.String("string"),
    	ClusterId:           pulumi.String("string"),
    	Clusters: databricks.PipelineClusterArray{
    		&databricks.PipelineClusterArgs{
    			ApplyPolicyDefaultValues: pulumi.Bool(false),
    			Autoscale: &databricks.PipelineClusterAutoscaleArgs{
    				MaxWorkers: pulumi.Int(0),
    				MinWorkers: pulumi.Int(0),
    				Mode:       pulumi.String("string"),
    			},
    			AwsAttributes: &databricks.PipelineClusterAwsAttributesArgs{
    				Availability:        pulumi.String("string"),
    				EbsVolumeCount:      pulumi.Int(0),
    				EbsVolumeIops:       pulumi.Int(0),
    				EbsVolumeSize:       pulumi.Int(0),
    				EbsVolumeThroughput: pulumi.Int(0),
    				EbsVolumeType:       pulumi.String("string"),
    				FirstOnDemand:       pulumi.Int(0),
    				InstanceProfileArn:  pulumi.String("string"),
    				SpotBidPricePercent: pulumi.Int(0),
    				ZoneId:              pulumi.String("string"),
    			},
    			AzureAttributes: &databricks.PipelineClusterAzureAttributesArgs{
    				Availability:  pulumi.String("string"),
    				FirstOnDemand: pulumi.Int(0),
    				LogAnalyticsInfo: &databricks.PipelineClusterAzureAttributesLogAnalyticsInfoArgs{
    					LogAnalyticsPrimaryKey:  pulumi.String("string"),
    					LogAnalyticsWorkspaceId: pulumi.String("string"),
    				},
    				SpotBidMaxPrice: pulumi.Float64(0),
    			},
    			ClusterLogConf: &databricks.PipelineClusterClusterLogConfArgs{
    				Dbfs: &databricks.PipelineClusterClusterLogConfDbfsArgs{
    					Destination: pulumi.String("string"),
    				},
    				S3: &databricks.PipelineClusterClusterLogConfS3Args{
    					Destination:      pulumi.String("string"),
    					CannedAcl:        pulumi.String("string"),
    					EnableEncryption: pulumi.Bool(false),
    					EncryptionType:   pulumi.String("string"),
    					Endpoint:         pulumi.String("string"),
    					KmsKey:           pulumi.String("string"),
    					Region:           pulumi.String("string"),
    				},
    			},
    			CustomTags: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			DriverInstancePoolId:      pulumi.String("string"),
    			DriverNodeTypeId:          pulumi.String("string"),
    			EnableLocalDiskEncryption: pulumi.Bool(false),
    			GcpAttributes: &databricks.PipelineClusterGcpAttributesArgs{
    				Availability:         pulumi.String("string"),
    				GoogleServiceAccount: pulumi.String("string"),
    				LocalSsdCount:        pulumi.Int(0),
    				ZoneId:               pulumi.String("string"),
    			},
    			InitScripts: databricks.PipelineClusterInitScriptArray{
    				&databricks.PipelineClusterInitScriptArgs{
    					Abfss: &databricks.PipelineClusterInitScriptAbfssArgs{
    						Destination: pulumi.String("string"),
    					},
    					File: &databricks.PipelineClusterInitScriptFileArgs{
    						Destination: pulumi.String("string"),
    					},
    					Gcs: &databricks.PipelineClusterInitScriptGcsArgs{
    						Destination: pulumi.String("string"),
    					},
    					S3: &databricks.PipelineClusterInitScriptS3Args{
    						Destination:      pulumi.String("string"),
    						CannedAcl:        pulumi.String("string"),
    						EnableEncryption: pulumi.Bool(false),
    						EncryptionType:   pulumi.String("string"),
    						Endpoint:         pulumi.String("string"),
    						KmsKey:           pulumi.String("string"),
    						Region:           pulumi.String("string"),
    					},
    					Volumes: &databricks.PipelineClusterInitScriptVolumesArgs{
    						Destination: pulumi.String("string"),
    					},
    					Workspace: &databricks.PipelineClusterInitScriptWorkspaceArgs{
    						Destination: pulumi.String("string"),
    					},
    				},
    			},
    			InstancePoolId: pulumi.String("string"),
    			Label:          pulumi.String("string"),
    			NodeTypeId:     pulumi.String("string"),
    			NumWorkers:     pulumi.Int(0),
    			PolicyId:       pulumi.String("string"),
    			SparkConf: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			SparkEnvVars: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    			SshPublicKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    		},
    	},
    	Configuration: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Continuous:      pulumi.Bool(false),
    	CreatorUserName: pulumi.String("string"),
    	Deployment: &databricks.PipelineDeploymentArgs{
    		Kind:             pulumi.String("string"),
    		MetadataFilePath: pulumi.String("string"),
    	},
    	Development:          pulumi.Bool(false),
    	Edition:              pulumi.String("string"),
    	ExpectedLastModified: pulumi.Int(0),
    	Filters: &databricks.PipelineFiltersArgs{
    		Excludes: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		Includes: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	GatewayDefinition: &databricks.PipelineGatewayDefinitionArgs{
    		ConnectionId:          pulumi.String("string"),
    		GatewayStorageCatalog: pulumi.String("string"),
    		GatewayStorageName:    pulumi.String("string"),
    		GatewayStorageSchema:  pulumi.String("string"),
    	},
    	Health: pulumi.String("string"),
    	IngestionDefinition: &databricks.PipelineIngestionDefinitionArgs{
    		ConnectionName:     pulumi.String("string"),
    		IngestionGatewayId: pulumi.String("string"),
    		Objects: databricks.PipelineIngestionDefinitionObjectArray{
    			&databricks.PipelineIngestionDefinitionObjectArgs{
    				Schema: &databricks.PipelineIngestionDefinitionObjectSchemaArgs{
    					DestinationCatalog: pulumi.String("string"),
    					DestinationSchema:  pulumi.String("string"),
    					SourceCatalog:      pulumi.String("string"),
    					SourceSchema:       pulumi.String("string"),
    					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs{
    						PrimaryKeys: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SalesforceIncludeFormulaFields: pulumi.Bool(false),
    						ScdType:                        pulumi.String("string"),
    					},
    				},
    				Table: &databricks.PipelineIngestionDefinitionObjectTableArgs{
    					DestinationCatalog: pulumi.String("string"),
    					DestinationSchema:  pulumi.String("string"),
    					DestinationTable:   pulumi.String("string"),
    					SourceCatalog:      pulumi.String("string"),
    					SourceSchema:       pulumi.String("string"),
    					SourceTable:        pulumi.String("string"),
    					TableConfiguration: &databricks.PipelineIngestionDefinitionObjectTableTableConfigurationArgs{
    						PrimaryKeys: pulumi.StringArray{
    							pulumi.String("string"),
    						},
    						SalesforceIncludeFormulaFields: pulumi.Bool(false),
    						ScdType:                        pulumi.String("string"),
    					},
    				},
    			},
    		},
    		TableConfiguration: &databricks.PipelineIngestionDefinitionTableConfigurationArgs{
    			PrimaryKeys: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			SalesforceIncludeFormulaFields: pulumi.Bool(false),
    			ScdType:                        pulumi.String("string"),
    		},
    	},
    	LastModified: pulumi.Int(0),
    	LatestUpdates: databricks.PipelineLatestUpdateArray{
    		&databricks.PipelineLatestUpdateArgs{
    			CreationTime: pulumi.String("string"),
    			State:        pulumi.String("string"),
    			UpdateId:     pulumi.String("string"),
    		},
    	},
    	Libraries: databricks.PipelineLibraryArray{
    		&databricks.PipelineLibraryArgs{
    			File: &databricks.PipelineLibraryFileArgs{
    				Path: pulumi.String("string"),
    			},
    			Jar: pulumi.String("string"),
    			Maven: &databricks.PipelineLibraryMavenArgs{
    				Coordinates: pulumi.String("string"),
    				Exclusions: pulumi.StringArray{
    					pulumi.String("string"),
    				},
    				Repo: pulumi.String("string"),
    			},
    			Notebook: &databricks.PipelineLibraryNotebookArgs{
    				Path: pulumi.String("string"),
    			},
    		},
    	},
    	Name: pulumi.String("string"),
    	Notifications: databricks.PipelineNotificationArray{
    		&databricks.PipelineNotificationArgs{
    			Alerts: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			EmailRecipients: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    		},
    	},
    	Photon:        pulumi.Bool(false),
    	RunAsUserName: pulumi.String("string"),
    	Serverless:    pulumi.Bool(false),
    	State:         pulumi.String("string"),
    	Storage:       pulumi.String("string"),
    	Target:        pulumi.String("string"),
    	Trigger: &databricks.PipelineTriggerArgs{
    		Cron: &databricks.PipelineTriggerCronArgs{
    			QuartzCronSchedule: pulumi.String("string"),
    			TimezoneId:         pulumi.String("string"),
    		},
    		Manual: nil,
    	},
    	Url: pulumi.String("string"),
    })
    
    var pipelineResource = new Pipeline("pipelineResource", PipelineArgs.builder()
        .allowDuplicateNames(false)
        .catalog("string")
        .cause("string")
        .channel("string")
        .clusterId("string")
        .clusters(PipelineClusterArgs.builder()
            .applyPolicyDefaultValues(false)
            .autoscale(PipelineClusterAutoscaleArgs.builder()
                .maxWorkers(0)
                .minWorkers(0)
                .mode("string")
                .build())
            .awsAttributes(PipelineClusterAwsAttributesArgs.builder()
                .availability("string")
                .ebsVolumeCount(0)
                .ebsVolumeIops(0)
                .ebsVolumeSize(0)
                .ebsVolumeThroughput(0)
                .ebsVolumeType("string")
                .firstOnDemand(0)
                .instanceProfileArn("string")
                .spotBidPricePercent(0)
                .zoneId("string")
                .build())
            .azureAttributes(PipelineClusterAzureAttributesArgs.builder()
                .availability("string")
                .firstOnDemand(0)
                .logAnalyticsInfo(PipelineClusterAzureAttributesLogAnalyticsInfoArgs.builder()
                    .logAnalyticsPrimaryKey("string")
                    .logAnalyticsWorkspaceId("string")
                    .build())
                .spotBidMaxPrice(0)
                .build())
            .clusterLogConf(PipelineClusterClusterLogConfArgs.builder()
                .dbfs(PipelineClusterClusterLogConfDbfsArgs.builder()
                    .destination("string")
                    .build())
                .s3(PipelineClusterClusterLogConfS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .build())
            .customTags(Map.of("string", "string"))
            .driverInstancePoolId("string")
            .driverNodeTypeId("string")
            .enableLocalDiskEncryption(false)
            .gcpAttributes(PipelineClusterGcpAttributesArgs.builder()
                .availability("string")
                .googleServiceAccount("string")
                .localSsdCount(0)
                .zoneId("string")
                .build())
            .initScripts(PipelineClusterInitScriptArgs.builder()
                .abfss(PipelineClusterInitScriptAbfssArgs.builder()
                    .destination("string")
                    .build())
                .file(PipelineClusterInitScriptFileArgs.builder()
                    .destination("string")
                    .build())
                .gcs(PipelineClusterInitScriptGcsArgs.builder()
                    .destination("string")
                    .build())
                .s3(PipelineClusterInitScriptS3Args.builder()
                    .destination("string")
                    .cannedAcl("string")
                    .enableEncryption(false)
                    .encryptionType("string")
                    .endpoint("string")
                    .kmsKey("string")
                    .region("string")
                    .build())
                .volumes(PipelineClusterInitScriptVolumesArgs.builder()
                    .destination("string")
                    .build())
                .workspace(PipelineClusterInitScriptWorkspaceArgs.builder()
                    .destination("string")
                    .build())
                .build())
            .instancePoolId("string")
            .label("string")
            .nodeTypeId("string")
            .numWorkers(0)
            .policyId("string")
            .sparkConf(Map.of("string", "string"))
            .sparkEnvVars(Map.of("string", "string"))
            .sshPublicKeys("string")
            .build())
        .configuration(Map.of("string", "string"))
        .continuous(false)
        .creatorUserName("string")
        .deployment(PipelineDeploymentArgs.builder()
            .kind("string")
            .metadataFilePath("string")
            .build())
        .development(false)
        .edition("string")
        .expectedLastModified(0)
        .filters(PipelineFiltersArgs.builder()
            .excludes("string")
            .includes("string")
            .build())
        .gatewayDefinition(PipelineGatewayDefinitionArgs.builder()
            .connectionId("string")
            .gatewayStorageCatalog("string")
            .gatewayStorageName("string")
            .gatewayStorageSchema("string")
            .build())
        .health("string")
        .ingestionDefinition(PipelineIngestionDefinitionArgs.builder()
            .connectionName("string")
            .ingestionGatewayId("string")
            .objects(PipelineIngestionDefinitionObjectArgs.builder()
                .schema(PipelineIngestionDefinitionObjectSchemaArgs.builder()
                    .destinationCatalog("string")
                    .destinationSchema("string")
                    .sourceCatalog("string")
                    .sourceSchema("string")
                    .tableConfiguration(PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs.builder()
                        .primaryKeys("string")
                        .salesforceIncludeFormulaFields(false)
                        .scdType("string")
                        .build())
                    .build())
                .table(PipelineIngestionDefinitionObjectTableArgs.builder()
                    .destinationCatalog("string")
                    .destinationSchema("string")
                    .destinationTable("string")
                    .sourceCatalog("string")
                    .sourceSchema("string")
                    .sourceTable("string")
                    .tableConfiguration(PipelineIngestionDefinitionObjectTableTableConfigurationArgs.builder()
                        .primaryKeys("string")
                        .salesforceIncludeFormulaFields(false)
                        .scdType("string")
                        .build())
                    .build())
                .build())
            .tableConfiguration(PipelineIngestionDefinitionTableConfigurationArgs.builder()
                .primaryKeys("string")
                .salesforceIncludeFormulaFields(false)
                .scdType("string")
                .build())
            .build())
        .lastModified(0)
        .latestUpdates(PipelineLatestUpdateArgs.builder()
            .creationTime("string")
            .state("string")
            .updateId("string")
            .build())
        .libraries(PipelineLibraryArgs.builder()
            .file(PipelineLibraryFileArgs.builder()
                .path("string")
                .build())
            .jar("string")
            .maven(PipelineLibraryMavenArgs.builder()
                .coordinates("string")
                .exclusions("string")
                .repo("string")
                .build())
            .notebook(PipelineLibraryNotebookArgs.builder()
                .path("string")
                .build())
            .build())
        .name("string")
        .notifications(PipelineNotificationArgs.builder()
            .alerts("string")
            .emailRecipients("string")
            .build())
        .photon(false)
        .runAsUserName("string")
        .serverless(false)
        .state("string")
        .storage("string")
        .target("string")
        .trigger(PipelineTriggerArgs.builder()
            .cron(PipelineTriggerCronArgs.builder()
                .quartzCronSchedule("string")
                .timezoneId("string")
                .build())
            .manual()
            .build())
        .url("string")
        .build());
    
    pipeline_resource = databricks.Pipeline("pipelineResource",
        allow_duplicate_names=False,
        catalog="string",
        cause="string",
        channel="string",
        cluster_id="string",
        clusters=[databricks.PipelineClusterArgs(
            apply_policy_default_values=False,
            autoscale=databricks.PipelineClusterAutoscaleArgs(
                max_workers=0,
                min_workers=0,
                mode="string",
            ),
            aws_attributes=databricks.PipelineClusterAwsAttributesArgs(
                availability="string",
                ebs_volume_count=0,
                ebs_volume_iops=0,
                ebs_volume_size=0,
                ebs_volume_throughput=0,
                ebs_volume_type="string",
                first_on_demand=0,
                instance_profile_arn="string",
                spot_bid_price_percent=0,
                zone_id="string",
            ),
            azure_attributes=databricks.PipelineClusterAzureAttributesArgs(
                availability="string",
                first_on_demand=0,
                log_analytics_info=databricks.PipelineClusterAzureAttributesLogAnalyticsInfoArgs(
                    log_analytics_primary_key="string",
                    log_analytics_workspace_id="string",
                ),
                spot_bid_max_price=0,
            ),
            cluster_log_conf=databricks.PipelineClusterClusterLogConfArgs(
                dbfs=databricks.PipelineClusterClusterLogConfDbfsArgs(
                    destination="string",
                ),
                s3=databricks.PipelineClusterClusterLogConfS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
            ),
            custom_tags={
                "string": "string",
            },
            driver_instance_pool_id="string",
            driver_node_type_id="string",
            enable_local_disk_encryption=False,
            gcp_attributes=databricks.PipelineClusterGcpAttributesArgs(
                availability="string",
                google_service_account="string",
                local_ssd_count=0,
                zone_id="string",
            ),
            init_scripts=[databricks.PipelineClusterInitScriptArgs(
                abfss=databricks.PipelineClusterInitScriptAbfssArgs(
                    destination="string",
                ),
                file=databricks.PipelineClusterInitScriptFileArgs(
                    destination="string",
                ),
                gcs=databricks.PipelineClusterInitScriptGcsArgs(
                    destination="string",
                ),
                s3=databricks.PipelineClusterInitScriptS3Args(
                    destination="string",
                    canned_acl="string",
                    enable_encryption=False,
                    encryption_type="string",
                    endpoint="string",
                    kms_key="string",
                    region="string",
                ),
                volumes=databricks.PipelineClusterInitScriptVolumesArgs(
                    destination="string",
                ),
                workspace=databricks.PipelineClusterInitScriptWorkspaceArgs(
                    destination="string",
                ),
            )],
            instance_pool_id="string",
            label="string",
            node_type_id="string",
            num_workers=0,
            policy_id="string",
            spark_conf={
                "string": "string",
            },
            spark_env_vars={
                "string": "string",
            },
            ssh_public_keys=["string"],
        )],
        configuration={
            "string": "string",
        },
        continuous=False,
        creator_user_name="string",
        deployment=databricks.PipelineDeploymentArgs(
            kind="string",
            metadata_file_path="string",
        ),
        development=False,
        edition="string",
        expected_last_modified=0,
        filters=databricks.PipelineFiltersArgs(
            excludes=["string"],
            includes=["string"],
        ),
        gateway_definition=databricks.PipelineGatewayDefinitionArgs(
            connection_id="string",
            gateway_storage_catalog="string",
            gateway_storage_name="string",
            gateway_storage_schema="string",
        ),
        health="string",
        ingestion_definition=databricks.PipelineIngestionDefinitionArgs(
            connection_name="string",
            ingestion_gateway_id="string",
            objects=[databricks.PipelineIngestionDefinitionObjectArgs(
                schema=databricks.PipelineIngestionDefinitionObjectSchemaArgs(
                    destination_catalog="string",
                    destination_schema="string",
                    source_catalog="string",
                    source_schema="string",
                    table_configuration=databricks.PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs(
                        primary_keys=["string"],
                        salesforce_include_formula_fields=False,
                        scd_type="string",
                    ),
                ),
                table=databricks.PipelineIngestionDefinitionObjectTableArgs(
                    destination_catalog="string",
                    destination_schema="string",
                    destination_table="string",
                    source_catalog="string",
                    source_schema="string",
                    source_table="string",
                    table_configuration=databricks.PipelineIngestionDefinitionObjectTableTableConfigurationArgs(
                        primary_keys=["string"],
                        salesforce_include_formula_fields=False,
                        scd_type="string",
                    ),
                ),
            )],
            table_configuration=databricks.PipelineIngestionDefinitionTableConfigurationArgs(
                primary_keys=["string"],
                salesforce_include_formula_fields=False,
                scd_type="string",
            ),
        ),
        last_modified=0,
        latest_updates=[databricks.PipelineLatestUpdateArgs(
            creation_time="string",
            state="string",
            update_id="string",
        )],
        libraries=[databricks.PipelineLibraryArgs(
            file=databricks.PipelineLibraryFileArgs(
                path="string",
            ),
            jar="string",
            maven=databricks.PipelineLibraryMavenArgs(
                coordinates="string",
                exclusions=["string"],
                repo="string",
            ),
            notebook=databricks.PipelineLibraryNotebookArgs(
                path="string",
            ),
        )],
        name="string",
        notifications=[databricks.PipelineNotificationArgs(
            alerts=["string"],
            email_recipients=["string"],
        )],
        photon=False,
        run_as_user_name="string",
        serverless=False,
        state="string",
        storage="string",
        target="string",
        trigger=databricks.PipelineTriggerArgs(
            cron=databricks.PipelineTriggerCronArgs(
                quartz_cron_schedule="string",
                timezone_id="string",
            ),
            manual=databricks.PipelineTriggerManualArgs(),
        ),
        url="string")
    
    const pipelineResource = new databricks.Pipeline("pipelineResource", {
        allowDuplicateNames: false,
        catalog: "string",
        cause: "string",
        channel: "string",
        clusterId: "string",
        clusters: [{
            applyPolicyDefaultValues: false,
            autoscale: {
                maxWorkers: 0,
                minWorkers: 0,
                mode: "string",
            },
            awsAttributes: {
                availability: "string",
                ebsVolumeCount: 0,
                ebsVolumeIops: 0,
                ebsVolumeSize: 0,
                ebsVolumeThroughput: 0,
                ebsVolumeType: "string",
                firstOnDemand: 0,
                instanceProfileArn: "string",
                spotBidPricePercent: 0,
                zoneId: "string",
            },
            azureAttributes: {
                availability: "string",
                firstOnDemand: 0,
                logAnalyticsInfo: {
                    logAnalyticsPrimaryKey: "string",
                    logAnalyticsWorkspaceId: "string",
                },
                spotBidMaxPrice: 0,
            },
            clusterLogConf: {
                dbfs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
            },
            customTags: {
                string: "string",
            },
            driverInstancePoolId: "string",
            driverNodeTypeId: "string",
            enableLocalDiskEncryption: false,
            gcpAttributes: {
                availability: "string",
                googleServiceAccount: "string",
                localSsdCount: 0,
                zoneId: "string",
            },
            initScripts: [{
                abfss: {
                    destination: "string",
                },
                file: {
                    destination: "string",
                },
                gcs: {
                    destination: "string",
                },
                s3: {
                    destination: "string",
                    cannedAcl: "string",
                    enableEncryption: false,
                    encryptionType: "string",
                    endpoint: "string",
                    kmsKey: "string",
                    region: "string",
                },
                volumes: {
                    destination: "string",
                },
                workspace: {
                    destination: "string",
                },
            }],
            instancePoolId: "string",
            label: "string",
            nodeTypeId: "string",
            numWorkers: 0,
            policyId: "string",
            sparkConf: {
                string: "string",
            },
            sparkEnvVars: {
                string: "string",
            },
            sshPublicKeys: ["string"],
        }],
        configuration: {
            string: "string",
        },
        continuous: false,
        creatorUserName: "string",
        deployment: {
            kind: "string",
            metadataFilePath: "string",
        },
        development: false,
        edition: "string",
        expectedLastModified: 0,
        filters: {
            excludes: ["string"],
            includes: ["string"],
        },
        gatewayDefinition: {
            connectionId: "string",
            gatewayStorageCatalog: "string",
            gatewayStorageName: "string",
            gatewayStorageSchema: "string",
        },
        health: "string",
        ingestionDefinition: {
            connectionName: "string",
            ingestionGatewayId: "string",
            objects: [{
                schema: {
                    destinationCatalog: "string",
                    destinationSchema: "string",
                    sourceCatalog: "string",
                    sourceSchema: "string",
                    tableConfiguration: {
                        primaryKeys: ["string"],
                        salesforceIncludeFormulaFields: false,
                        scdType: "string",
                    },
                },
                table: {
                    destinationCatalog: "string",
                    destinationSchema: "string",
                    destinationTable: "string",
                    sourceCatalog: "string",
                    sourceSchema: "string",
                    sourceTable: "string",
                    tableConfiguration: {
                        primaryKeys: ["string"],
                        salesforceIncludeFormulaFields: false,
                        scdType: "string",
                    },
                },
            }],
            tableConfiguration: {
                primaryKeys: ["string"],
                salesforceIncludeFormulaFields: false,
                scdType: "string",
            },
        },
        lastModified: 0,
        latestUpdates: [{
            creationTime: "string",
            state: "string",
            updateId: "string",
        }],
        libraries: [{
            file: {
                path: "string",
            },
            jar: "string",
            maven: {
                coordinates: "string",
                exclusions: ["string"],
                repo: "string",
            },
            notebook: {
                path: "string",
            },
        }],
        name: "string",
        notifications: [{
            alerts: ["string"],
            emailRecipients: ["string"],
        }],
        photon: false,
        runAsUserName: "string",
        serverless: false,
        state: "string",
        storage: "string",
        target: "string",
        trigger: {
            cron: {
                quartzCronSchedule: "string",
                timezoneId: "string",
            },
            manual: {},
        },
        url: "string",
    });
    
    type: databricks:Pipeline
    properties:
        allowDuplicateNames: false
        catalog: string
        cause: string
        channel: string
        clusterId: string
        clusters:
            - applyPolicyDefaultValues: false
              autoscale:
                maxWorkers: 0
                minWorkers: 0
                mode: string
              awsAttributes:
                availability: string
                ebsVolumeCount: 0
                ebsVolumeIops: 0
                ebsVolumeSize: 0
                ebsVolumeThroughput: 0
                ebsVolumeType: string
                firstOnDemand: 0
                instanceProfileArn: string
                spotBidPricePercent: 0
                zoneId: string
              azureAttributes:
                availability: string
                firstOnDemand: 0
                logAnalyticsInfo:
                    logAnalyticsPrimaryKey: string
                    logAnalyticsWorkspaceId: string
                spotBidMaxPrice: 0
              clusterLogConf:
                dbfs:
                    destination: string
                s3:
                    cannedAcl: string
                    destination: string
                    enableEncryption: false
                    encryptionType: string
                    endpoint: string
                    kmsKey: string
                    region: string
              customTags:
                string: string
              driverInstancePoolId: string
              driverNodeTypeId: string
              enableLocalDiskEncryption: false
              gcpAttributes:
                availability: string
                googleServiceAccount: string
                localSsdCount: 0
                zoneId: string
              initScripts:
                - abfss:
                    destination: string
                  file:
                    destination: string
                  gcs:
                    destination: string
                  s3:
                    cannedAcl: string
                    destination: string
                    enableEncryption: false
                    encryptionType: string
                    endpoint: string
                    kmsKey: string
                    region: string
                  volumes:
                    destination: string
                  workspace:
                    destination: string
              instancePoolId: string
              label: string
              nodeTypeId: string
              numWorkers: 0
              policyId: string
              sparkConf:
                string: string
              sparkEnvVars:
                string: string
              sshPublicKeys:
                - string
        configuration:
            string: string
        continuous: false
        creatorUserName: string
        deployment:
            kind: string
            metadataFilePath: string
        development: false
        edition: string
        expectedLastModified: 0
        filters:
            excludes:
                - string
            includes:
                - string
        gatewayDefinition:
            connectionId: string
            gatewayStorageCatalog: string
            gatewayStorageName: string
            gatewayStorageSchema: string
        health: string
        ingestionDefinition:
            connectionName: string
            ingestionGatewayId: string
            objects:
                - schema:
                    destinationCatalog: string
                    destinationSchema: string
                    sourceCatalog: string
                    sourceSchema: string
                    tableConfiguration:
                        primaryKeys:
                            - string
                        salesforceIncludeFormulaFields: false
                        scdType: string
                  table:
                    destinationCatalog: string
                    destinationSchema: string
                    destinationTable: string
                    sourceCatalog: string
                    sourceSchema: string
                    sourceTable: string
                    tableConfiguration:
                        primaryKeys:
                            - string
                        salesforceIncludeFormulaFields: false
                        scdType: string
            tableConfiguration:
                primaryKeys:
                    - string
                salesforceIncludeFormulaFields: false
                scdType: string
        lastModified: 0
        latestUpdates:
            - creationTime: string
              state: string
              updateId: string
        libraries:
            - file:
                path: string
              jar: string
              maven:
                coordinates: string
                exclusions:
                    - string
                repo: string
              notebook:
                path: string
        name: string
        notifications:
            - alerts:
                - string
              emailRecipients:
                - string
        photon: false
        runAsUserName: string
        serverless: false
        state: string
        storage: string
        target: string
        trigger:
            cron:
                quartzCronSchedule: string
                timezoneId: string
            manual: {}
        url: string
    

    Pipeline Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Pipeline resource accepts the following input properties:

    AllowDuplicateNames bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Cause string
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    ClusterId string
    Clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    Configuration Dictionary<string, string>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    CreatorUserName string
    Deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    ExpectedLastModified int
    Filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    GatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    Health string
    IngestionDefinition PipelineIngestionDefinition
    LastModified int
    LatestUpdates List<PipelineLatestUpdate>
    Libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications List<PipelineNotification>
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    RunAsUserName string
    Serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    State string
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Trigger PipelineTrigger
    Url string
    URL of the DLT pipeline on the given workspace.
    AllowDuplicateNames bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Cause string
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    ClusterId string
    Clusters []PipelineClusterArgs
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    Configuration map[string]string
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    CreatorUserName string
    Deployment PipelineDeploymentArgs
    Deployment type of this pipeline. Supports following attributes:
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    ExpectedLastModified int
    Filters PipelineFiltersArgs
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    GatewayDefinition PipelineGatewayDefinitionArgs
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    Health string
    IngestionDefinition PipelineIngestionDefinitionArgs
    LastModified int
    LatestUpdates []PipelineLatestUpdateArgs
    Libraries []PipelineLibraryArgs
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications []PipelineNotificationArgs
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    RunAsUserName string
    Serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    State string
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Trigger PipelineTriggerArgs
    Url string
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog String
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause String
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId String
    clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Map<String,String>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName String
    deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified Integer
    filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health String
    ingestionDefinition PipelineIngestionDefinition
    lastModified Integer
    latestUpdates List<PipelineLatestUpdate>
    libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<PipelineNotification>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName String
    serverless Boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state String
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTrigger
    url String
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause string
    channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId string
    clusters PipelineCluster[]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration {[key: string]: string}
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName string
    deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    development boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified number
    filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health string
    ingestionDefinition PipelineIngestionDefinition
    lastModified number
    latestUpdates PipelineLatestUpdate[]
    libraries PipelineLibrary[]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications PipelineNotification[]
    photon boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName string
    serverless boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state string
    storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTrigger
    url string
    URL of the DLT pipeline on the given workspace.
    allow_duplicate_names bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog str
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause str
    channel str
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    cluster_id str
    clusters Sequence[PipelineClusterArgs]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Mapping[str, str]
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creator_user_name str
    deployment PipelineDeploymentArgs
    Deployment type of this pipeline. Supports following attributes:
    development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition str
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expected_last_modified int
    filters PipelineFiltersArgs
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gateway_definition PipelineGatewayDefinitionArgs
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health str
    ingestion_definition PipelineIngestionDefinitionArgs
    last_modified int
    latest_updates Sequence[PipelineLatestUpdateArgs]
    libraries Sequence[PipelineLibraryArgs]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name str
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications Sequence[PipelineNotificationArgs]
    photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    run_as_user_name str
    serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state str
    storage str
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target str
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTriggerArgs
    url str
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog String
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause String
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId String
    clusters List<Property Map>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Map<String>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName String
    deployment Property Map
    Deployment type of this pipeline. Supports following attributes:
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified Number
    filters Property Map
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition Property Map
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health String
    ingestionDefinition Property Map
    lastModified Number
    latestUpdates List<Property Map>
    libraries List<Property Map>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<Property Map>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName String
    serverless Boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state String
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger Property Map
    url String
    URL of the DLT pipeline on the given workspace.
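
    Beyond the storage-based example at the top of this page, the same inputs can be combined for a Unity Catalog pipeline that runs serverless on a schedule. The sketch below is illustrative only: the catalog, target, notebook path, e-mail address, and cron expression are placeholder values, not requirements of the resource.

    import * as databricks from "@pulumi/databricks";

    const nightly = new databricks.Pipeline("nightly", {
        name: "Nightly Sales Pipeline",
        serverless: true,      // requires `catalog`, i.e. Unity Catalog
        catalog: "main",       // conflicts with `storage`
        target: "sales",       // schema that receives the pipeline's output tables
        channel: "CURRENT",
        continuous: false,
        development: false,
        configuration: {
            "pipelines.env": "prod",
        },
        libraries: [{
            notebook: {
                path: "/Repos/data/sales/dlt_notebook", // placeholder path
            },
        }],
        notifications: [{
            emailRecipients: ["data-eng@domain.com"],
            alerts: ["on-update-failure", "on-update-fatal-failure"],
        }],
        trigger: {
            cron: {
                quartzCronSchedule: "0 0 2 * * ?", // run daily at 02:00
                timezoneId: "UTC",
            },
        },
    });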

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Pipeline resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
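
    Output properties can be exported like any other Pulumi outputs. Assuming the hypothetical `nightly` pipeline from the sketch above:

    export const pipelineId = nightly.id;   // provider-assigned resource ID
    export const pipelineUrl = nightly.url; // URL of the pipeline in the workspace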

    Look up Existing Pipeline Resource

    Get an existing Pipeline resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: PipelineState, opts?: CustomResourceOptions): Pipeline
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            allow_duplicate_names: Optional[bool] = None,
            catalog: Optional[str] = None,
            cause: Optional[str] = None,
            channel: Optional[str] = None,
            cluster_id: Optional[str] = None,
            clusters: Optional[Sequence[PipelineClusterArgs]] = None,
            configuration: Optional[Mapping[str, str]] = None,
            continuous: Optional[bool] = None,
            creator_user_name: Optional[str] = None,
            deployment: Optional[PipelineDeploymentArgs] = None,
            development: Optional[bool] = None,
            edition: Optional[str] = None,
            expected_last_modified: Optional[int] = None,
            filters: Optional[PipelineFiltersArgs] = None,
            gateway_definition: Optional[PipelineGatewayDefinitionArgs] = None,
            health: Optional[str] = None,
            ingestion_definition: Optional[PipelineIngestionDefinitionArgs] = None,
            last_modified: Optional[int] = None,
            latest_updates: Optional[Sequence[PipelineLatestUpdateArgs]] = None,
            libraries: Optional[Sequence[PipelineLibraryArgs]] = None,
            name: Optional[str] = None,
            notifications: Optional[Sequence[PipelineNotificationArgs]] = None,
            photon: Optional[bool] = None,
            run_as_user_name: Optional[str] = None,
            serverless: Optional[bool] = None,
            state: Optional[str] = None,
            storage: Optional[str] = None,
            target: Optional[str] = None,
            trigger: Optional[PipelineTriggerArgs] = None,
            url: Optional[str] = None) -> Pipeline
    func GetPipeline(ctx *Context, name string, id IDInput, state *PipelineState, opts ...ResourceOption) (*Pipeline, error)
    public static Pipeline Get(string name, Input<string> id, PipelineState? state, CustomResourceOptions? opts = null)
    public static Pipeline get(String name, Output<String> id, PipelineState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
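
    As an illustration of the lookup signatures above (TypeScript), the pipeline ID would typically come from stack configuration or from another stack's output; the config key used here is arbitrary:

    import * as pulumi from "@pulumi/pulumi";
    import * as databricks from "@pulumi/databricks";

    // Read the ID of an existing DLT pipeline from stack configuration.
    const pipelineId = new pulumi.Config().require("pipelineId");

    // Look up the existing pipeline's state by ID without creating a new one.
    const existing = databricks.Pipeline.get("existing-pipeline", pipelineId);

    export const existingPipelineUrl = existing.url;
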
    The following state arguments are supported:
    AllowDuplicateNames bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Cause string
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    ClusterId string
    Clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    Configuration Dictionary<string, string>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    CreatorUserName string
    Deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    ExpectedLastModified int
    Filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    GatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    Health string
    IngestionDefinition PipelineIngestionDefinition
    LastModified int
    LatestUpdates List<PipelineLatestUpdate>
    Libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications List<PipelineNotification>
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    RunAsUserName string
    Serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    State string
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Trigger PipelineTrigger
    Url string
    URL of the DLT pipeline on the given workspace.
    AllowDuplicateNames bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    Catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    Cause string
    Channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    ClusterId string
    Clusters []PipelineClusterArgs
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    Configuration map[string]string
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    Continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    CreatorUserName string
    Deployment PipelineDeploymentArgs
    Deployment type of this pipeline. Supports following attributes:
    Development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    Edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    ExpectedLastModified int
    Filters PipelineFiltersArgs
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    GatewayDefinition PipelineGatewayDefinitionArgs
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    Health string
    IngestionDefinition PipelineIngestionDefinitionArgs
    LastModified int
    LatestUpdates []PipelineLatestUpdateArgs
    Libraries []PipelineLibraryArgs
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    Name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    Notifications []PipelineNotificationArgs
    Photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    RunAsUserName string
    Serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    State string
    Storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    Target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    Trigger PipelineTriggerArgs
    Url string
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog String
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause String
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId String
    clusters List<PipelineCluster>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Map<String,String>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName String
    deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified Integer
    filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health String
    ingestionDefinition PipelineIngestionDefinition
    lastModified Integer
    latestUpdates List<PipelineLatestUpdate>
    libraries List<PipelineLibrary>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<PipelineNotification>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName String
    serverless Boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state String
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTrigger
    url String
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog string
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause string
    channel string
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId string
    clusters PipelineCluster[]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration {[key: string]: string}
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName string
    deployment PipelineDeployment
    Deployment type of this pipeline. Supports following attributes:
    development boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition string
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified number
    filters PipelineFilters
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition PipelineGatewayDefinition
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health string
    ingestionDefinition PipelineIngestionDefinition
    lastModified number
    latestUpdates PipelineLatestUpdate[]
    libraries PipelineLibrary[]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name string
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications PipelineNotification[]
    photon boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName string
    serverless boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state string
    storage string
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target string
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTrigger
    url string
    URL of the DLT pipeline on the given workspace.
    allow_duplicate_names bool
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog str
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause str
    channel str
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    cluster_id str
    clusters Sequence[PipelineClusterArgs]
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Mapping[str, str]
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous bool
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creator_user_name str
    deployment PipelineDeploymentArgs
    Deployment type of this pipeline. Supports following attributes:
    development bool
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition str
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expected_last_modified int
    filters PipelineFiltersArgs
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gateway_definition PipelineGatewayDefinitionArgs
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health str
    ingestion_definition PipelineIngestionDefinitionArgs
    last_modified int
    latest_updates Sequence[PipelineLatestUpdateArgs]
    libraries Sequence[PipelineLibraryArgs]
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name str
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications Sequence[PipelineNotificationArgs]
    photon bool
    A flag indicating whether to use Photon engine. The default value is false.
    run_as_user_name str
    serverless bool
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state str
    storage str
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target str
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger PipelineTriggerArgs
    url str
    URL of the DLT pipeline on the given workspace.
    allowDuplicateNames Boolean
    Optional boolean flag. If false, deployment will fail if the name conflicts with that of another pipeline. The default is false.
    catalog String
    The name of catalog in Unity Catalog. Change of this parameter forces recreation of the pipeline. (Conflicts with storage).
    cause String
    channel String
    Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: CURRENT (default) and PREVIEW.
    clusterId String
    clusters List<Property Map>
    blocks - Clusters to run the pipeline. If none is specified, a default cluster configuration is selected automatically. Note that DLT pipeline clusters support only a subset of the attributes described in the documentation, and that the autoscale block is extended with a mode parameter that controls the autoscaling algorithm (possible values are ENHANCED for the new, enhanced autoscaling algorithm, or LEGACY for the old one).
    configuration Map<String>
    An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
    continuous Boolean
    A flag indicating whether to run the pipeline continuously. The default value is false.
    creatorUserName String
    deployment Property Map
    Deployment type of this pipeline. Supports following attributes:
    development Boolean
    A flag indicating whether to run the pipeline in development mode. The default value is false.
    edition String
    Optional name of the product edition. Supported values are: CORE, PRO, ADVANCED (default). Not required when serverless is set to true.
    expectedLastModified Number
    filters Property Map
    Filters on which Pipeline packages to include in the deployed graph. This block consists of following attributes:
    gatewayDefinition Property Map
    The definition of a gateway pipeline to support CDC. Consists of following attributes:
    health String
    ingestionDefinition Property Map
    lastModified Number
    latestUpdates List<Property Map>
    libraries List<Property Map>
    blocks - Specifies pipeline code and required artifacts. The syntax resembles the library configuration block, with the addition of the special notebook and file library types, each of which takes a path attribute. Currently only the notebook and file types are supported.
    name String
    A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
    notifications List<Property Map>
    photon Boolean
    A flag indicating whether to use Photon engine. The default value is false.
    runAsUserName String
    serverless Boolean
    An optional flag indicating whether serverless compute should be used for this DLT pipeline. Requires catalog to be set, since serverless compute can only be used with Unity Catalog.
    state String
    storage String
    A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. Change of this parameter forces recreation of the pipeline. (Conflicts with catalog).
    target String
    The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
    trigger Property Map
    url String
    URL of the DLT pipeline on the given workspace.

    Supporting Types

    PipelineCluster, PipelineClusterArgs

    PipelineClusterAutoscale, PipelineClusterAutoscaleArgs

    maxWorkers Integer
    minWorkers Integer
    mode String
    maxWorkers number
    minWorkers number
    mode string
    maxWorkers Number
    minWorkers Number
    mode String
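
    The mode field selects the autoscaling algorithm mentioned in the clusters description above. A minimal TypeScript sketch, assuming a placeholder storage path and notebook path:

    const autoscaled = new databricks.Pipeline("autoscaled", {
        name: "Autoscaled Pipeline",
        storage: "/pipelines/autoscaled", // or use `catalog` for Unity Catalog
        clusters: [{
            label: "default",
            autoscale: {
                minWorkers: 1,
                maxWorkers: 8,
                mode: "ENHANCED", // or "LEGACY" for the old algorithm
            },
        }],
        libraries: [{
            notebook: {
                path: "/Repos/data/autoscaled/dlt_notebook", // placeholder path
            },
        }],
    });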

    PipelineClusterAwsAttributes, PipelineClusterAwsAttributesArgs

    PipelineClusterAzureAttributes, PipelineClusterAzureAttributesArgs

    PipelineClusterAzureAttributesLogAnalyticsInfo, PipelineClusterAzureAttributesLogAnalyticsInfoArgs

    PipelineClusterClusterLogConf, PipelineClusterClusterLogConfArgs

    PipelineClusterClusterLogConfDbfs, PipelineClusterClusterLogConfDbfsArgs

    PipelineClusterClusterLogConfS3, PipelineClusterClusterLogConfS3Args

    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
    destination string
    cannedAcl string
    enableEncryption boolean
    encryptionType string
    endpoint string
    kmsKey string
    region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
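
    These attributes mirror the S3 log delivery settings of regular clusters. A sketch of shipping pipeline cluster logs to S3; the bucket, region, instance profile, and paths are placeholders:

    const logged = new databricks.Pipeline("logged", {
        name: "Pipeline With S3 Log Delivery",
        storage: "/pipelines/logged",
        clusters: [{
            label: "default",
            numWorkers: 2,
            // The cluster typically also needs an instance profile that can write to the bucket.
            awsAttributes: {
                instanceProfileArn: "arn:aws:iam::123456789012:instance-profile/dlt-logs", // placeholder
            },
            clusterLogConf: {
                s3: {
                    destination: "s3://my-bucket/dlt-logs", // placeholder bucket
                    region: "us-east-1",
                    enableEncryption: true,
                },
            },
        }],
        libraries: [{
            notebook: {
                path: "/Repos/data/logged/dlt_notebook", // placeholder path
            },
        }],
    });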

    PipelineClusterGcpAttributes, PipelineClusterGcpAttributesArgs

    PipelineClusterInitScript, PipelineClusterInitScriptArgs

    abfss Property Map
    dbfs Property Map

    Deprecated: For init scripts use 'volumes', 'workspace' or cloud storage location instead of 'dbfs'.

    file Property Map
    gcs Property Map
    s3 Property Map
    volumes Property Map
    workspace Property Map
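
    Following the deprecation note above, new pipelines would point init scripts at a Unity Catalog volume or a workspace file rather than DBFS. A sketch with placeholder catalog, schema, volume, and notebook paths:

    const withInit = new databricks.Pipeline("with_init", {
        name: "Pipeline With Init Script",
        catalog: "main", // placeholder Unity Catalog name
        target: "etl",
        clusters: [{
            label: "default",
            numWorkers: 1,
            initScripts: [{
                volumes: {
                    destination: "/Volumes/main/default/scripts/install_deps.sh", // placeholder volume path
                },
            }],
        }],
        libraries: [{
            notebook: {
                path: "/Repos/data/etl/dlt_notebook", // placeholder path
            },
        }],
    });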

    PipelineClusterInitScriptAbfss, PipelineClusterInitScriptAbfssArgs

    PipelineClusterInitScriptDbfs, PipelineClusterInitScriptDbfsArgs

    PipelineClusterInitScriptFile, PipelineClusterInitScriptFileArgs

    PipelineClusterInitScriptGcs, PipelineClusterInitScriptGcsArgs

    PipelineClusterInitScriptS3, PipelineClusterInitScriptS3Args

    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    Destination string
    CannedAcl string
    EnableEncryption bool
    EncryptionType string
    Endpoint string
    KmsKey string
    Region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String
    destination string
    cannedAcl string
    enableEncryption boolean
    encryptionType string
    endpoint string
    kmsKey string
    region string
    destination String
    cannedAcl String
    enableEncryption Boolean
    encryptionType String
    endpoint String
    kmsKey String
    region String

    PipelineClusterInitScriptVolumes, PipelineClusterInitScriptVolumesArgs

    PipelineClusterInitScriptWorkspace, PipelineClusterInitScriptWorkspaceArgs

    PipelineDeployment, PipelineDeploymentArgs

    Kind string
    The deployment method that manages the pipeline.
    MetadataFilePath string
    The path to the file containing metadata about the deployment.
    Kind string
    The deployment method that manages the pipeline.
    MetadataFilePath string
    The path to the file containing metadata about the deployment.
    kind String
    The deployment method that manages the pipeline.
    metadataFilePath String
    The path to the file containing metadata about the deployment.
    kind string
    The deployment method that manages the pipeline.
    metadataFilePath string
    The path to the file containing metadata about the deployment.
    kind str
    The deployment method that manages the pipeline.
    metadata_file_path str
    The path to the file containing metadata about the deployment.
    kind String
    The deployment method that manages the pipeline.
    metadataFilePath String
    The path to the file containing metadata about the deployment.

    PipelineFilters, PipelineFiltersArgs

    Excludes List<string>
    Paths to exclude.
    Includes List<string>
    Paths to include.
    Excludes []string
    Paths to exclude.
    Includes []string
    Paths to include.
    excludes List<String>
    Paths to exclude.
    includes List<String>
    Paths to include.
    excludes string[]
    Paths to exclude.
    includes string[]
    Paths to include.
    excludes Sequence[str]
    Paths to exclude.
    includes Sequence[str]
    Paths to include.
    excludes List<String>
    Paths to exclude.
    includes List<String>
    Paths to include.
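
    A short TypeScript sketch of these include/exclude paths, assuming the pipeline exposes a filters attribute (inferred from the type name); the path values are placeholders:

    import * as databricks from "@pulumi/databricks";

    // Sketch: limit which flows a pipeline processes (paths are placeholders).
    const filtered = new databricks.Pipeline("filtered", {
        name: "Filtered Pipeline",
        filters: {                                 // attribute name inferred from PipelineFilters
            includes: ["com.example.include.*"],   // hypothetical paths to include
            excludes: ["com.example.exclude.*"],   // hypothetical paths to exclude
        },
    });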

    PipelineGatewayDefinition, PipelineGatewayDefinitionArgs

    ConnectionId string
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    GatewayStorageCatalog string
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    GatewayStorageName string
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    GatewayStorageSchema string
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
    ConnectionId string
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    GatewayStorageCatalog string
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    GatewayStorageName string
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    GatewayStorageSchema string
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
    connectionId String
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    gatewayStorageCatalog String
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    gatewayStorageName String
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    gatewayStorageSchema String
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
    connectionId string
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    gatewayStorageCatalog string
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    gatewayStorageName string
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    gatewayStorageSchema string
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
    connection_id str
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    gateway_storage_catalog str
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    gateway_storage_name str
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    gateway_storage_schema str
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
    connectionId String
    Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
    gatewayStorageCatalog String
    Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
    gatewayStorageName String
    Required. The Unity Catalog-compatible name for the gateway storage location. This is the destination for the data extracted by the gateway. The Delta Live Tables system automatically creates the storage location under the catalog and schema.
    gatewayStorageSchema String
    Required, Immutable. The name of the schema for the gateway pipeline's storage location.
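
    A hedged TypeScript sketch of a gateway pipeline using the four fields above; the gatewayDefinition attribute name is inferred from the type name, and the connection, catalog, schema, and storage names are placeholders:

    import * as databricks from "@pulumi/databricks";

    // Sketch: a gateway pipeline that stages data extracted from an external source
    // into a Unity Catalog catalog/schema (all identifiers are placeholders).
    const gateway = new databricks.Pipeline("gateway", {
        name: "Ingestion Gateway",
        gatewayDefinition: {                       // attribute name inferred from PipelineGatewayDefinition
            connectionId: "my-uc-connection-id",
            gatewayStorageCatalog: "main",
            gatewayStorageSchema: "ingest_staging",
            gatewayStorageName: "my-gateway-storage",
        },
    });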

    PipelineIngestionDefinition, PipelineIngestionDefinitionArgs

    PipelineIngestionDefinitionObject, PipelineIngestionDefinitionObjectArgs

    PipelineIngestionDefinitionObjectSchema, PipelineIngestionDefinitionObjectSchemaArgs

    PipelineIngestionDefinitionObjectSchemaTableConfiguration, PipelineIngestionDefinitionObjectSchemaTableConfigurationArgs

    PipelineIngestionDefinitionObjectTable, PipelineIngestionDefinitionObjectTableArgs

    PipelineIngestionDefinitionObjectTableTableConfiguration, PipelineIngestionDefinitionObjectTableTableConfigurationArgs

    PipelineIngestionDefinitionTableConfiguration, PipelineIngestionDefinitionTableConfigurationArgs

    PipelineLatestUpdate, PipelineLatestUpdateArgs

    CreationTime string
    State string
    UpdateId string
    CreationTime string
    State string
    UpdateId string
    creationTime String
    state String
    updateId String
    creationTime string
    state string
    updateId string
    creationTime String
    state String
    updateId String

    PipelineLibrary, PipelineLibraryArgs

    File PipelineLibraryFile
    Jar string
    Maven PipelineLibraryMaven
    Notebook PipelineLibraryNotebook
    Whl string

    Deprecated: The 'whl' field is deprecated

    File PipelineLibraryFile
    Jar string
    Maven PipelineLibraryMaven
    Notebook PipelineLibraryNotebook
    Whl string

    Deprecated: The 'whl' field is deprecated

    file PipelineLibraryFile
    jar String
    maven PipelineLibraryMaven
    notebook PipelineLibraryNotebook
    whl String

    Deprecated: The 'whl' field is deprecated

    file PipelineLibraryFile
    jar string
    maven PipelineLibraryMaven
    notebook PipelineLibraryNotebook
    whl string

    Deprecated: The 'whl' field is deprecated

    file Property Map
    jar String
    maven Property Map
    notebook Property Map
    whl String

    Deprecated: The 'whl' field is deprecated

    PipelineLibraryFile, PipelineLibraryFileArgs

    Path string
    Path string
    path String
    path string
    path str
    path String

    PipelineLibraryMaven, PipelineLibraryMavenArgs

    Coordinates string
    Exclusions List<string>
    Repo string
    Coordinates string
    Exclusions []string
    Repo string
    coordinates String
    exclusions List<String>
    repo String
    coordinates string
    exclusions string[]
    repo string
    coordinates str
    exclusions Sequence[str]
    repo str
    coordinates String
    exclusions List<String>
    repo String
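
    A TypeScript sketch attaching a Maven artifact to a pipeline via these fields; whether your workspace accepts Maven libraries for DLT pipelines should be verified, and the coordinates, exclusions, and repo URL below are placeholders:

    import * as databricks from "@pulumi/databricks";

    // Sketch: add a Maven library to a pipeline (coordinates and repo are placeholders).
    const withMaven = new databricks.Pipeline("with_maven", {
        name: "Pipeline With Maven Library",
        libraries: [{
            maven: {
                coordinates: "com.example:my-connector:1.2.3",
                exclusions: ["org.slf4j:slf4j-log4j12"],
                repo: "https://repo1.maven.org/maven2",
            },
        }],
    });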

    PipelineLibraryNotebook, PipelineLibraryNotebookArgs

    Path string
    Path string
    path String
    path string
    path str
    path String

    PipelineNotification, PipelineNotificationArgs

    Alerts List<string>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    EmailRecipients List<string>
    Non-empty list of email addresses to notify.
    Alerts []string
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    EmailRecipients []string
    Non-empty list of email addresses to notify.
    alerts List<String>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients List<String>
    Non-empty list of email addresses to notify.
    alerts string[]
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients string[]
    Non-empty list of email addresses to notify.
    alerts Sequence[str]
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    email_recipients Sequence[str]
    Non-empty list of email addresses to notify.
    alerts List<String>
    Non-empty list of alert types. The following alert types are currently supported; consult the documentation for the definitive list:

    • on-update-success - a pipeline update completes successfully.
    • on-update-failure - a pipeline update fails with a retryable error.
    • on-update-fatal-failure - a pipeline update fails with a non-retryable (fatal) error.
    • on-flow-failure - a single data flow fails.
    emailRecipients List<String>
    Non-empty list of email addresses to notify.

    PipelineTrigger, PipelineTriggerArgs

    PipelineTriggerCron, PipelineTriggerCronArgs

    Import

    The resource pipeline can be imported using the ID of the pipeline:

    bash

    $ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    databricks pulumi/pulumi-databricks
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the databricks Terraform Provider.