MongoDB Atlas v3.18.0 published on Thursday, Sep 12, 2024 by Pulumi
mongodbatlas.getDataLakePipelineRun
Explore with Pulumi AI
# Data Source: mongodbatlas.getDataLakePipelineRun
mongodbatlas.getDataLakePipelineRun
describes a Data Lake Pipeline Run.
NOTE: Groups and projects are synonymous terms. You may find
groupId
in the official documentation.
Example Usage
Coming soon!
Coming soon!
Coming soon!
Coming soon!
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.mongodbatlas.DataLakePipeline;
import com.pulumi.mongodbatlas.DataLakePipelineArgs;
import com.pulumi.mongodbatlas.MongodbatlasFunctions;
import com.pulumi.mongodbatlas.inputs.DataLakePipelineSinkArgs;
import com.pulumi.mongodbatlas.inputs.DataLakePipelineSinkPartitionFieldArgs;
import com.pulumi.mongodbatlas.inputs.DataLakePipelineSourceArgs;
import com.pulumi.mongodbatlas.inputs.DataLakePipelineTransformationArgs;
import com.pulumi.mongodbatlas.inputs.GetDataLakePipelineRunArgs;
import com.pulumi.mongodbatlas.inputs.GetDataLakePipelineRunsArgs;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Example program: creates a Data Lake Pipeline, lists its runs, and then
 * describes a single run with the getDataLakePipelineRun data source.
 *
 * NOTE(review): {@code projectTest} and {@code clusterTest} are placeholder
 * references to resources assumed to be declared elsewhere in the program —
 * they are not defined in this snippet.
 */
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Create the Data Lake Pipeline whose runs are queried below.
        var pipeline = new DataLakePipeline("pipeline", DataLakePipelineArgs.builder()
            .projectId(projectTest.projectId())
            .name("DataLakePipelineName")
            .sink(DataLakePipelineSinkArgs.builder()
                .type("DLS")
                .partitionFields(DataLakePipelineSinkPartitionFieldArgs.builder()
                    .name("access")
                    .order(0)
                    .build())
                .build())
            .source(DataLakePipelineSourceArgs.builder()
                .type("ON_DEMAND_CPS")
                .clusterName(clusterTest.name())
                .databaseName("sample_airbnb")
                .collectionName("listingsAndReviews")
                .build())
            .transformations(
                DataLakePipelineTransformationArgs.builder()
                    .field("test")
                    .type("EXCLUDE")
                    .build(),
                DataLakePipelineTransformationArgs.builder()
                    .field("test22")
                    .type("EXCLUDE")
                    .build())
            .build());

        // List every run of the pipeline created above.
        final var pipelineRun = MongodbatlasFunctions.getDataLakePipelineRuns(GetDataLakePipelineRunsArgs.builder()
            .projectId(projectTest.projectId())
            .name(pipeline.name())
            .build());

        // Describe the first run from the listing above.
        // Fix: the generated example referenced an undefined variable
        // (`pipelineRunMongodbatlasDataLakePipelineRuns`) and used array indexing
        // (`results()[0]`) on a List-returning accessor; use the `pipelineRun`
        // result declared above and List#get instead.
        // NOTE(review): the invoke returns a future/wrapped result — unwrapping
        // (e.g. via thenApply/applyValue) may be required in a real program; confirm
        // against the Pulumi Java SDK.
        final var test = MongodbatlasFunctions.getDataLakePipelineRun(GetDataLakePipelineRunArgs.builder()
            .projectId(projectTest.projectId())
            .pipelineName(pipeline.name())
            .pipelineRunId(pipelineRun.results().get(0).pipelineRunId())
            .build());
    }
}
resources:
pipeline:
type: mongodbatlas:DataLakePipeline
properties:
projectId: ${projectTest.projectId}
name: DataLakePipelineName
sink:
type: DLS
partitionFields:
- name: access
order: 0
source:
type: ON_DEMAND_CPS
clusterName: ${clusterTest.name}
databaseName: sample_airbnb
collectionName: listingsAndReviews
transformations:
- field: test
type: EXCLUDE
- field: test22
type: EXCLUDE
variables:
pipelineRun:
fn::invoke:
Function: mongodbatlas:getDataLakePipelineRuns
Arguments:
projectId: ${projectTest.projectId}
name: ${pipeline.name}
test:
fn::invoke:
Function: mongodbatlas:getDataLakePipelineRun
Arguments:
projectId: ${projectTest.projectId}
pipelineName: ${pipeline.name}
pipelineRunId: ${pipelineRun.results[0].pipelineRunId}
Using getDataLakePipelineRun
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getDataLakePipelineRun(args: GetDataLakePipelineRunArgs, opts?: InvokeOptions): Promise<GetDataLakePipelineRunResult>
function getDataLakePipelineRunOutput(args: GetDataLakePipelineRunOutputArgs, opts?: InvokeOptions): Output<GetDataLakePipelineRunResult>
def get_data_lake_pipeline_run(pipeline_name: Optional[str] = None,
pipeline_run_id: Optional[str] = None,
project_id: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetDataLakePipelineRunResult
def get_data_lake_pipeline_run_output(pipeline_name: Optional[pulumi.Input[str]] = None,
pipeline_run_id: Optional[pulumi.Input[str]] = None,
project_id: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetDataLakePipelineRunResult]
func GetDataLakePipelineRun(ctx *Context, args *GetDataLakePipelineRunArgs, opts ...InvokeOption) (*GetDataLakePipelineRunResult, error)
func GetDataLakePipelineRunOutput(ctx *Context, args *GetDataLakePipelineRunOutputArgs, opts ...InvokeOption) GetDataLakePipelineRunResultOutput
> Note: This function is named GetDataLakePipelineRun
in the Go SDK.
public static class GetDataLakePipelineRun
{
public static Task<GetDataLakePipelineRunResult> InvokeAsync(GetDataLakePipelineRunArgs args, InvokeOptions? opts = null)
public static Output<GetDataLakePipelineRunResult> Invoke(GetDataLakePipelineRunInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetDataLakePipelineRunResult> getDataLakePipelineRun(GetDataLakePipelineRunArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
function: mongodbatlas:index/getDataLakePipelineRun:getDataLakePipelineRun
arguments:
# arguments dictionary
The following arguments are supported:
- Pipeline
Name string - Human-readable label that identifies the Data Lake Pipeline.
- Pipeline
Run stringId - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- Project
Id string - Unique 24-hexadecimal digit string that identifies your project.
- Pipeline
Name string - Human-readable label that identifies the Data Lake Pipeline.
- Pipeline
Run stringId - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- Project
Id string - Unique 24-hexadecimal digit string that identifies your project.
- pipeline
Name String - Human-readable label that identifies the Data Lake Pipeline.
- pipeline
Run StringId - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- project
Id String - Unique 24-hexadecimal digit string that identifies your project.
- pipeline
Name string - Human-readable label that identifies the Data Lake Pipeline.
- pipeline
Run stringId - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- project
Id string - Unique 24-hexadecimal digit string that identifies your project.
- pipeline_
name str - Human-readable label that identifies the Data Lake Pipeline.
- pipeline_
run_ strid - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- project_
id str - Unique 24-hexadecimal digit string that identifies your project.
- pipeline
Name String - Human-readable label that identifies the Data Lake Pipeline.
- pipeline
Run StringId - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- project
Id String - Unique 24-hexadecimal digit string that identifies your project.
getDataLakePipelineRun Result
The following output properties are available:
- Backup
Frequency stringType - Backup schedule interval of the Data Lake Pipeline.
- Created
Date string - Timestamp that indicates when the pipeline run was created.
- Dataset
Name string - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- Id string
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- Last
Updated stringDate - Timestamp that indicates the last time the pipeline run was updated.
- Phase string
- Processing phase of the Data Lake Pipeline.
- Pipeline
Id string - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- Pipeline
Name string - Pipeline
Run stringId - Project
Id string - Snapshot
Id string - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- State string
- State of the pipeline run.
- Stats
List<Get
Data Lake Pipeline Run Stat> - Runtime statistics for this Data Lake Pipeline run.
- Backup
Frequency stringType - Backup schedule interval of the Data Lake Pipeline.
- Created
Date string - Timestamp that indicates when the pipeline run was created.
- Dataset
Name string - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- Id string
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- Last
Updated stringDate - Timestamp that indicates the last time the pipeline run was updated.
- Phase string
- Processing phase of the Data Lake Pipeline.
- Pipeline
Id string - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- Pipeline
Name string - Pipeline
Run stringId - Project
Id string - Snapshot
Id string - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- State string
- State of the pipeline run.
- Stats
[]Get
Data Lake Pipeline Run Stat - Runtime statistics for this Data Lake Pipeline run.
- backup
Frequency StringType - Backup schedule interval of the Data Lake Pipeline.
- created
Date String - Timestamp that indicates when the pipeline run was created.
- dataset
Name String - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- id String
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- last
Updated StringDate - Timestamp that indicates the last time the pipeline run was updated.
- phase String
- Processing phase of the Data Lake Pipeline.
- pipeline
Id String - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- pipeline
Name String - pipeline
Run StringId - project
Id String - snapshot
Id String - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- state String
- State of the pipeline run.
- stats
List<Get
Data Lake Pipeline Run Stat> - Runtime statistics for this Data Lake Pipeline run.
- backup
Frequency stringType - Backup schedule interval of the Data Lake Pipeline.
- created
Date string - Timestamp that indicates when the pipeline run was created.
- dataset
Name string - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- id string
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- last
Updated stringDate - Timestamp that indicates the last time the pipeline run was updated.
- phase string
- Processing phase of the Data Lake Pipeline.
- pipeline
Id string - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- pipeline
Name string - pipeline
Run stringId - project
Id string - snapshot
Id string - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- state string
- State of the pipeline run.
- stats
Get
Data Lake Pipeline Run Stat[] - Runtime statistics for this Data Lake Pipeline run.
- backup_
frequency_ strtype - Backup schedule interval of the Data Lake Pipeline.
- created_
date str - Timestamp that indicates when the pipeline run was created.
- dataset_
name str - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- id str
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- last_
updated_ strdate - Timestamp that indicates the last time the pipeline run was updated.
- phase str
- Processing phase of the Data Lake Pipeline.
- pipeline_
id str - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- pipeline_
name str - pipeline_
run_ strid - project_
id str - snapshot_
id str - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- state str
- State of the pipeline run.
- stats
Sequence[Get
Data Lake Pipeline Run Stat] - Runtime statistics for this Data Lake Pipeline run.
- backup
Frequency StringType - Backup schedule interval of the Data Lake Pipeline.
- created
Date String - Timestamp that indicates when the pipeline run was created.
- dataset
Name String - Human-readable label that identifies the dataset that Atlas generates during this pipeline run.
- id String
- Unique 24-hexadecimal character string that identifies a Data Lake Pipeline run.
- last
Updated StringDate - Timestamp that indicates the last time the pipeline run was updated.
- phase String
- Processing phase of the Data Lake Pipeline.
- pipeline
Id String - Unique 24-hexadecimal character string that identifies a Data Lake Pipeline.
- pipeline
Name String - pipeline
Run StringId - project
Id String - snapshot
Id String - Unique 24-hexadecimal character string that identifies the snapshot of a cluster.
- state String
- State of the pipeline run.
- stats List<Property Map>
- Runtime statistics for this Data Lake Pipeline run.
Supporting Types
GetDataLakePipelineRunStat
- Bytes
Exported int - Total data size in bytes exported for this pipeline run.
- Num
Docs int - Number of docs ingested for this pipeline run.
- Bytes
Exported int - Total data size in bytes exported for this pipeline run.
- Num
Docs int - Number of docs ingested for this pipeline run.
- bytes
Exported Integer - Total data size in bytes exported for this pipeline run.
- num
Docs Integer - Number of docs ingested for this pipeline run.
- bytes
Exported number - Total data size in bytes exported for this pipeline run.
- num
Docs number - Number of docs ingested for this pipeline run.
- bytes_
exported int - Total data size in bytes exported for this pipeline run.
- num_
docs int - Number of docs ingested for this pipeline run.
- bytes
Exported Number - Total data size in bytes exported for this pipeline run.
- num
Docs Number - Number of docs ingested for this pipeline run.
Package Details
- Repository
- MongoDB Atlas pulumi/pulumi-mongodbatlas
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
mongodbatlas
Terraform Provider.