azure-native.synapse.BigDataPool
Explore with Pulumi AI
A Big Data pool. API Version: 2021-03-01.
Example Usage
Create or update a Big Data pool
// Example: create or update a Synapse Big Data (Spark) pool (C#).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() =>
{
var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
{
// Automatically pause the pool after 15 idle minutes.
AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
{
DelayInMinutes = 15,
Enabled = true,
},
// Autoscale between 3 and 50 nodes on demand.
AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
{
Enabled = true,
MaxNodeCount = 50,
MinNodeCount = 3,
},
BigDataPoolName = "ExamplePool",
DefaultSparkLogFolder = "/logs",
// Pip requirements file for the pool; empty content means no extra packages.
LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
{
Content = "",
Filename = "requirements.txt",
},
Location = "West US 2",
NodeCount = 4,
NodeSize = "Medium",
NodeSizeFamily = "MemoryOptimized",
ResourceGroupName = "ExampleResourceGroup",
SparkEventsFolder = "/events",
SparkVersion = "3.3",
Tags =
{
{ "key", "value" },
},
WorkspaceName = "ExampleWorkspace",
});
});
// Example: create or update a Synapse Big Data (Spark) pool (Go).
package main
import (
synapse "github.com/pulumi/pulumi-azure-native-sdk/synapse"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
// Automatically pause the pool after 15 idle minutes.
AutoPause: &synapse.AutoPausePropertiesArgs{
DelayInMinutes: pulumi.Int(15),
Enabled: pulumi.Bool(true),
},
// Autoscale between 3 and 50 nodes on demand.
AutoScale: &synapse.AutoScalePropertiesArgs{
Enabled: pulumi.Bool(true),
MaxNodeCount: pulumi.Int(50),
MinNodeCount: pulumi.Int(3),
},
BigDataPoolName: pulumi.String("ExamplePool"),
DefaultSparkLogFolder: pulumi.String("/logs"),
// Pip requirements file for the pool; empty content means no extra packages.
LibraryRequirements: &synapse.LibraryRequirementsArgs{
Content: pulumi.String(""),
Filename: pulumi.String("requirements.txt"),
},
Location: pulumi.String("West US 2"),
NodeCount: pulumi.Int(4),
NodeSize: pulumi.String("Medium"),
NodeSizeFamily: pulumi.String("MemoryOptimized"),
ResourceGroupName: pulumi.String("ExampleResourceGroup"),
SparkEventsFolder: pulumi.String("/events"),
SparkVersion: pulumi.String("3.3"),
Tags: pulumi.StringMap{
"key": pulumi.String("value"),
},
WorkspaceName: pulumi.String("ExampleWorkspace"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.synapse.BigDataPool;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
.autoPause(Map.ofEntries(
Map.entry("delayInMinutes", 15),
Map.entry("enabled", true)
))
.autoScale(Map.ofEntries(
Map.entry("enabled", true),
Map.entry("maxNodeCount", 50),
Map.entry("minNodeCount", 3)
))
.bigDataPoolName("ExamplePool")
.defaultSparkLogFolder("/logs")
.libraryRequirements(Map.ofEntries(
Map.entry("content", ""),
Map.entry("filename", "requirements.txt")
))
.location("West US 2")
.nodeCount(4)
.nodeSize("Medium")
.nodeSizeFamily("MemoryOptimized")
.resourceGroupName("ExampleResourceGroup")
.sparkEventsFolder("/events")
.sparkVersion("3.3")
.tags(Map.of("key", "value"))
.workspaceName("ExampleWorkspace")
.build());
}
}
# Example: create or update a Synapse Big Data (Spark) pool (Python).
import pulumi
import pulumi_azure_native as azure_native
big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
# Automatically pause the pool after 15 idle minutes.
auto_pause=azure_native.synapse.AutoPausePropertiesArgs(
delay_in_minutes=15,
enabled=True,
),
# Autoscale between 3 and 50 nodes on demand.
auto_scale=azure_native.synapse.AutoScalePropertiesArgs(
enabled=True,
max_node_count=50,
min_node_count=3,
),
big_data_pool_name="ExamplePool",
default_spark_log_folder="/logs",
# Pip requirements file for the pool; empty content means no extra packages.
library_requirements=azure_native.synapse.LibraryRequirementsArgs(
content="",
filename="requirements.txt",
),
location="West US 2",
node_count=4,
node_size="Medium",
node_size_family="MemoryOptimized",
resource_group_name="ExampleResourceGroup",
spark_events_folder="/events",
spark_version="3.3",
tags={
"key": "value",
},
workspace_name="ExampleWorkspace")
// Example: create or update a Synapse Big Data (Spark) pool (TypeScript).
import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";
const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
// Automatically pause the pool after 15 idle minutes.
autoPause: {
delayInMinutes: 15,
enabled: true,
},
// Autoscale between 3 and 50 nodes on demand.
autoScale: {
enabled: true,
maxNodeCount: 50,
minNodeCount: 3,
},
bigDataPoolName: "ExamplePool",
defaultSparkLogFolder: "/logs",
// Pip requirements file for the pool; empty content means no extra packages.
libraryRequirements: {
content: "",
filename: "requirements.txt",
},
location: "West US 2",
nodeCount: 4,
nodeSize: "Medium",
nodeSizeFamily: "MemoryOptimized",
resourceGroupName: "ExampleResourceGroup",
sparkEventsFolder: "/events",
sparkVersion: "3.3",
tags: {
key: "value",
},
workspaceName: "ExampleWorkspace",
});
# Example: create or update a Synapse Big Data (Spark) pool (YAML).
resources:
  bigDataPool:
    type: azure-native:synapse:BigDataPool
    properties:
      # Automatically pause the pool after 15 idle minutes.
      autoPause:
        delayInMinutes: 15
        enabled: true
      # Autoscale between 3 and 50 nodes on demand.
      autoScale:
        enabled: true
        maxNodeCount: 50
        minNodeCount: 3
      bigDataPoolName: ExamplePool
      defaultSparkLogFolder: /logs
      libraryRequirements:
        # Must be an explicit empty string: a bare `content:` key is parsed as
        # YAML null, but this property is a string (see sibling examples).
        content: ""
        filename: requirements.txt
      location: West US 2
      nodeCount: 4
      nodeSize: Medium
      nodeSizeFamily: MemoryOptimized
      resourceGroupName: ExampleResourceGroup
      sparkEventsFolder: /events
      sparkVersion: '3.3'
      tags:
        key: value
      workspaceName: ExampleWorkspace
Create BigDataPool Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
@overload
def BigDataPool(resource_name: str,
args: BigDataPoolArgs,
opts: Optional[ResourceOptions] = None)
@overload
def BigDataPool(resource_name: str,
opts: Optional[ResourceOptions] = None,
resource_group_name: Optional[str] = None,
workspace_name: Optional[str] = None,
library_requirements: Optional[LibraryRequirementsArgs] = None,
node_size: Optional[Union[str, NodeSize]] = None,
creation_date: Optional[str] = None,
custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
default_spark_log_folder: Optional[str] = None,
dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
force: Optional[bool] = None,
is_compute_isolation_enabled: Optional[bool] = None,
auto_pause: Optional[AutoPausePropertiesArgs] = None,
location: Optional[str] = None,
node_count: Optional[int] = None,
cache_size: Optional[int] = None,
node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
provisioning_state: Optional[str] = None,
big_data_pool_name: Optional[str] = None,
session_level_packages_enabled: Optional[bool] = None,
spark_config_properties: Optional[LibraryRequirementsArgs] = None,
spark_events_folder: Optional[str] = None,
spark_version: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
auto_scale: Optional[AutoScalePropertiesArgs] = None)
func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
public BigDataPool(String name, BigDataPoolArgs args)
public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
type: azure-native:synapse:BigDataPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example using placeholder values for every input property.
// Typed input properties take the generated Inputs.*Args objects; bare
// collection-initializer blocks are not valid C# for these properties.
var bigDataPoolResource = new AzureNative.Synapse.BigDataPool("bigDataPoolResource", new()
{
    ResourceGroupName = "string",
    WorkspaceName = "string",
    LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
    {
        Content = "string",
        Filename = "string",
    },
    NodeSize = "string",
    CreationDate = "string",
    CustomLibraries = new[]
    {
        new AzureNative.Synapse.Inputs.LibraryInfoArgs
        {
            ContainerName = "string",
            Name = "string",
            Path = "string",
            Type = "string",
        },
    },
    DefaultSparkLogFolder = "string",
    DynamicExecutorAllocation = new AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs
    {
        Enabled = false,
    },
    Force = false,
    IsComputeIsolationEnabled = false,
    AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
    {
        DelayInMinutes = 0,
        Enabled = false,
    },
    Location = "string",
    NodeCount = 0,
    CacheSize = 0,
    NodeSizeFamily = "string",
    ProvisioningState = "string",
    BigDataPoolName = "string",
    SessionLevelPackagesEnabled = false,
    SparkConfigProperties = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
    {
        Content = "string",
        Filename = "string",
    },
    SparkEventsFolder = "string",
    SparkVersion = "string",
    Tags =
    {
        { "string", "string" },
    },
    AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
    {
        Enabled = false,
        MaxNodeCount = 0,
        MinNodeCount = 0,
    },
});
// Reference example using placeholder values for every input property.
// BigDataPoolArgs fields are typed pulumi inputs; raw map[string]interface{}
// values do not type-check against this SDK (compare the working example above).
example, err := synapse.NewBigDataPool(ctx, "bigDataPoolResource", &synapse.BigDataPoolArgs{
	ResourceGroupName: pulumi.String("string"),
	WorkspaceName:     pulumi.String("string"),
	LibraryRequirements: &synapse.LibraryRequirementsArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	NodeSize:     pulumi.String("string"),
	CreationDate: pulumi.String("string"),
	CustomLibraries: synapse.LibraryInfoArray{
		&synapse.LibraryInfoArgs{
			ContainerName: pulumi.String("string"),
			Name:          pulumi.String("string"),
			Path:          pulumi.String("string"),
			Type:          pulumi.String("string"),
		},
	},
	DefaultSparkLogFolder: pulumi.String("string"),
	DynamicExecutorAllocation: &synapse.DynamicExecutorAllocationArgs{
		Enabled: pulumi.Bool(false),
	},
	Force:                     pulumi.Bool(false),
	IsComputeIsolationEnabled: pulumi.Bool(false),
	AutoPause: &synapse.AutoPausePropertiesArgs{
		DelayInMinutes: pulumi.Int(0),
		Enabled:        pulumi.Bool(false),
	},
	Location:                    pulumi.String("string"),
	NodeCount:                   pulumi.Int(0),
	CacheSize:                   pulumi.Int(0),
	NodeSizeFamily:              pulumi.String("string"),
	ProvisioningState:           pulumi.String("string"),
	BigDataPoolName:             pulumi.String("string"),
	SessionLevelPackagesEnabled: pulumi.Bool(false),
	SparkConfigProperties: &synapse.LibraryRequirementsArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	SparkEventsFolder: pulumi.String("string"),
	SparkVersion:      pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	AutoScale: &synapse.AutoScalePropertiesArgs{
		Enabled:      pulumi.Bool(false),
		MaxNodeCount: pulumi.Int(0),
		MinNodeCount: pulumi.Int(0),
	},
})
var bigDataPoolResource = new BigDataPool("bigDataPoolResource", BigDataPoolArgs.builder()
.resourceGroupName("string")
.workspaceName("string")
.libraryRequirements(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.nodeSize("string")
.creationDate("string")
.customLibraries(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.defaultSparkLogFolder("string")
.dynamicExecutorAllocation(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.force(false)
.isComputeIsolationEnabled(false)
.autoPause(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.location("string")
.nodeCount(0)
.cacheSize(0)
.nodeSizeFamily("string")
.provisioningState("string")
.bigDataPoolName("string")
.sessionLevelPackagesEnabled(false)
.sparkConfigProperties(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.sparkEventsFolder("string")
.sparkVersion("string")
.tags(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.autoScale(%!v(PANIC=Format method: runtime error: invalid memory address or nil pointer dereference))
.build());
# Reference example using placeholder values for every input property.
# (Placeholder values and dict keys must be quoted strings to be valid Python.)
big_data_pool_resource = azure_native.synapse.BigDataPool("bigDataPoolResource",
    resource_group_name="string",
    workspace_name="string",
    library_requirements={
        "content": "string",
        "filename": "string",
    },
    node_size="string",
    creation_date="string",
    custom_libraries=[{
        "containerName": "string",
        "name": "string",
        "path": "string",
        "type": "string",
    }],
    default_spark_log_folder="string",
    dynamic_executor_allocation={
        "enabled": False,
    },
    force=False,
    is_compute_isolation_enabled=False,
    auto_pause={
        "delayInMinutes": 0,
        "enabled": False,
    },
    location="string",
    node_count=0,
    cache_size=0,
    node_size_family="string",
    provisioning_state="string",
    big_data_pool_name="string",
    session_level_packages_enabled=False,
    spark_config_properties={
        "content": "string",
        "filename": "string",
    },
    spark_events_folder="string",
    spark_version="string",
    tags={
        "string": "string",
    },
    auto_scale={
        "enabled": False,
        "maxNodeCount": 0,
        "minNodeCount": 0,
    })
// Reference example using placeholder values for every input property.
const bigDataPoolResource = new azure_native.synapse.BigDataPool("bigDataPoolResource", {
resourceGroupName: "string",
workspaceName: "string",
libraryRequirements: {
content: "string",
filename: "string",
},
nodeSize: "string",
creationDate: "string",
customLibraries: [{
containerName: "string",
name: "string",
path: "string",
type: "string",
}],
defaultSparkLogFolder: "string",
dynamicExecutorAllocation: {
enabled: false,
},
force: false,
isComputeIsolationEnabled: false,
autoPause: {
delayInMinutes: 0,
enabled: false,
},
location: "string",
nodeCount: 0,
cacheSize: 0,
nodeSizeFamily: "string",
provisioningState: "string",
bigDataPoolName: "string",
sessionLevelPackagesEnabled: false,
sparkConfigProperties: {
content: "string",
filename: "string",
},
sparkEventsFolder: "string",
sparkVersion: "string",
tags: {
string: "string",
},
autoScale: {
enabled: false,
maxNodeCount: 0,
minNodeCount: 0,
},
});
# Reference example using placeholder values for every input property.
type: azure-native:synapse:BigDataPool
properties:
autoPause:
delayInMinutes: 0
enabled: false
autoScale:
enabled: false
maxNodeCount: 0
minNodeCount: 0
bigDataPoolName: string
cacheSize: 0
creationDate: string
customLibraries:
- containerName: string
name: string
path: string
type: string
defaultSparkLogFolder: string
dynamicExecutorAllocation:
enabled: false
force: false
isComputeIsolationEnabled: false
libraryRequirements:
content: string
filename: string
location: string
nodeCount: 0
nodeSize: string
nodeSizeFamily: string
provisioningState: string
resourceGroupName: string
sessionLevelPackagesEnabled: false
sparkConfigProperties:
content: string
filename: string
sparkEventsFolder: string
sparkVersion: string
tags:
string: string
workspaceName: string
BigDataPool Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The BigDataPool resource accepts the following input properties:
- Resource
Group stringName - The name of the resource group. The name is case insensitive.
- Workspace
Name string - The name of the workspace
- Auto
Pause Pulumi.Azure Native. Synapse. Inputs. Auto Pause Properties - Auto-pausing properties
- Auto
Scale Pulumi.Azure Native. Synapse. Inputs. Auto Scale Properties - Auto-scaling properties
- Big
Data stringPool Name - Big Data pool name
- Cache
Size int - The cache size
- Creation
Date string - The time when the Big Data pool was created.
- Custom
Libraries List<Pulumi.Azure Native. Synapse. Inputs. Library Info> - List of custom libraries/packages associated with the spark pool.
- Default
Spark stringLog Folder - The default folder where Spark logs will be written.
- Dynamic
Executor Pulumi.Allocation Azure Native. Synapse. Inputs. Dynamic Executor Allocation - Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- Is
Compute boolIsolation Enabled - Whether compute isolation is required or not.
- Library
Requirements Pulumi.Azure Native. Synapse. Inputs. Library Requirements - Library version requirements
- Location string
- The geo-location where the resource lives
- Node
Count int - The number of nodes in the Big Data pool.
- Node
Size string | Pulumi.Azure Native. Synapse. Node Size - The level of compute power that each node in the Big Data pool has.
- Node
Size string | Pulumi.Family Azure Native. Synapse. Node Size Family - The kind of nodes that the Big Data pool provides.
- Provisioning
State string - The state of the Big Data pool.
- Session
Level boolPackages Enabled - Whether session level packages enabled.
- Spark
Config Pulumi.Properties Azure Native. Synapse. Inputs. Library Requirements - Spark configuration file to specify additional properties
- Spark
Events stringFolder - The Spark events folder
- Spark
Version string - The Apache Spark version.
- Tags Dictionary<string, string>
- Resource tags.
- Resource
Group stringName - The name of the resource group. The name is case insensitive.
- Workspace
Name string - The name of the workspace
- Auto
Pause AutoPause Properties Args - Auto-pausing properties
- Auto
Scale AutoScale Properties Args - Auto-scaling properties
- Big
Data stringPool Name - Big Data pool name
- Cache
Size int - The cache size
- Creation
Date string - The time when the Big Data pool was created.
- Custom
Libraries []LibraryInfo Args - List of custom libraries/packages associated with the spark pool.
- Default
Spark stringLog Folder - The default folder where Spark logs will be written.
- Dynamic
Executor DynamicAllocation Executor Allocation Args - Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- Is
Compute boolIsolation Enabled - Whether compute isolation is required or not.
- Library
Requirements LibraryRequirements Args - Library version requirements
- Location string
- The geo-location where the resource lives
- Node
Count int - The number of nodes in the Big Data pool.
- Node
Size string | NodeSize - The level of compute power that each node in the Big Data pool has.
- Node
Size string | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- Provisioning
State string - The state of the Big Data pool.
- Session
Level boolPackages Enabled - Whether session level packages enabled.
- Spark
Config LibraryProperties Requirements Args - Spark configuration file to specify additional properties
- Spark
Events stringFolder - The Spark events folder
- Spark
Version string - The Apache Spark version.
- Tags map[string]string
- Resource tags.
- resource
Group StringName - The name of the resource group. The name is case insensitive.
- workspace
Name String - The name of the workspace
- auto
Pause AutoPause Properties - Auto-pausing properties
- auto
Scale AutoScale Properties - Auto-scaling properties
- big
Data StringPool Name - Big Data pool name
- cache
Size Integer - The cache size
- creation
Date String - The time when the Big Data pool was created.
- custom
Libraries List<LibraryInfo> - List of custom libraries/packages associated with the spark pool.
- default
Spark StringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor DynamicAllocation Executor Allocation - Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- is
Compute BooleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements LibraryRequirements - Library version requirements
- location String
- The geo-location where the resource lives
- node
Count Integer - The number of nodes in the Big Data pool.
- node
Size String | NodeSize - The level of compute power that each node in the Big Data pool has.
- node
Size String | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning
State String - The state of the Big Data pool.
- session
Level BooleanPackages Enabled - Whether session level packages enabled.
- spark
Config LibraryProperties Requirements - Spark configuration file to specify additional properties
- spark
Events StringFolder - The Spark events folder
- spark
Version String - The Apache Spark version.
- tags Map<String,String>
- Resource tags.
- resource
Group stringName - The name of the resource group. The name is case insensitive.
- workspace
Name string - The name of the workspace
- auto
Pause AutoPause Properties - Auto-pausing properties
- auto
Scale AutoScale Properties - Auto-scaling properties
- big
Data stringPool Name - Big Data pool name
- cache
Size number - The cache size
- creation
Date string - The time when the Big Data pool was created.
- custom
Libraries LibraryInfo[] - List of custom libraries/packages associated with the spark pool.
- default
Spark stringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor DynamicAllocation Executor Allocation - Dynamic Executor Allocation
- force boolean
- Whether to stop any running jobs in the Big Data pool
- is
Compute booleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements LibraryRequirements - Library version requirements
- location string
- The geo-location where the resource lives
- node
Count number - The number of nodes in the Big Data pool.
- node
Size string | NodeSize - The level of compute power that each node in the Big Data pool has.
- node
Size string | NodeFamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning
State string - The state of the Big Data pool.
- session
Level booleanPackages Enabled - Whether session level packages enabled.
- spark
Config LibraryProperties Requirements - Spark configuration file to specify additional properties
- spark
Events stringFolder - The Spark events folder
- spark
Version string - The Apache Spark version.
- tags {[key: string]: string}
- Resource tags.
- resource_
group_ strname - The name of the resource group. The name is case insensitive.
- workspace_
name str - The name of the workspace
- auto_
pause AutoPause Properties Args - Auto-pausing properties
- auto_
scale AutoScale Properties Args - Auto-scaling properties
- big_
data_ strpool_ name - Big Data pool name
- cache_
size int - The cache size
- creation_
date str - The time when the Big Data pool was created.
- custom_
libraries Sequence[LibraryInfo Args] - List of custom libraries/packages associated with the spark pool.
- default_
spark_ strlog_ folder - The default folder where Spark logs will be written.
- dynamic_
executor_ Dynamicallocation Executor Allocation Args - Dynamic Executor Allocation
- force bool
- Whether to stop any running jobs in the Big Data pool
- is_
compute_ boolisolation_ enabled - Whether compute isolation is required or not.
- library_
requirements LibraryRequirements Args - Library version requirements
- location str
- The geo-location where the resource lives
- node_
count int - The number of nodes in the Big Data pool.
- node_
size str | NodeSize - The level of compute power that each node in the Big Data pool has.
- node_
size_ str | Nodefamily Size Family - The kind of nodes that the Big Data pool provides.
- provisioning_
state str - The state of the Big Data pool.
- session_
level_ boolpackages_ enabled - Whether session level packages enabled.
- spark_
config_ Libraryproperties Requirements Args - Spark configuration file to specify additional properties
- spark_
events_ strfolder - The Spark events folder
- spark_
version str - The Apache Spark version.
- tags Mapping[str, str]
- Resource tags.
- resource
Group StringName - The name of the resource group. The name is case insensitive.
- workspace
Name String - The name of the workspace
- auto
Pause Property Map - Auto-pausing properties
- auto
Scale Property Map - Auto-scaling properties
- big
Data StringPool Name - Big Data pool name
- cache
Size Number - The cache size
- creation
Date String - The time when the Big Data pool was created.
- custom
Libraries List<Property Map> - List of custom libraries/packages associated with the spark pool.
- default
Spark StringLog Folder - The default folder where Spark logs will be written.
- dynamic
Executor Property MapAllocation - Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- is
Compute BooleanIsolation Enabled - Whether compute isolation is required or not.
- library
Requirements Property Map - Library version requirements
- location String
- The geo-location where the resource lives
- node
Count Number - The number of nodes in the Big Data pool.
- node
Size String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge" - The level of compute power that each node in the Big Data pool has.
- node
Size String | "None" | "MemoryFamily Optimized" | "Hardware Accelerated FPGA" | "Hardware Accelerated GPU" - The kind of nodes that the Big Data pool provides.
- provisioning
State String - The state of the Big Data pool.
- session
Level BooleanPackages Enabled - Whether session level packages enabled.
- spark
Config Property MapProperties - Spark configuration file to specify additional properties
- spark
Events StringFolder - The Spark events folder
- spark
Version String - The Apache Spark version.
- tags Map<String>
- Resource tags.
Outputs
All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- id String
- The provider-assigned unique ID for this managed resource.
- last
Succeeded StringTimestamp - The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- id string
- The provider-assigned unique ID for this managed resource.
- last
Succeeded stringTimestamp - The time when the Big Data pool was updated successfully.
- name string
- The name of the resource
- type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- id str
- The provider-assigned unique ID for this managed resource.
- last_
succeeded_ strtimestamp - The time when the Big Data pool was updated successfully.
- name str
- The name of the resource
- type str
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- id String
- The provider-assigned unique ID for this managed resource.
- last
Succeeded StringTimestamp - The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Supporting Types
AutoPauseProperties, AutoPausePropertiesArgs
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In IntegerMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In numberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_
in_ intminutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In NumberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoPausePropertiesResponse, AutoPausePropertiesResponseArgs
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- Delay
In intMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In IntegerMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In numberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_
in_ intminutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delay
In NumberMinutes - Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoScaleProperties, AutoScalePropertiesArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- Max
Node intCount - The maximum number of nodes the Big Data pool can support.
- Min
Node intCount - The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- Max
Node intCount - The maximum number of nodes the Big Data pool can support.
- Min
Node intCount - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- max
Node IntegerCount - The maximum number of nodes the Big Data pool can support.
- min
Node IntegerCount - The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- max
Node numberCount - The maximum number of nodes the Big Data pool can support.
- min
Node numberCount - The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_
node_ intcount - The maximum number of nodes the Big Data pool can support.
- min_
node_ intcount - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- max
Node NumberCount - The maximum number of nodes the Big Data pool can support.
- min
Node NumberCount - The minimum number of nodes the Big Data pool can support.
AutoScalePropertiesResponse, AutoScalePropertiesResponseArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- Max
Node intCount - The maximum number of nodes the Big Data pool can support.
- Min
Node intCount - The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- Max
Node intCount - The maximum number of nodes the Big Data pool can support.
- Min
Node intCount - The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- max
Node IntegerCount - The maximum number of nodes the Big Data pool can support.
- min
Node IntegerCount - The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- max
Node numberCount - The maximum number of nodes the Big Data pool can support.
- min
Node numberCount - The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_node_count int
- The maximum number of nodes the Big Data pool can support.
- min_node_count int
- The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Number
- The maximum number of nodes the Big Data pool can support.
- minNodeCount Number
- The minimum number of nodes the Big Data pool can support.
DynamicExecutorAllocation, DynamicExecutorAllocationArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
DynamicExecutorAllocationResponse, DynamicExecutorAllocationResponseArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
LibraryInfo, LibraryInfoArgs
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- containerName string
- Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- container_name str
- Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryInfoResponse, LibraryInfoResponseArgs
- CreatorId string
- Creator Id of the library/package.
- ProvisioningStatus string
- Provisioning status of the library/package.
- UploadedTimestamp string
- The last update time of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- CreatorId string
- Creator Id of the library/package.
- ProvisioningStatus string
- Provisioning status of the library/package.
- UploadedTimestamp string
- The last update time of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- creatorId String
- Creator Id of the library/package.
- provisioningStatus String
- Provisioning status of the library/package.
- uploadedTimestamp String
- The last update time of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- creatorId string
- Creator Id of the library/package.
- provisioningStatus string
- Provisioning status of the library/package.
- uploadedTimestamp string
- The last update time of the library.
- containerName string
- Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- creator_id str
- Creator Id of the library/package.
- provisioning_status str
- Provisioning status of the library/package.
- uploaded_timestamp str
- The last update time of the library.
- container_name str
- Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- creatorId String
- Creator Id of the library/package.
- provisioningStatus String
- Provisioning status of the library/package.
- uploadedTimestamp String
- The last update time of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryRequirements, LibraryRequirementsArgs
LibraryRequirementsResponse, LibraryRequirementsResponseArgs
NodeSize, NodeSizeArgs
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NodeSizeNone
- None
- NodeSizeSmall
- Small
- NodeSizeMedium
- Medium
- NodeSizeLarge
- Large
- NodeSizeXLarge
- XLarge
- NodeSizeXXLarge
- XXLarge
- NodeSizeXXXLarge
- XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NONE
- None
- SMALL
- Small
- MEDIUM
- Medium
- LARGE
- Large
- X_LARGE
- XLarge
- XX_LARGE
- XXLarge
- XXX_LARGE
- XXXLarge
- "None"
- None
- "Small"
- Small
- "Medium"
- Medium
- "Large"
- Large
- "XLarge"
- XLarge
- "XXLarge"
- XXLarge
- "XXXLarge"
- XXXLarge
NodeSizeFamily, NodeSizeFamilyArgs
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- NodeSizeFamilyNone
- None
- NodeSizeFamilyMemoryOptimized
- MemoryOptimized
- NodeSizeFamilyHardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- NodeSizeFamilyHardwareAcceleratedGPU
- HardwareAcceleratedGPU
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- NONE
- None
- MEMORY_OPTIMIZED
- MemoryOptimized
- HARDWARE_ACCELERATED_FPGA
- HardwareAcceleratedFPGA
- HARDWARE_ACCELERATED_GPU
- HardwareAcceleratedGPU
- "None"
- None
- "Memory
Optimized" - MemoryOptimized
- "Hardware
Accelerated FPGA" - HardwareAcceleratedFPGA
- "Hardware
Accelerated GPU" - HardwareAcceleratedGPU
Import
An existing resource can be imported using its type token, name, and identifier, e.g.
$ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/01234567-89ab-4def-0123-456789abcdef/resourceGroups/ExampleResourceGroup/providers/Microsoft.Synapse/workspaces/ExampleWorkspace/bigDataPools/ExamplePool
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- azure-native-v1 pulumi/pulumi-azure-native
- License
- Apache-2.0