cloudflare.LogpushJob
Example Usage
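A minimal TypeScript sketch, using placeholder values for the zone ID, destination bucket, and ownership challenge token:

import * as cloudflare from "@pulumi/cloudflare";

// Push HTTP request logs for a zone to an S3 bucket. Replace the zone ID,
// bucket path, and ownership challenge token with your own values.
const httpRequestsJob = new cloudflare.LogpushJob("http-requests-job", {
    zoneId: "0da42c8d2132a9ddaf714f9e7c920711", // placeholder zone ID
    name: "http-requests-to-s3",
    dataset: "http_requests",
    enabled: true,
    destinationConf: "s3://my-logs-bucket/http?region=us-east-1", // placeholder bucket
    ownershipChallenge: "0000000000000", // token returned by the destination validation step
    logpullOptions: "fields=ClientIP,ClientRequestHost,EdgeResponseStatus&timestamps=rfc3339",
});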
Create LogpushJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LogpushJob(name: string, args: LogpushJobArgs, opts?: CustomResourceOptions);
@overload
def LogpushJob(resource_name: str,
args: LogpushJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def LogpushJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
destination_conf: Optional[str] = None,
dataset: Optional[str] = None,
kind: Optional[str] = None,
enabled: Optional[bool] = None,
filter: Optional[str] = None,
frequency: Optional[str] = None,
account_id: Optional[str] = None,
logpull_options: Optional[str] = None,
max_upload_bytes: Optional[int] = None,
max_upload_interval_seconds: Optional[int] = None,
max_upload_records: Optional[int] = None,
name: Optional[str] = None,
output_options: Optional[LogpushJobOutputOptionsArgs] = None,
ownership_challenge: Optional[str] = None,
zone_id: Optional[str] = None)
func NewLogpushJob(ctx *Context, name string, args LogpushJobArgs, opts ...ResourceOption) (*LogpushJob, error)
public LogpushJob(string name, LogpushJobArgs args, CustomResourceOptions? opts = null)
public LogpushJob(String name, LogpushJobArgs args)
public LogpushJob(String name, LogpushJobArgs args, CustomResourceOptions options)
type: cloudflare:LogpushJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var logpushJobResource = new Cloudflare.LogpushJob("logpushJobResource", new()
{
DestinationConf = "string",
Dataset = "string",
Kind = "string",
Enabled = false,
Filter = "string",
AccountId = "string",
LogpullOptions = "string",
MaxUploadBytes = 0,
MaxUploadIntervalSeconds = 0,
MaxUploadRecords = 0,
Name = "string",
OutputOptions = new Cloudflare.Inputs.LogpushJobOutputOptionsArgs
{
BatchPrefix = "string",
BatchSuffix = "string",
Cve20214428 = false,
FieldDelimiter = "string",
FieldNames = new[]
{
"string",
},
OutputType = "string",
RecordDelimiter = "string",
RecordPrefix = "string",
RecordSuffix = "string",
RecordTemplate = "string",
SampleRate = 0,
TimestampFormat = "string",
},
OwnershipChallenge = "string",
ZoneId = "string",
});
example, err := cloudflare.NewLogpushJob(ctx, "logpushJobResource", &cloudflare.LogpushJobArgs{
DestinationConf: pulumi.String("string"),
Dataset: pulumi.String("string"),
Kind: pulumi.String("string"),
Enabled: pulumi.Bool(false),
Filter: pulumi.String("string"),
AccountId: pulumi.String("string"),
LogpullOptions: pulumi.String("string"),
MaxUploadBytes: pulumi.Int(0),
MaxUploadIntervalSeconds: pulumi.Int(0),
MaxUploadRecords: pulumi.Int(0),
Name: pulumi.String("string"),
OutputOptions: &cloudflare.LogpushJobOutputOptionsArgs{
BatchPrefix: pulumi.String("string"),
BatchSuffix: pulumi.String("string"),
Cve20214428: pulumi.Bool(false),
FieldDelimiter: pulumi.String("string"),
FieldNames: pulumi.StringArray{
pulumi.String("string"),
},
OutputType: pulumi.String("string"),
RecordDelimiter: pulumi.String("string"),
RecordPrefix: pulumi.String("string"),
RecordSuffix: pulumi.String("string"),
RecordTemplate: pulumi.String("string"),
SampleRate: pulumi.Float64(0),
TimestampFormat: pulumi.String("string"),
},
OwnershipChallenge: pulumi.String("string"),
ZoneId: pulumi.String("string"),
})
var logpushJobResource = new LogpushJob("logpushJobResource", LogpushJobArgs.builder()
.destinationConf("string")
.dataset("string")
.kind("string")
.enabled(false)
.filter("string")
.accountId("string")
.logpullOptions("string")
.maxUploadBytes(0)
.maxUploadIntervalSeconds(0)
.maxUploadRecords(0)
.name("string")
.outputOptions(LogpushJobOutputOptionsArgs.builder()
.batchPrefix("string")
.batchSuffix("string")
.cve20214428(false)
.fieldDelimiter("string")
.fieldNames("string")
.outputType("string")
.recordDelimiter("string")
.recordPrefix("string")
.recordSuffix("string")
.recordTemplate("string")
.sampleRate(0)
.timestampFormat("string")
.build())
.ownershipChallenge("string")
.zoneId("string")
.build());
logpush_job_resource = cloudflare.LogpushJob("logpushJobResource",
destination_conf="string",
dataset="string",
kind="string",
enabled=False,
filter="string",
account_id="string",
logpull_options="string",
max_upload_bytes=0,
max_upload_interval_seconds=0,
max_upload_records=0,
name="string",
output_options=cloudflare.LogpushJobOutputOptionsArgs(
batch_prefix="string",
batch_suffix="string",
cve20214428=False,
field_delimiter="string",
field_names=["string"],
output_type="string",
record_delimiter="string",
record_prefix="string",
record_suffix="string",
record_template="string",
sample_rate=0,
timestamp_format="string",
),
ownership_challenge="string",
zone_id="string")
const logpushJobResource = new cloudflare.LogpushJob("logpushJobResource", {
destinationConf: "string",
dataset: "string",
kind: "string",
enabled: false,
filter: "string",
accountId: "string",
logpullOptions: "string",
maxUploadBytes: 0,
maxUploadIntervalSeconds: 0,
maxUploadRecords: 0,
name: "string",
outputOptions: {
batchPrefix: "string",
batchSuffix: "string",
cve20214428: false,
fieldDelimiter: "string",
fieldNames: ["string"],
outputType: "string",
recordDelimiter: "string",
recordPrefix: "string",
recordSuffix: "string",
recordTemplate: "string",
sampleRate: 0,
timestampFormat: "string",
},
ownershipChallenge: "string",
zoneId: "string",
});
type: cloudflare:LogpushJob
properties:
accountId: string
dataset: string
destinationConf: string
enabled: false
filter: string
kind: string
logpullOptions: string
maxUploadBytes: 0
maxUploadIntervalSeconds: 0
maxUploadRecords: 0
name: string
outputOptions:
batchPrefix: string
batchSuffix: string
cve20214428: false
fieldDelimiter: string
fieldNames:
- string
outputType: string
recordDelimiter: string
recordPrefix: string
recordSuffix: string
recordTemplate: string
sampleRate: 0
timestampFormat: string
ownershipChallenge: string
zoneId: string
LogpushJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The LogpushJob resource accepts the following input properties:
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- DestinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- DestinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf String
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Integer
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Integer
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Integer
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- enabled boolean
- Whether to enable the job.
- filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords number
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name string
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset str
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destination_conf str
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- account_id str
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- enabled bool
- Whether to enable the job.
- filter str
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency str
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind str
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpull_options str
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- max_upload_bytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- max_upload_interval_seconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- max_upload_records int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name str
- The name of the logpush job to create.
- output_options LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownership_challenge str
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zone_id str
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf String
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Number
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions Property Map
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
Outputs
All input properties are implicitly available as output properties. Additionally, the LogpushJob resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
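For example, a TypeScript program can read the provider-assigned ID (and any echoed input) from a declared job; httpRequestsJob here refers to a LogpushJob declared as in the sketch above:

// Export the provider-assigned ID and an echoed input.
export const jobId = httpRequestsJob.id;
export const jobDataset = httpRequestsJob.dataset;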
Look up Existing LogpushJob Resource
Get an existing LogpushJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LogpushJobState, opts?: CustomResourceOptions): LogpushJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
account_id: Optional[str] = None,
dataset: Optional[str] = None,
destination_conf: Optional[str] = None,
enabled: Optional[bool] = None,
filter: Optional[str] = None,
frequency: Optional[str] = None,
kind: Optional[str] = None,
logpull_options: Optional[str] = None,
max_upload_bytes: Optional[int] = None,
max_upload_interval_seconds: Optional[int] = None,
max_upload_records: Optional[int] = None,
name: Optional[str] = None,
output_options: Optional[LogpushJobOutputOptionsArgs] = None,
ownership_challenge: Optional[str] = None,
zone_id: Optional[str] = None) -> LogpushJob
func GetLogpushJob(ctx *Context, name string, id IDInput, state *LogpushJobState, opts ...ResourceOption) (*LogpushJob, error)
public static LogpushJob Get(string name, Input<string> id, LogpushJobState? state, CustomResourceOptions? opts = null)
public static LogpushJob get(String name, Output<String> id, LogpushJobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
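As a TypeScript sketch, assuming "123456" is the ID of an existing job (a placeholder value):

import * as cloudflare from "@pulumi/cloudflare";

// Read the state of an existing job by provider ID; get() does not
// create or modify the job.
const existing = cloudflare.LogpushJob.get("existing-logpush-job", "123456");
export const existingDestination = existing.destinationConf;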
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- DestinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- DestinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf String
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Integer
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Integer
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Integer
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- accountId string
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf string
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled boolean
- Whether to enable the job.
- filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency string
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords number
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name string
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge string
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId string
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- account_id str
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset str
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destination_conf str
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled bool
- Whether to enable the job.
- filter str
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency str
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind str
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpull_options str
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- max_upload_bytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- max_upload_interval_seconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- max_upload_records int
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name str
- The name of the logpush job to create.
- output_options LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownership_challenge str
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zone_id str
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id, zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests, casb_findings, firewall_events, http_requests, spectrum_events, nel_reports, audit_logs, gateway_dns, gateway_http, gateway_network, dns_logs, network_analytics_logs, workers_trace_events, device_posture_results, zero_trust_network_sessions, magic_ids_detections, page_shield_events.
- destinationConf String
- Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A high frequency will push logs more often with smaller files; a low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Number
- The maximum number of log lines per batch. Value must be between 1,000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions Property Map
- Structured replacement for logpull_options. When this field is included, logpull_options will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when the destination is Amazon S3, Google Cloud Storage, Microsoft Azure, or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id, zone_id.
Supporting Types
LogpushJobOutputOptions, LogpushJobOutputOptionsArgs
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- FieldDelimiter string
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- FieldNames List<string>
- List of field names to be included in the Logpush output.
- OutputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- RecordDelimiter string
- String to be inserted between records as a separator.
- RecordPrefix string
- String to be prepended before each record. Defaults to {.
- RecordSuffix string
- String to be appended after each record. Defaults to }.
- RecordTemplate string
- String to use as a template for each record instead of the default comma-separated list.
- SampleRate double
- Specifies the sampling rate. Defaults to 1.
- TimestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- FieldDelimiter string
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- FieldNames []string
- List of field names to be included in the Logpush output.
- OutputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- RecordDelimiter string
- String to be inserted between records as a separator.
- RecordPrefix string
- String to be prepended before each record. Defaults to {.
- RecordSuffix string
- String to be appended after each record. Defaults to }.
- RecordTemplate string
- String to use as a template for each record instead of the default comma-separated list.
- SampleRate float64
- Specifies the sampling rate. Defaults to 1.
- TimestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve20214428 Boolean
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- fieldDelimiter String
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames List<String>
- List of field names to be included in the Logpush output.
- outputType String
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter String
- String to be inserted between records as a separator.
- recordPrefix String
- String to be prepended before each record. Defaults to {.
- recordSuffix String
- String to be appended after each record. Defaults to }.
- recordTemplate String
- String to use as a template for each record instead of the default comma-separated list.
- sampleRate Double
- Specifies the sampling rate. Defaults to 1.
- timestampFormat String
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix string
- String to be prepended before each batch.
- batchSuffix string
- String to be appended after each batch.
- cve20214428 boolean
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- fieldDelimiter string
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames string[]
- List of field names to be included in the Logpush output.
- outputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter string
- String to be inserted between records as a separator.
- recordPrefix string
- String to be prepended before each record. Defaults to {.
- recordSuffix string
- String to be appended after each record. Defaults to }.
- recordTemplate string
- String to use as a template for each record instead of the default comma-separated list.
- sampleRate number
- Specifies the sampling rate. Defaults to 1.
- timestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batch_prefix str
- String to be prepended before each batch.
- batch_suffix str
- String to be appended after each batch.
- cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- field_delimiter str
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- field_names Sequence[str]
- List of field names to be included in the Logpush output.
- output_type str
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- record_delimiter str
- String to be inserted between records as a separator.
- record_prefix str
- String to be prepended before each record. Defaults to {.
- record_suffix str
- String to be appended after each record. Defaults to }.
- record_template str
- String to use as a template for each record instead of the default comma-separated list.
- sample_rate float
- Specifies the sampling rate. Defaults to 1.
- timestamp_format str
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve20214428 Boolean
- Mitigation for CVE-2021-44228. If set to true, all occurrences of ${ in the generated files will be replaced with x{. Defaults to false.
- fieldDelimiter String
- String used to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames List<String>
- List of field names to be included in the Logpush output.
- outputType String
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter String
- String to be inserted between records as a separator.
- recordPrefix String
- String to be prepended before each record. Defaults to {.
- recordSuffix String
- String to be appended after each record. Defaults to }.
- recordTemplate String
- String to use as a template for each record instead of the default comma-separated list.
- sampleRate Number
- Specifies the sampling rate. Defaults to 1.
- timestampFormat String
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
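As an illustration, a job can use outputOptions to emit CSV instead of the default ndjson. This TypeScript sketch uses placeholder account and destination values; the field names and the record prefix/suffix overrides are illustrative assumptions, not a prescribed configuration:

import * as cloudflare from "@pulumi/cloudflare";

// Audit-log job that formats output as CSV via outputOptions (so
// logpullOptions is omitted). Account ID, bucket, and ownership
// challenge token are placeholders; field names are illustrative.
const auditCsvJob = new cloudflare.LogpushJob("audit-logs-csv", {
    accountId: "f037e56e89293a057740de681ac9abbe", // placeholder account ID
    name: "audit-logs-csv",
    dataset: "audit_logs",
    enabled: true,
    destinationConf: "s3://my-logs-bucket/audit?region=us-east-1", // placeholder bucket
    ownershipChallenge: "0000000000000", // placeholder token
    outputOptions: {
        outputType: "csv",
        fieldNames: ["When", "ActorEmail", "ActionType"],
        timestampFormat: "rfc3339",
        recordPrefix: "",   // assumption: clear the ndjson-oriented "{" default for CSV
        recordSuffix: "\n", // assumption: newline-terminated CSV records
    },
});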
Import
Import an account-scoped job.
$ pulumi import cloudflare:index/logpushJob:LogpushJob example account/<account_id>/<job_id>
Import a zone-scoped job.
$ pulumi import cloudflare:index/logpushJob:LogpushJob example zone/<zone_id>/<job_id>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Cloudflare pulumi/pulumi-cloudflare
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the cloudflare Terraform Provider.