IMPORTANT: This version is no longer maintained; no additional bug fixes or documentation updates will be released for it. For the latest information, refer to Elastic's version policy and the current release documentation.
Create and upload config.yaml to S3 bucket
Elastic Serverless Forwarder requires a config.yaml file to be uploaded to an S3 bucket and referenced by the S3_CONFIG_FILE environment variable.
Save the following YAML content as config.yaml and edit it as required before uploading it to an S3 bucket (an illustrative upload sketch follows the example below). Remove any inputs or arguments you are not using, and make sure you have entered the correct URLs and credentials, as per the inline comments.
```yaml
inputs:
  - type: "s3-sqs"
    id: "arn:aws:sqs:%REGION%:%ACCOUNT%:%QUEUENAME%"
    outputs:
      - type: "elasticsearch"
        args:
          # either elasticsearch_url or cloud_id, elasticsearch_url takes precedence if both are included
          elasticsearch_url: "http(s)://domain.tld:port"
          cloud_id: "cloud_id:bG9jYWxob3N0OjkyMDAkMA=="
          # either api_key or username/password, username/password takes precedence if both are included
          api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo="
          username: "username"
          password: "password"
          es_datastream_name: "logs-generic-default"
          batch_max_actions: 500 # optional: default value is 500
          batch_max_bytes: 10485760 # optional: default value is 10485760
      - type: "logstash"
        args:
          logstash_url: "http(s)://host:port"
          username: "username" # optional
          password: "password" # optional
          max_batch_size: 500 # optional
          compression_level: 1 # optional
          ssl_assert_fingerprint: "22:F7:FB:84:1D:43:3E:E7:BB:F9:72:F3:D8:97:AD:7C:86:E3:08:42" # optional
  - type: "sqs"
    id: "arn:aws:sqs:%REGION%:%ACCOUNT%:%QUEUENAME%"
    outputs:
      - type: "elasticsearch"
        args:
          # either elasticsearch_url or cloud_id, elasticsearch_url takes precedence if both are included
          elasticsearch_url: "http(s)://domain.tld:port"
          cloud_id: "cloud_id:bG9jYWxob3N0OjkyMDAkMA=="
          # either api_key or username/password, username/password takes precedence if both are included
          api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo="
          username: "username"
          password: "password"
          es_datastream_name: "logs-generic-default"
          batch_max_actions: 500 # optional: default value is 500
          batch_max_bytes: 10485760 # optional: default value is 10485760
      - type: "logstash"
        args:
          logstash_url: "http(s)://host:port"
          username: "username" # optional
          password: "password" # optional
          max_batch_size: 500 # optional
          compression_level: 1 # optional
          ssl_assert_fingerprint: "22:F7:FB:84:1D:43:3E:E7:BB:F9:72:F3:D8:97:AD:7C:86:E3:08:42" # optional
  - type: "kinesis-data-stream"
    id: "arn:aws:kinesis:%REGION%:%ACCOUNT%:stream/%STREAMNAME%"
    outputs:
      - type: "elasticsearch"
        args:
          # either elasticsearch_url or cloud_id, elasticsearch_url takes precedence if both are included
          elasticsearch_url: "http(s)://domain.tld:port"
          cloud_id: "cloud_id:bG9jYWxob3N0OjkyMDAkMA=="
          # either api_key or username/password, username/password takes precedence if both are included
          api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo="
          username: "username"
          password: "password"
          es_datastream_name: "logs-generic-default"
          batch_max_actions: 500 # optional: default value is 500
          batch_max_bytes: 10485760 # optional: default value is 10485760
      - type: "logstash"
        args:
          logstash_url: "http(s)://host:port"
          username: "username" # optional
          password: "password" # optional
          max_batch_size: 500 # optional
          compression_level: 1 # optional
          ssl_assert_fingerprint: "22:F7:FB:84:1D:43:3E:E7:BB:F9:72:F3:D8:97:AD:7C:86:E3:08:42" # optional
  - type: "cloudwatch-logs"
    id: "arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:%LOG_GROUP_NAME%:*"
    outputs:
      - type: "elasticsearch"
        args:
          # either elasticsearch_url or cloud_id, elasticsearch_url takes precedence if both are included
          elasticsearch_url: "http(s)://domain.tld:port"
          cloud_id: "cloud_id:bG9jYWxob3N0OjkyMDAkMA=="
          # either api_key or username/password, username/password takes precedence if both are included
          api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo="
          username: "username"
          password: "password"
          es_datastream_name: "logs-generic-default"
          batch_max_actions: 500 # optional: default value is 500
          batch_max_bytes: 10485760 # optional: default value is 10485760
      - type: "logstash"
        args:
          logstash_url: "http(s)://host:port"
          username: "username" # optional
          password: "password" # optional
          max_batch_size: 500 # optional
          compression_level: 1 # optional
          ssl_assert_fingerprint: "22:F7:FB:84:1D:43:3E:E7:BB:F9:72:F3:D8:97:AD:7C:86:E3:08:42" # optional
  - type: "cloudwatch-logs"
    id: "arn:aws:logs:%AWS_REGION%:%AWS_ACCOUNT_ID%:log-group:%LOG_GROUP_NAME%:log-stream:%LOG_STREAM_NAME%"
    outputs:
      - type: "elasticsearch"
        args:
          # either elasticsearch_url or cloud_id, elasticsearch_url takes precedence if both are included
          elasticsearch_url: "http(s)://domain.tld:port"
          cloud_id: "cloud_id:bG9jYWxob3N0OjkyMDAkMA=="
          # either api_key or username/password, username/password takes precedence if both are included
          api_key: "YXBpX2tleV9pZDphcGlfa2V5X3NlY3JldAo="
          username: "username"
          password: "password"
          es_datastream_name: "logs-generic-default"
          batch_max_actions: 500 # optional: default value is 500
          batch_max_bytes: 10485760 # optional: default value is 10485760
      - type: "logstash"
        args:
          logstash_url: "http(s)://host:port"
          username: "username" # optional
          password: "password" # optional
          max_batch_size: 500 # optional
          compression_level: 1 # optional
          ssl_assert_fingerprint: "22:F7:FB:84:1D:43:3E:E7:BB:F9:72:F3:D8:97:AD:7C:86:E3:08:42" # optional
```
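Any method of uploading the file to S3 works, such as the AWS console, the AWS CLI, or an SDK. As a minimal illustrative sketch only, the following Python/boto3 snippet uploads config.yaml and points the forwarder's S3_CONFIG_FILE environment variable at it; the bucket name and Lambda function name are hypothetical placeholders, and if you deployed the forwarder with the SAM template you would normally set S3_CONFIG_FILE there rather than patching the function directly.

```python
"""Sketch: upload config.yaml to S3 and point the forwarder at it."""
import boto3

# Hypothetical placeholder values -- substitute your own.
BUCKET = "my-esf-config-bucket"
KEY = "config.yaml"
FUNCTION_NAME = "elastic-serverless-forwarder"

# Upload the edited config file to the bucket.
s3 = boto3.client("s3")
s3.upload_file(Filename="config.yaml", Bucket=BUCKET, Key=KEY)

# Point S3_CONFIG_FILE at the uploaded object. update_function_configuration
# replaces the function's whole environment, so merge the existing variables.
lambda_client = boto3.client("lambda")
config = lambda_client.get_function_configuration(FunctionName=FUNCTION_NAME)
env_vars = config.get("Environment", {}).get("Variables", {})
env_vars["S3_CONFIG_FILE"] = f"s3://{BUCKET}/{KEY}"
lambda_client.update_function_configuration(
    FunctionName=FUNCTION_NAME,
    Environment={"Variables": env_vars},
)
```

S3_CONFIG_FILE expects the S3 URL of the file in the form s3://bucket-name/config-file-name, and the forwarder's Lambda execution role must have read access to that object.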