Create a pipeline that is used for Logstash Central Management. If the specified pipeline exists, it is replaced.
Requires the `manage_logstash_pipelines` cluster privilege. An identifier for the pipeline. Pipeline IDs must begin with a letter or underscore and contain only letters, underscores, dashes, and numbers.
A description of the pipeline. This description is not used by Elasticsearch or Logstash.
The date the pipeline was last updated.
It must be in the yyyy-MM-dd'T'HH:mm:ss.SSSZZ strict_date_time format.
The configuration for the pipeline.
Optional metadata about the pipeline, which can have any contents. This metadata is not generated or used by Elasticsearch or Logstash.
Settings for the pipeline. It supports only flat keys in dot notation.
The user who last updated the pipeline.
PUT _logstash/pipeline/my_pipeline
{
"description": "Sample pipeline for illustration purposes",
"last_modified": "2021-01-02T02:50:51.250Z",
"pipeline_metadata": {
"type": "logstash_pipeline",
"version": 1
},
"username": "elastic",
"pipeline": "input {}\n filter { grok {} }\n output {}",
"pipeline_settings": {
"pipeline.workers": 1,
"pipeline.batch.size": 125,
"pipeline.batch.delay": 50,
"queue.type": "memory",
"queue.max_bytes": "1gb",
"queue.checkpoint.writes": 1024
}
}
# Create (or replace) the "my_pipeline" pipeline used by Logstash Central
# Management. The `pipeline` field carries the Logstash configuration as a
# single string; the sections are separated with real newline escapes (\n),
# not literal backslash-n, so Logstash parses them as separate lines.
resp = client.logstash.put_pipeline(
    id="my_pipeline",
    pipeline={
        "description": "Sample pipeline for illustration purposes",
        "last_modified": "2021-01-02T02:50:51.250Z",
        "pipeline_metadata": {
            "type": "logstash_pipeline",
            "version": 1,
        },
        "username": "elastic",
        "pipeline": "input {}\n filter { grok {} }\n output {}",
        "pipeline_settings": {
            "pipeline.workers": 1,
            "pipeline.batch.size": 125,
            "pipeline.batch.delay": 50,
            "queue.type": "memory",
            "queue.max_bytes": "1gb",
            "queue.checkpoint.writes": 1024,
        },
    },
)
// Create (or replace) the "my_pipeline" pipeline used by Logstash Central
// Management. The `pipeline` field is the Logstash configuration as one
// string; sections are separated with real newline escapes (\n), not a
// literal backslash-n sequence.
const response = await client.logstash.putPipeline({
  id: "my_pipeline",
  pipeline: {
    description: "Sample pipeline for illustration purposes",
    last_modified: "2021-01-02T02:50:51.250Z",
    pipeline_metadata: {
      type: "logstash_pipeline",
      version: 1,
    },
    username: "elastic",
    pipeline: "input {}\n filter { grok {} }\n output {}",
    pipeline_settings: {
      "pipeline.workers": 1,
      "pipeline.batch.size": 125,
      "pipeline.batch.delay": 50,
      "queue.type": "memory",
      "queue.max_bytes": "1gb",
      "queue.checkpoint.writes": 1024,
    },
  },
});
# Create (or replace) the "my_pipeline" pipeline used by Logstash Central
# Management. The "pipeline" body field holds the Logstash configuration as
# one string with real newline escapes (\n) between sections.
response = client.logstash.put_pipeline(
  id: "my_pipeline",
  body: {
    "description": "Sample pipeline for illustration purposes",
    "last_modified": "2021-01-02T02:50:51.250Z",
    "pipeline_metadata": {
      "type": "logstash_pipeline",
      "version": 1
    },
    "username": "elastic",
    "pipeline": "input {}\n filter { grok {} }\n output {}",
    "pipeline_settings": {
      "pipeline.workers": 1,
      "pipeline.batch.size": 125,
      "pipeline.batch.delay": 50,
      "queue.type": "memory",
      "queue.max_bytes": "1gb",
      "queue.checkpoint.writes": 1024
    }
  }
)
// Create (or replace) the "my_pipeline" pipeline used by Logstash Central
// Management. The "pipeline" body field holds the Logstash configuration as
// one string with real newline escapes (\n) between sections.
$resp = $client->logstash()->putPipeline([
    "id" => "my_pipeline",
    "body" => [
        "description" => "Sample pipeline for illustration purposes",
        "last_modified" => "2021-01-02T02:50:51.250Z",
        "pipeline_metadata" => [
            "type" => "logstash_pipeline",
            "version" => 1,
        ],
        "username" => "elastic",
        "pipeline" => "input {}\n filter { grok {} }\n output {}",
        "pipeline_settings" => [
            "pipeline.workers" => 1,
            "pipeline.batch.size" => 125,
            "pipeline.batch.delay" => 50,
            "queue.type" => "memory",
            "queue.max_bytes" => "1gb",
            "queue.checkpoint.writes" => 1024,
        ],
    ],
]);
# Create (or replace) the "my_pipeline" Central Management pipeline; the JSON
# "pipeline" field uses \n (JSON newline escape) to separate config sections.
curl -X PUT -H "Authorization: ApiKey $ELASTIC_API_KEY" -H "Content-Type: application/json" -d '{"description":"Sample pipeline for illustration purposes","last_modified":"2021-01-02T02:50:51.250Z","pipeline_metadata":{"type":"logstash_pipeline","version":1},"username":"elastic","pipeline":"input {}\n filter { grok {} }\n output {}","pipeline_settings":{"pipeline.workers":1,"pipeline.batch.size":125,"pipeline.batch.delay":50,"queue.type":"memory","queue.max_bytes":"1gb","queue.checkpoint.writes":1024}}' "$ELASTICSEARCH_URL/_logstash/pipeline/my_pipeline"
{
"description": "Sample pipeline for illustration purposes",
"last_modified": "2021-01-02T02:50:51.250Z",
"pipeline_metadata": {
"type": "logstash_pipeline",
"version": 1
},
"username": "elastic",
"pipeline": "input {}\n filter { grok {} }\n output {}",
"pipeline_settings": {
"pipeline.workers": 1,
"pipeline.batch.size": 125,
"pipeline.batch.delay": 50,
"queue.type": "memory",
"queue.max_bytes": "1gb",
"queue.checkpoint.writes": 1024
}
}