Loading a CSV file into Elasticsearch via Logstash is not working



I am trying to load a CSV file from a Linux system through Logstash (Docker based). Below is my conf file.

./logstash/pipeline/logstash_csv_report.conf
input {
  file {
    path => "/home/user/elk/logstash/report-file.csv"
    start_position => "beginning"
    sincedb_path => "/dev/null"
  }
}
filter {
  csv {
    separator => ","
    columns => ["start_time", "date", "requester", "full-name", "id", "config", "status"]
  }
}
output {
  elasticsearch {
    action => "index"
    hosts => "http://elasticsearch:9200"
    index => "project-info"
  }
  stdout {}
}
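
For reference, each row of report-file.csv carries the seven comma-separated fields named in the csv filter above. A made-up example row (the values are invented; only the shape matters):

08:15:00,2021-01-17,jdoe,John Doe,1001,default,success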

I don't understand why my CSV file is not being uploaded to Elasticsearch. The last few lines of my Logstash Docker logs are below; I don't see any errors in Logstash.

logstash         | [2021-01-18T04:12:36,076][INFO ][logstash.javapipeline    ][main] Pipeline Java execution initialization time {"seconds"=>1.1}
logstash         | [2021-01-18T04:12:36,213][INFO ][logstash.javapipeline    ][main] Pipeline started {"pipeline.id"=>"main"}
logstash         | [2021-01-18T04:12:36,280][INFO ][filewatch.observingtail  ][main][497c9eb0da97efa19ad20783321e7bf30eb302262f92ac565b074e3ad91ea72d] START, creating Discoverer, Watch with file and sincedb collections
logstash         | [2021-01-18T04:12:36,282][INFO ][logstash.agent           ] Pipelines running {:count=>2, :running_pipelines=>[:".monitoring-logstash", :main], :non_running_pipelines=>[]}
logstash         | [2021-01-18T04:12:36,474][INFO ][logstash.agent           ] Successfully started Logstash API endpoint {:port=>9600}

My docker-compose file is as follows.

version: '3.7'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:7.10.1
    container_name: elasticsearch
    restart: unless-stopped
    environment:
      - node.name=elasticsearch
      - discovery.seed_hosts=elasticsearch
      - cluster.initial_master_nodes=elasticsearch
      - cluster.name=docker-cluster
      - bootstrap.memory_lock=true
      - "ES_JAVA_OPTS=-Xms1g -Xmx1g"
    ulimits:
      memlock:
        soft: -1
        hard: -1
    ports:
      - '9200:9200'
      - '9300:9300'
    volumes:
      - './elasticsearch:/usr/share/elasticsearch/data'
    networks:
      - elk
  kibana:
    image: docker.elastic.co/kibana/kibana:7.10.1
    container_name: kibana
    restart: unless-stopped
    environment:
      ELASTICSEARCH_URL: "http://elasticsearch:9200"
    ports:
      - '5601:5601'
    volumes:
      - './kibana:/usr/share/kibana/data'
    depends_on:
      - elasticsearch
    networks:
      - elk
  logstash:
    image: docker.elastic.co/logstash/logstash:7.10.1
    container_name: logstash
    restart: unless-stopped
    environment:
      - 'HEAP_SIZE:1g'
      - 'LS_JAVA_OPTS=-Xms1g -Xmx1g'
      - 'ELASTICSEARCH_HOST:elasticsearch'
      - 'ELASTICSEARCH_PORT:9200'
    command: sh -c "logstash -f /usr/share/logstash/pipeline/logstash_csv_report.conf"
    ports:
      - '5044:5044'
      - '5000:5000/tcp'
      - '5000:5000/udp'
      - '9600:9600'
    volumes:
      - './logstash/pipeline:/usr/share/logstash/pipeline'
    depends_on:
      - elasticsearch
    networks:
      - elk
networks:
  elk:
    driver: bridge

In my ./logstash/pipeline folder I only have the logstash_csv_report.conf file.
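
To rule out a problem with the pipeline mount itself, the mounted folder can be listed from inside the container (container name as in the compose file above):

docker exec logstash ls -l /usr/share/logstash/pipeline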

The same CSV file uploads fine through the Kibana GUI using the import option.

Could someone help me get this upload working through Logstash?

curl output:

# curl -XGET http://51.52.53.54:9600/_node/stats/?pretty
{
  "host" : "3c08f83dfc9b",
  "version" : "7.10.1",
  "http_address" : "0.0.0.0:9600",
  "id" : "5f301139-33bf-4e4d-99a0-7b4d7b464675",
  "name" : "3c08f83dfc9b",
  "ephemeral_id" : "95a0101e-e54d-4f72-aa7a-dd18ccb2814e",
  "status" : "green",
  "snapshot" : false,
  "pipeline" : {
    "workers" : 64,
    "batch_size" : 125,
    "batch_delay" : 50
  },
  "jvm" : {
    "threads" : {
      "count" : 157,
      "peak_count" : 158
    },
    "mem" : {
      "heap_used_percent" : 16,
      "heap_committed_in_bytes" : 4151836672,
      "heap_max_in_bytes" : 4151836672,
      "heap_used_in_bytes" : 689455928,
      "non_heap_used_in_bytes" : 190752760,
      "non_heap_committed_in_bytes" : 218345472,
      "pools" : {
        "survivor" : {
          "peak_max_in_bytes" : 143130624,
          "max_in_bytes" : 143130624,
          "committed_in_bytes" : 143130624,
          "peak_used_in_bytes" : 65310304,
          "used_in_bytes" : 39570400
        },
        "old" : {
          "peak_max_in_bytes" : 2863333376,
          "max_in_bytes" : 2863333376,
          "committed_in_bytes" : 2863333376,
          "peak_used_in_bytes" : 115589344,
          "used_in_bytes" : 115589344
        },
        "young" : {
          "peak_max_in_bytes" : 1145372672,
          "max_in_bytes" : 1145372672,
          "committed_in_bytes" : 1145372672,
          "peak_used_in_bytes" : 1145372672,
          "used_in_bytes" : 534296184
        }
      }
    },
    "gc" : {
      "collectors" : {
        "old" : {
          "collection_count" : 3,
          "collection_time_in_millis" : 1492
        },
        "young" : {
          "collection_count" : 7,
          "collection_time_in_millis" : 303
        }
      }
    },
    "uptime_in_millis" : 4896504
  },
  "process" : {
    "open_file_descriptors" : 91,
    "peak_open_file_descriptors" : 92,
    "max_file_descriptors" : 1048576,
    "mem" : {
      "total_virtual_in_bytes" : 21971415040
    },
    "cpu" : {
      "total_in_millis" : 478180,
      "percent" : 0,
      "load_average" : {
        "1m" : 1.35,
        "5m" : 0.7,
        "15m" : 0.53
      }
    }
  },
  "events" : {
    "in" : 0,
    "filtered" : 0,
    "out" : 0,
    "duration_in_millis" : 0,
    "queue_push_duration_in_millis" : 0
  },
  "pipelines" : {
    "main" : {
      "events" : {
        "out" : 0,
        "duration_in_millis" : 0,
        "queue_push_duration_in_millis" : 0,
        "filtered" : 0,
        "in" : 0
      },
      "plugins" : {
        "inputs" : [ {
          "id" : "497c9eb0da97efa19ad20783321e7bf30eb302262f92ac565b074e3ad91ea72d",
          "events" : {
            "out" : 0,
            "queue_push_duration_in_millis" : 0
          },
          "name" : "file"
        } ],
        "codecs" : [ {
          "id" : "rubydebug_a060ea28-52ce-4186-a474-272841e0429e",
          "decode" : {
            "out" : 0,
            "writes_in" : 0,
            "duration_in_millis" : 0
          },
          "encode" : {
            "writes_in" : 0,
            "duration_in_millis" : 2
          },
          "name" : "rubydebug"
        }, {
          "id" : "plain_d2037602-bfe9-4eaf-8cc8-0a84665fa186",
          "decode" : {
            "out" : 0,
            "writes_in" : 0,
            "duration_in_millis" : 0
          },
          "encode" : {
            "writes_in" : 0,
            "duration_in_millis" : 0
          },
          "name" : "plain"
        }, {
          "id" : "plain_1c01f964-82e5-45a1-b9f9-a400bc2ac486",
          "decode" : {
            "out" : 0,
            "writes_in" : 0,
            "duration_in_millis" : 0
          },
          "encode" : {
            "writes_in" : 0,
            "duration_in_millis" : 0
          },
          "name" : "plain"
        } ],
        "filters" : [ {
          "id" : "3eee98d7d4b500333a2c45a729786d4d2aefb7cee7ae79b066a50a1630312b25",
          "events" : {
            "out" : 0,
            "duration_in_millis" : 39,
            "in" : 0
          },
          "name" : "csv"
        } ],
        "outputs" : [ {
          "id" : "8959d62efd3616a9763067781ec2ff67a7d8150d6773a48fc54f71478a9ef7ab",
          "events" : {
            "out" : 0,
            "duration_in_millis" : 0,
            "in" : 0
          },
          "name" : "elasticsearch"
        }, {
          "id" : "b457147a2293c2dee97b6ee9a5205de24159b520e86eb89be71fde7ba394a0d2",
          "events" : {
            "out" : 0,
            "duration_in_millis" : 22,
            "in" : 0
          },
          "name" : "stdout"
        } ]
      },
      "reloads" : {
        "last_success_timestamp" : null,
        "last_error" : null,
        "successes" : 0,
        "failures" : 0,
        "last_failure_timestamp" : null
      },
      "queue" : {
        "type" : "memory",
        "events_count" : 0,
        "queue_size_in_bytes" : 0,
        "max_queue_size_in_bytes" : 0
      },
      "hash" : "3479b7408213a7b52f36d8ad3dbd5a3174768a004119776e0244ed1971814f72",
      "ephemeral_id" : "ffc4d5d6-6f90-4c24-8b2a-e932d027a5f2"
    },
    ".monitoring-logstash" : {
      "events" : null,
      "plugins" : {
        "inputs" : [ ],
        "codecs" : [ ],
        "filters" : [ ],
        "outputs" : [ ]
      },
      "reloads" : {
        "last_success_timestamp" : null,
        "last_error" : null,
        "successes" : 0,
        "failures" : 0,
        "last_failure_timestamp" : null
      },
      "queue" : null
    }
  },
  "reloads" : {
    "successes" : 0,
    "failures" : 0
  },
  "os" : {
    "cgroup" : {
      "cpuacct" : {
        "usage_nanos" : 478146261497,
        "control_group" : "/"
      },
      "cpu" : {
        "cfs_quota_micros" : -1,
        "stat" : {
          "number_of_times_throttled" : 0,
          "time_throttled_nanos" : 0,
          "number_of_elapsed_periods" : 0
        },
        "control_group" : "/",
        "cfs_period_micros" : 100000
      }
    }
  },
  "queue" : {
    "events_count" : 0
  }
}
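
Note that the event counters for the main pipeline are all 0 (in, filtered, out), so nothing is even entering the pipeline. A quick way to poll just those counters (assuming the API port is reachable from the host, as in the request above):

curl -s 'http://51.52.53.54:9600/_node/stats/events?pretty'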

You need to make sure that Logstash can read /home/user/elk/logstash/report-file.csv. I don't see that file being mapped into any volume that Logstash can access.

In your docker-compose configuration, you need to add another volume, like this:

logstash:
  ...
  volumes:
    - './logstash/pipeline:/usr/share/logstash/pipeline'
    - '/home/user/elk/logstash:/home/user/elk/logstash'
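
After adding that volume, recreate the container and check that the file is now visible from inside it (container name and paths as in your compose file):

docker-compose up -d logstash
docker exec logstash ls -l /home/user/elk/logstash/report-file.csv

Since sincedb_path is set to /dev/null, Logstash will re-read the file from the beginning on restart, and the rows should then show up in the project-info index.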
