锥智这套日志收集系统流程是:
各种日志文件 —> Filebeat —> Kafka —> Logstash —> Elasticsearch
Filebeat配置
###################### Filebeat Configuration Example #########################

#=========================== Filebeat inputs =============================
filebeat.inputs:
  # Plain application logs: one event per line, no multiline merging.
  - type: log
    enabled: true
    paths:
      - /data/app/*/logs/mqtt.log
      - /data/app/*/logs/exception.log
      - /data/app/*/logs/messages.log
      - /data/app/*/logs/requestcontext.log
      - /data/app/*/logs/responsecontext.log
      - /data/app/*/logs/transaction_tracking.log
      - /data/app/*/logs/transaction_tracking_step.log

  # run.log: Java application log. Stack traces span multiple physical
  # lines, so multiline merging is enabled for this input only.
  - type: log
    enabled: true
    exclude_files: []
    paths:
      - /data/app/*/logs/run.log

    ### Multiline options
    # A new event starts on a line beginning with a date (e.g. 2019-01-02,
    # 02-Jan-2019). negate+after: every line NOT matching the pattern is
    # appended to the preceding matching line — i.e. stack-trace lines are
    # folded into the log entry they belong to.
    multiline.pattern: '^\s*(\d{4}|\d{2})\-(\d{2}|[a-zA-Z]{3})\-(\d{2}|\d{4})'
    multiline.negate: true
    multiline.match: after

#============================= Filebeat modules ===============================
filebeat.config.modules:
  # Glob pattern for configuration loading
  path: ${path.config}/modules.d/*.yml
  # Set to true to enable config reloading
  reload.enabled: false

#==================== Elasticsearch template setting ==========================
setup.template.settings:
  index.number_of_shards: 3

#================================ General =====================================
#============================== Dashboards ====================================
#============================== Kibana ========================================
# Intentionally left empty — events go to Kafka, not directly to Kibana/ES.
setup.kibana:

#============================== Kafka output ==================================
output.kafka:
  # hosts: ['10.111.212.165:9092', '10.111.212.166:9092','10.111.212.167:9092', '10.111.212.168:9092']
  hosts: ["10.11.7.25:9092"]
  topic: file_logging
  # 1 = wait for the partition leader's acknowledgement only (not all
  # replicas; -1 would wait for the full ISR).
  required_acks: 1
配置文件其实还是比较好懂的
- 输入配置两拨文件,run.log需要多行处理
- 发送到Kafka,并且required_acks设为1:表示leader写入成功即确认,并不保证所有副本都收到(如需所有副本确认应设为-1/all)
Logstash
input {
  # Application file logs shipped by Filebeat via Kafka.
  kafka {
    bootstrap_servers => "10.11.7.25:9092"
    topics            => ["file_logging"]
    consumer_threads  => 8
    client_id         => "logstash-logging"
    codec             => json { charset => "UTF-8" }
    type              => "logging"
  }

  # Middleware monitoring metrics.
  kafka {
    bootstrap_servers => "10.11.7.25:9092"
    topics            => ["middleware_monitor"]
    consumer_threads  => 4
    client_id         => "logstash-monitor"
    codec             => json { charset => "UTF-8" }
    type              => "monitor"
  }

  # Server / host monitoring metrics (default json codec charset).
  kafka {
    bootstrap_servers => "10.11.7.25:9092"
    topics            => ["server_monitor_topic"]
    consumer_threads  => 4
    client_id         => "logstash-services"
    codec             => json
    type              => "services"
  }
}
filter {
  if [type] == "logging" {
    # Split the Filebeat "source" file path on "/" so individual path
    # segments can be promoted to fields below.
    mutate {
      split => { "source" => "/" }
    }

    # Assumes paths like /data/app/<service>/logs/<file>.log, which split
    # to ['', 'data', 'app', <service>, <dir>, <file>]:
    #   [3] -> service name, [5] -> log file name (used as "level").
    # NOTE(review): segment [4] is stored as "is_nginx" — presumably nginx
    # logs live under a different directory than "logs"; confirm against
    # the actual layout on the hosts.
    mutate {
      add_field => {
        "service"  => "%{[source][3]}"
        "is_nginx" => "%{[source][4]}"
        "level"    => "%{[source][5]}"
      }
    }

    # Drop shipper bookkeeping fields that are no longer needed downstream.
    mutate {
      remove_field => ["beat", "source", "offset", "prospector", "input", "@metadata"]
    }
  }
}
output {
  # Monitoring streams: one fixed daily index per event type.
  if [type] == "monitor" {
    elasticsearch {
      hosts   => ['10.11.7.24:9200']
      index   => "monitor-%{+YYYY.MM.dd}"
      timeout => 300
    }
  } else if [type] == "services" {
    elasticsearch {
      hosts   => ['10.11.7.24:9200']
      index   => "server-monitor-%{+YYYY.MM.dd}"
      timeout => 300
    }
  }

  # Application logs: route each source file ("level", set by the filter)
  # to its own daily index. An event whose level matches none of the
  # branches below produces no output (it is silently dropped).
  if [type] == "logging" {
    if [level] == "exception.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "exception-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "responsecontext.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "responsecontext-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "requestcontext.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "requestcontext-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "run.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "run-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    # Both transaction-tracking files share the "transaction-logging" index.
    if [level] == "transaction_tracking_step.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "transaction-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "transaction_tracking.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "transaction-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "messages.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "messages-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    # Nginx access and error logs share the "nginx-logging" index.
    if [level] == "access.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "nginx-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "error.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "nginx-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
    if [level] == "mqtt.log" {
      elasticsearch {
        hosts   => ['10.11.7.24:9200']
        index   => "mqtt-logging-%{+YYYY.MM.dd}"
        timeout => 300
      }
    }
  }
}