date: 2020-07-16
title: filebeat及logstash配置 #标题
tags: elk线上配置 #标签
categories: elastic stack # 分类

记录下filebeat及logstash配置语法。

配置filebeat收集nginx日志及java日志

  1. filebeat.inputs:
  2. - type: log
  3. enabled: True
  4. fields:
  5. log_type: nginx-access
  6. project_name: shsp
  7. log_topic: common_nginx
  8. fields_under_root: true
  9. paths:
  10. - /apps/usr/nginx/logs/access.log
  11. - type: log
  12. enabled: True
  13. fields:
  14. project_name: shsp
  15. log_type: nginx-error
  16. log_topic: common_nginx
  17. fields_under_root: true
  18. paths:
  19. - /apps/usr/nginx/logs/error.log
  20. - type: log
  21. enabled: True
  22. multiline.pattern: '^[[:space:]]+|^Caused by:'
  23. multiline.negate: false
  24. multiline.match: after
  25. fields:
  26. project_name: shsp
  27. log_type: app_log
  28. log_topic: app_all
  29. fields_under_root: true
  30. paths:
  31. - /apps/usr/appdata/logs/*.log
  32. output.kafka:
  33. hosts: ["192.168.20.2:9092", "192.168.20.3:9092", "192.168.20.4:9092"]
  34. topic: '%{[log_topic]}'
  35. partition.round_robin:
  36. reachable_only: false
  37. required_acks: 1
  38. compression: gzip
  39. max_message_bytes: 1000000
  40. processors:
  41. - drop_fields:
  42. fields: ['ecs', 'beat', 'input', '@version', 'agent']

logstash处理nginx日志

  1. input {
  2. kafka {
  3. bootstrap_servers => "192.168.20.2:9092,192.168.20.3:9092,192.168.20.4:9092"
  4. topics => ["common_nginx"]
  5. codec => json { charset => "UTF-8" }
  6. group_id => "standard"
  7. consumer_threads => 8
  8. }
  9. }
  10. filter {
  11. if [log_type] == "nginx-access" {
  12. grok {
  13. match => { "message" => ["%{IPORHOST:[access][remote_ip]} - %{DATA:[access][user_name]} \[%{HTTPDATE:[access][time]}\] \"%{WORD:[access][method]} %{DATA:[access][url]} HTTP/%{NUMBER:[access][http_version]}\" %{NUMBER:[access][response_code]} %{NUMBER:[access][body_sent][bytes]} \"%{DATA:[access][referrer]}\" \"%{DATA:[access][agent]}\""] }
  14. remove_field => "message"
  15. }
  16. mutate {
  17. add_field => { "read_timestamp" => "%{@timestamp}" }
  18. }
  19. date {
  20. match => [ "[access][time]", "dd/MMM/yyyy:HH:mm:ss Z" ]
  21. remove_field => "[access][time]"
  22. }
  23. useragent {
  24. source => "[access][agent]"
  25. target => "[access][user_agent]"
  26. remove_field => "[access][agent]"
  27. }
  28. geoip {
  29. source => "[access][remote_ip]"
  30. target => "[geoip]"
  31. }
  32. }
  33. else if [log_type] == "nginx-error" {
  34. grok {
  35. match => { "message" => ["%{DATA:[error][time]} \[%{DATA:[error][level]}\] %{NUMBER:[error][pid]}#%{NUMBER:[error][tid]}: (\*%{NUMBER:[error][connection_id]} )?%{GREEDYDATA:[error][message]}"] }
  36. remove_field => "message"
  37. }
  38. mutate {
  39. rename => { "@timestamp" => "read_timestamp" }
  40. }
  41. date {
  42. match => [ "[error][time]", "yyyy/MM/dd HH:mm:ss"]
  43. remove_field => "[error][time]"
  44. }
  45. }
  46. }
  47. output {
  48. #stdout {
  49. # codec => rubydebug
  50. #}
  51. elasticsearch {
  52. hosts => [ "192.168.20.11:9200","192.168.20.12:9200","192.168.20.13:9200" ]
  53. user => "elastic"
  54. password => "abcd"
  55. index => "logstash-nginx-%{project_name}-%{+YYYY.MM.dd}"
  56. codec => json { charset => "UTF-8" }
  57. }
  58. }

logstash处理java日志

  1. input {
  2. kafka {
  3. bootstrap_servers => "192.168.20.2:9092,192.168.20.3:9092,192.168.20.4:9092"
  4. topics => ["app_all"]
  5. codec => json { charset => "UTF-8" }
  6. group_id => "standard"
  7. consumer_threads => 8
  8. }
  9. }
  10. filter {
  11. mutate {
  12. add_field => { "log_path" => "%{[log][file][path]}" }
  13. add_field => { "host_name" => "%{[host][name]}"}
  14. }
  15. mutate {
  16. split => ["[log][file][path]", "/"]
  17. }
  18. mutate {
  19. split => ["[log][file][path][-1]", "."]
  20. }
  21. mutate {
  22. add_field => { "log_name" => "%{[log][file][path][-1][0]}" }
  23. }
  24. mutate{
  25. remove_field => ["log", "host"]
  26. }
  27. }
  28. output {
  29. #stdout {
  30. # codec => rubydebug
  31. #}
  32. elasticsearch {
  33. hosts => [ "192.168.20.11:9200","192.168.20.12:9200","192.168.20.13:9200" ]
  34. user => "elastic"
  35. password => "abcd"
  36. index => "logstash-app_%{project_name}-%{+YYYY.MM.dd}"
  37. codec => json { charset => "UTF-8" }
  38. }
  39. }