1.安装filebeat
[root@client ~]# wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.2.0-linux-x86_64.tar.gz
[root@client ~]# tar zxvf filebeat-7.2.0-linux-x86_64.tar.gz
[root@client ~]# mv filebeat-7.2.0-linux-x86_64 /usr/local/filebeat
[root@client ~]# cd /usr/local/filebeat
[root@client ~]# cp filebeat.yml filebeat.yml.bak
# 修改配置文件(需要指定日志path和服务器ip)
[root@client ~]# cat /usr/local/filebeat/filebeat.yml | grep -v "^$" |grep -v "^#" |grep -v "#"
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /usr/local/seektruth/logs/aeonlife_prod/info.*.log
  # exclude_files: [".gz$"]   # 移除这个目录下面相关gz结尾的文件
  # exclude_lines: ["^DBG"]   # 表示移除什么样的结尾的行。
  fields:
    appname: online-aeonlife-info-log
- type: log
  enabled: true
  paths:
    - /usr/local/seektruth/logs/aeonlife_prod/access.*.log
  fields:
    appname: online-aeonlife-access-log
- type: log
  enabled: true
  paths:
    - /usr/local/seektruth/logs/aeonlife_prod/error.*.log
  fields:
    appname: online-aeonlife-error-log
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false
setup.template.settings:
  index.number_of_shards: 1
setup.kibana:
#----------------------------- Logstash output --------------------------------
output.logstash:
  hosts: ["192.168.3.3:5044"]
processors:
  - add_host_metadata: ~
  - add_cloud_metadata: ~
#----------------------------- kafka output --------------------------------
#output.kafka:
#  enabled: true
#  hosts: ["192.168.3.3:9092"]
#  # topic表示filebeat将数据输出到topic为sparksys-log的主题下
#  topic: sparksys-log
2.启动服务
[root@client ~]# cd /usr/local/filebeat
[root@client ~]# nohup ./filebeat &
# 注意:filebeat没有监听端口,主要看日志和进程
[root@client filebeat]# ps aux |grep filebeat
root       3819  0.0  0.5  25944 11304 ?        Ssl  7月15   0:17 /usr/share/filebeat/bin/filebeat -c /etc/filebeat/filebeat.yml -path.home /usr/share/filebeat -path.config /etc/filebeat -path.data /var/lib/filebeat -path.logs /var/log/filebeat
root       3879  0.0  0.0 112664   972 pts/1    S+   15:29   0:00 grep --color=auto filebeat
root      26223  0.0  0.3  17488  7188 pts/1    Sl   15:00   0:00 ./filebeat