image.png

    1. Flume要想将数据输出到HDFS,必须持有Hadoop相关jar
    2. commons-configuration-1.6.jar
    3. hadoop-auth-2.7.2.jar
    4. hadoop-common-2.7.2.jar
    5. hadoop-hdfs-2.7.2.jar
    6. commons-io-2.4.jar
    7. htrace-core-3.1.0-incubating.jar
    8. 拷贝到/opt/egg/flume/lib文件夹下。
    9. 创建本次案例的配置文件
    10. vim flume-file-hdfs.conf
    11. # Name the components on this agent
    12. a2.sources = r2
    13. a2.sinks = k2
    14. a2.channels = c2
    15. # Describe/configure the source
    16. a2.sources.r2.type = exec
    17. #选择监控的文件
    18. a2.sources.r2.command = tail -F /opt/ha/hadoop-2.7.2/logs/hadoop-root-namenode-hadoop1.log
    19. #执行脚本的绝对路径
    20. a2.sources.r2.shell = /bin/bash -c
    21. # Describe the sink
    22. #sink类型
    23. a2.sinks.k2.type = hdfs
    24. a2.sinks.k2.hdfs.path = hdfs://hadoop1:9000/flume/%Y%m%d/%H
    25. #上传文件的前缀
    26. a2.sinks.k2.hdfs.filePrefix = logs-
    27. #是否按照时间滚动文件夹
    28. a2.sinks.k2.hdfs.round = true
    29. #多少时间单位创建一个新的文件夹
    30. a2.sinks.k2.hdfs.roundValue = 1
    31. #重新定义时间单位
    32. a2.sinks.k2.hdfs.roundUnit = hour
    33. #是否使用本地时间戳
    34. a2.sinks.k2.hdfs.useLocalTimeStamp = true
    35. #积攒多少个Event才flush到HDFS一次
    36. a2.sinks.k2.hdfs.batchSize = 1000
    37. #设置文件类型,可支持压缩
    38. a2.sinks.k2.hdfs.fileType = DataStream
    39. #多久生成一个新的文件
    40. a2.sinks.k2.hdfs.rollInterval = 60
    41. #设置每个文件的滚动大小
    42. a2.sinks.k2.hdfs.rollSize = 134217700
    43. #文件的滚动与Event数量无关
    44. a2.sinks.k2.hdfs.rollCount = 0
    45. # Use a file channel which persists events to disk
    46. a2.channels.c2.type = file
    47. a2.channels.c2.checkpointDir = /opt/flume/checkpoint
    48. a2.channels.c2.dataDirs = /opt/flume/data
    49. a2.channels.c2.capacity = 1000
    50. a2.channels.c2.transactionCapacity = 100
    51. # Bind the source and sink to the channel
    52. a2.sources.r2.channels = c2
    53. a2.sinks.k2.channel = c2
    54. 运行flume
    55. bin/flume-ng agent --conf conf/ --name a2 --conf-file job/flume-file-hdfs.conf
    56. 开启Hadoop、Hive并操作Hive产生日志
    57. sbin/start-dfs.sh
    58. sbin/start-yarn.sh
    59. bin/hive
    60. hdfs上查看文件
    61. hadoop fs -ls -R /flume/20190901
    62. drwxr-xr-x - root supergroup 0 2019-09-01 16:01 /flume/20190901/16
    63. -rw-r--r-- 3 root supergroup 307944 2019-09-01 16:01 /flume/20190901/16/logs-.1567324803944
    64. -rw-r--r-- 3 root supergroup 0 2019-09-01 16:01 /flume/20190901/16/logs-.1567324914795.tmp