Logstash 7.11.1
1、Point
```
input {
  jdbc {
    jdbc_driver_library => "D:/elk/logstash-7.11.1/test/postgresql-42.2.19.jar"
    jdbc_driver_class => "org.postgresql.Driver"
    jdbc_connection_string => "jdbc:postgresql://localhost:5432/waterway?characterEncoding=utf8&serverTimezone=GMT%2B8"
    jdbc_user => "postgres"
    jdbc_password => "953598751"
    schedule => "*/5 * * * * *"
    statement => "select gid id, ST_AsGeoJSON(ST_Force2D(geom)) geojson, name from polylines where gid = 134"
    tracking_column => "id"
    tracking_column_type => "numeric"
    id => "geotest"
    use_column_value => true
    jdbc_default_timezone => "Asia/Shanghai"
  }
}

filter {
  json {
    source => "geojson"
  }
  mutate {
    add_field => { "location" => "%{[coordinates][1]},%{[coordinates][0]}" }
  }
  mutate {
    remove_field => ["@version", "geojson", "coordinates", "@timestamp"]
  }
}

output {
  stdout {
    codec => rubydebug
  }
  # elasticsearch {
  #   hosts => ["localhost:9200"]
  #   index => "example"
  #   document_id => "%{id}"
  #   user => "elastic"
  #   password => "953598751"
  # }
}

############ ES ##########
PUT index_point
{
  "mappings": {
    "properties": {
      "location": {
        "type": "geo_point"
      }
    }
  }
}
```
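Once the elasticsearch output above is uncommented and pointed at the index created with the geo_point mapping, the data can be exercised with a geo_distance query. The following is a minimal sketch, assuming the documents were written into index_point; the 10km radius and the centre coordinates are illustrative values, not taken from the data:

```
# Hedged example: find documents within 10 km of a point
# (assumes the pipeline above indexed into index_point)
GET index_point/_search
{
  "query": {
    "geo_distance": {
      "distance": "10km",
      "location": {
        "lat": 30.5,
        "lon": 114.3
      }
    }
  }
}
```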
2、General
```
input {
  jdbc {
    jdbc_driver_library => "D:/elk/logstash-7.11.1/test/postgresql-42.2.19.jar"
    jdbc_driver_class => "org.postgresql.Driver"
    jdbc_connection_string => "jdbc:postgresql://localhost:5432/waterway?characterEncoding=utf8&serverTimezone=GMT%2B8"
    jdbc_user => "postgres"
    jdbc_password => "953598751"
    schedule => "*/5 * * * * *"
    statement => "select gid id, st_astext(ST_Force2D(geom)) locations, name from polylines where gid = 134"
    tracking_column => "id"
    tracking_column_type => "numeric"
    id => "geotest"
    use_column_value => true
    jdbc_default_timezone => "Asia/Shanghai"
    type => "polylines"
  }
}

filter {
  # json {
  #   source => "geojson"
  # }
  # mutate {
  #   add_field => { "location" => "" }
  # }
  mutate {
    remove_field => ["@version", "@timestamp"]
  }
}

output {
  stdout {
    codec => rubydebug
  }
  # elasticsearch {
  #   hosts => ["localhost:9200"]
  #   index => "example"
  #   document_id => "%{id}"
  #   user => "elastic"
  #   password => "953598751"
  # }
}
```
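Here st_astext(...) returns WKT (for example LINESTRING (...)), and a geo_shape field can ingest WKT strings directly. Below is a minimal sketch of a matching index plus an intersects query; the index name index_shape and the envelope coordinates are assumptions for illustration, since the elasticsearch output above is commented out (section 3 shows the template-based way to create the geo_shape mapping):

```
############ ES ##########
PUT index_shape
{
  "mappings": {
    "properties": {
      "locations": { "type": "geo_shape" }
    }
  }
}

# Hedged example: find shapes that intersect a bounding box (coordinates are illustrative)
GET index_shape/_search
{
  "query": {
    "geo_shape": {
      "locations": {
        "shape": {
          "type": "envelope",
          "coordinates": [ [113.0, 31.0], [115.0, 29.0] ]
        },
        "relation": "intersects"
      }
    }
  }
}
```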
3、Output ES Template
order: the priority of the template. When an index matches multiple templates, the template with the larger order value takes precedence. The default is 0.
settings: index-level configuration such as the number of shards and replicas, translog sync conditions, and the refresh policy.

| Setting | Description |
| ---- | ---- |
| number_of_replicas | Number of replica shards |
| number_of_shards | Number of primary shards |
| index.refresh_interval | Refresh interval, i.e. how often newly indexed documents become searchable (the translog fsync interval is a separate setting) |
mappings: the internal structure of the index. The main entries are:

- _all: the All field. When enabled ("_all": {"enabled": true}), the _all field concatenates the content of every other field, so a query does not have to name a specific field; it effectively searches across multiple fields. Starting with ES 6.0 the _all field is disabled, and copy_to can be used instead to build a custom catch-all field.
- _source: the Source field. ES keeps a copy of the original document for every document. If it is disabled ("_source": {"enabled": false}), queries only return the document ID and all other values have to be fetched from the index again via stored fields, which is very inefficient. Keeping it enabled makes the index larger; the size can be reduced with compression, and includes/excludes can restrict which fields are stored in _source.
- _default_: the document type, which can be named freely; it defines the data types of the fields under that document type. To apply one mapping to every document type in the index, name it _default_. (Mapping types, including _default_, were removed in ES 7.0, so this only applies to older clusters.)
- dynamic: controls what Elasticsearch does with a field that appears in a document but not in the mapping. Only the values defined by Elasticsearch may be used, as listed in the table below.

| dynamic | Description |
| ---- | ---- |
| true | A field not present in the mapping is automatically mapped, added to the mapping, and indexed. |
| false | A field not present in the mapping is neither added to the mapping nor indexed, but it is stored and visible in _source; it cannot be searched. |
| strict | A field not present in the mapping causes an exception and the document is rejected. |

A small template combining these settings and mapping options is sketched below.
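As a hedged illustration of the options above (not part of the pipeline in this article), the following sketch registers a legacy template that sets a refresh interval, excludes one field from _source, and uses dynamic: strict. The pattern demo_* and all field names are assumptions:

```
PUT _template/demo_strict
{
  "index_patterns": "demo_*",
  "order": 1,
  "settings": {
    "number_of_shards": 1,
    "number_of_replicas": 0,
    "index.refresh_interval": "30s"
  },
  "mappings": {
    "dynamic": "strict",
    "_source": {
      "excludes": ["raw_payload"]
    },
    "properties": {
      "id": { "type": "keyword" },
      "name": { "type": "text" },
      "raw_payload": { "type": "text", "index": false }
    }
  }
}
```

With dynamic set to strict, indexing a document into a demo_* index that contains any field other than id, name, or raw_payload is rejected with a strict_dynamic_mapping_exception.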
3.1 Static Template
```
########## template.json ##########
{
  "index_patterns": "index_*",
  "order": 1000,
  "settings": {
    "number_of_shards": "5",
    "number_of_replicas": "0"
  },
  "mappings": {
    "properties": {
      "locations": {
        "type": "geo_shape"
      },
      "id": {
        "type": "keyword"
      },
      "type": {
        "type": "text"
      },
      "name": {
        "type": "text",
        "analyzer": "ik_max_word"
      },
      "username": {
        "type": "text",
        "analyzer": "ik_max_word"
      }
    }
  },
  "aliases": {}
}

########## config.conf ##########
output {
  stdout {
    codec => rubydebug
  }
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "index_my"
    document_id => "%{id}"
    template => "D:/elk/logstash-7.11.1/test/template/template.json"
    template_name => "index_my"
    template_overwrite => true
    user => "elastic"
    password => "953598751"
  }
}
```
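After Logstash starts, the template upload and the IK-analyzed field can be checked from Kibana Dev Tools. This is a minimal sketch, assuming index_my has already received documents; the search term is only illustrative:

```
# Confirm the template was installed by the elasticsearch output (legacy template API)
GET _template/index_my

# Hedged example: full-text search on the ik_max_word analyzed name field
GET index_my/_search
{
  "query": {
    "match": { "name": "长江" }
  }
}
```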
3.2 Combining the Pinyin Analyzer with the IK Analyzer
```
########## template.json ##########
{
  "index_patterns": "index_*",
  "order": 1000,
  "settings": {
    "number_of_shards": "5",
    "number_of_replicas": "0",
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "type": "custom",
          "tokenizer": "ik_max_word",
          "filter": ["lowercase", "my_pinyin"]
        }
      },
      "filter": {
        "my_pinyin": {
          "type": "pinyin",
          "keep_separate_first_letter": true,
          "keep_full_pinyin": true,
          "keep_first_letter": true,
          "keep_original": false,
          "limit_first_letter_length": 10,
          "lowercase": true,
          "remove_duplicated_term": true
        }
      }
    }
  },
  "mappings": {
    "_source": {
      "enabled": true
    },
    "properties": {
      "locations": {
        "type": "geo_shape"
      },
      "id": {
        "type": "keyword"
      },
      "type": {
        "type": "text"
      },
      "name": {
        "type": "text",
        "analyzer": "my_analyzer"
      }
    }
  },
  "aliases": {}
}

########## config.conf ##########
output {
  stdout {
    codec => rubydebug
  }
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "index_my"
    document_id => "%{id}"
    template => "D:/elk/logstash-7.11.1/test/template/template.json"
    template_name => "index_my"
    template_overwrite => true
    user => "elastic"
    password => "953598751"
  }
}
```
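The custom analyzer can be inspected with the _analyze API once the index exists. This is a minimal sketch; the sample text and the pinyin query term are illustrative, and whether a given pinyin form matches depends on the my_pinyin filter options above:

```
# Inspect the tokens produced by my_analyzer (IK tokenization followed by the pinyin filter)
GET index_my/_analyze
{
  "analyzer": "my_analyzer",
  "text": "长江"
}

# Hedged example: with keep_first_letter enabled, an initials query such as "cj" can match "长江"
GET index_my/_search
{
  "query": {
    "match": { "name": "cj" }
  }
}
```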
