[2021-10-29 03:39:12] saveData.INFO: saveData {"params":{"index":"fulfillments_1","id":5941107,"body":{"id":5941107,"shippingMethodId":null,"shippingMethodName":null,"pluginId":null,"shipToName":"tan2","shipToPhone":null,"shipToSuburb":"FRASER RISE","shipToState":"VIC","shipToPostcode":"3336","shipToCountry":"AU","shipToAddress1":"Second St","shipToAddress2":null,"shipToCompanyName":"eiz","shipToEmail":null,"fromAddress1":"tet-1","fromAddress2":null,"fromSuburb":"Moorabbin","fromState":"VIC","fromCountry":"AU","fromPostcode":"3189","fromCompany_name":"eiz","fromName":"jin2","fromPhone":"47658975","fromEmail":null,"carrierName":null,"labelNumber":[],"fulfillmentStatus":1,"consignments":[],"products":[{"id":4,"account_id":1,"product_id":4,"sku":"124","title":"dsadasds","weight":1,"length":11,"width":11,"height":11,"quantity":0,"location":null,"insured_amount":null,"status":0,"custom_label":null,"custom_label2":null,"custom_label3":null,"img_url":null,"barcode":null,"wms_stock":0,"pivot":{"fulfillment_id":5941107,"product_id":4,"qty":1,"note":null,"sku":"124"}}],"consignmentStatus":0,"picklistStatus":0,"createdAt":"2021-10-26 13:33:03","updatedAt":"2021-10-29 14:39:11","package_info":[{"packObj":[],"qty":"2","weight":"13","length":"6","width":"7","height":"8","package_id":null}],"price":null,"note":null,"tags":[{"id":95,"account_id":1,"parent_id":null,"name":"test","description":"{"name":"test","color":"#eb2f96"}"}],"errors":null,"tracking_status":0,"packageNum":2,"productNum":1,"autoQuoteResult":[],"orders":[],"log":[],"shipToRef":"TJ0000212"}}} []
解决方案:通过filebeat+es的pipeline就可以实现我们预期的效果,且不耗CPU,我们来动手试试,先看看实现效果:可以看到我们使用这种方法,依然实现了上一篇的效果
第一步:
先来看看我的项目目录:关于dockerfiles文件夹下的文件内容我都写在上一篇了,还有这块我用的是opensearch+opensearch-dashboards,其实和Elasticsearch+Kibana是一样的,上篇文章有说到
编辑docker-compose.yaml文件
version: "2.2"
services:
  opensearch:
    build:
      context: dockerfiles
      dockerfile: opensearch-no-security.dockerfile
    restart: always
    container_name: opensearch
    image: wangyi/opensearch:latest
    environment:
      - discovery.type=single-node
    ports:
      # Quote port mappings so YAML never misreads them as scalars.
      - "9200:9200"
      - "9600:9600"  # required for Performance Analyzer
    volumes:
      - opensearch-data1:/usr/share/opensearch/data
  opensearch-dashboards:
    build:
      context: dockerfiles
      dockerfile: opensearch-dashboards-no-security.dockerfile
    image: wangyi/opensearch-dashboard:latest
    container_name: opensearch-dashboards
    # Start after opensearch so the dashboard does not race the cluster.
    depends_on:
      - opensearch
    ports:
      - "5601:5601"
    environment:
      # must be a string with no spaces when specified as an environment variable
      OPENSEARCH_HOSTS: '["http://opensearch:9200"]'
  filebeat:
    build: ./filebeat
    restart: always
    container_name: filebeat
    # Filebeat ships to opensearch; bring the cluster up first.
    depends_on:
      - opensearch
    volumes:
      - ./storage/logs/:/tools/logs/
    user: root
volumes:
  opensearch-data1:
第二步:
配置filebeat文件夹下的Dockerfile文件:
FROM docker.elastic.co/beats/filebeat-oss:7.11.0

# Copy our custom configuration file
COPY ./filebeat.yml /usr/share/filebeat/filebeat.yml

USER root

# Create a directory to map volume with all docker log files
RUN mkdir /usr/share/filebeat/dockerlogs

# Filebeat refuses to load a config that is writable by group/other;
# make root the owner and strip write bits for everyone else.
RUN chown -R root /usr/share/filebeat/
RUN chmod -R go-w /usr/share/filebeat/
编辑filebeat.yml文件
filebeat.inputs:
  - type: log
    enabled: true
    paths:
      - /tools/logs/saveData/*/*/*.log
    # Server-side ingest pipeline; must match the pipeline id created in Kibana.
    pipeline: savedata_log
    fields:
      type: savedata_log
    # Only pick up newly appended lines; do not re-read existing files from the start.
    tail_files: true
  - type: log
    enabled: true
    paths:
      - /tools/logs/condition/*/*/*.log
    # Must match the second pipeline id created in Kibana.
    pipeline: my_pipeline_id
    fields:
      type: condition_log
    tail_files: true

setup.ilm.enabled: false
# When ILM is off and a custom index name is used, filebeat requires an
# explicit template name and pattern, otherwise it errors out on startup.
setup.template.name: "logs"
setup.template.pattern: "*_logs_*"
setup.template.settings:
  index.number_of_shards: 1
  index.number_of_replicas: 0
  index.codec: best_compression

output.elasticsearch:
  hosts: ["opensearch:9200"]
  indices:
    # Route each input to its own daily index, keyed on the custom field set above.
    - index: "savedata_logs_%{+yyyy.MM.dd}"
      when.equals:
        fields.type: "savedata_log"
    - index: "condition_logs_%{+yyyy.MM.dd}"
      when.equals:
        fields.type: "condition_log"
第三步:
启动三个服务,注意启动顺序
1.先启动opensearch(ES)docker-compose up -d opensearch
2.再启动filebeat docker-compose up -d filebeat
3.再启动opensearch-dashboards(Kibana) docker-compose up -d opensearch-dashboards
待所有服务启动成功后,我们打开Kibana后台,配置pipeline
# Create one pipeline per filebeat input: run this request once per pipeline id
# ("my_pipeline_id" and "savedata_log"). Each processor must be its OWN object
# in the "processors" array — grok and json cannot share one object.
# NOTE(review): Laravel lines can end with a trailing context array (" []");
# if the json processor fails on it, strip that suffix in the grok pattern first.
PUT _ingest/pipeline/my_pipeline_id
{
  "description": "Parse Laravel log lines: split the [timestamp] env.LEVEL prefix, then decode the JSON payload",
  "processors": [
    {
      "grok": {
        "field": "message",
        "patterns": ["""\[%{TIMESTAMP_ISO8601:logtime}\] %{WORD:env}\.%{LOGLEVEL:level}: %{WORD:params} %{GREEDYDATA:message}"""]
      }
    },
    {
      "json": {
        "field": "message"
      }
    }
  ]
}
大家可以通过GROK在线测试自己的正则
EFK一文就此结束,前前后后换了三次方案,最终得出这套完美的解决方案,CPU占用也看过了,非常理想!



