1. 首页 > Linux教程 > 正文

Linux教程FG524-Linux企业级日志分析平台

内容简介:本文风哥教程参考Linux官方文档、Red Hat Enterprise Linux官方文档以及Elasticsearch、Logstash、Kibana和Beats等Elastic Stack官方文档内容,详细介绍了企业级日志分析平台的配置和使用方法。

风哥提示:

本文档介绍Linux企业级日志分析平台部署和配置实战。

Part01-ELK Stack高级配置

1.1 Elasticsearch集群优化

# Configure advanced Elasticsearch parameters.
# NOTE(review): since ES 5.x, per-index settings (index.number_of_shards,
# index.number_of_replicas, index.refresh_interval) are rejected in
# elasticsearch.yml and prevent the node from starting; they are kept below
# as comments and should be applied through an index template instead.
[root@fgedu-elastic1 ~]# cat > /etc/elasticsearch/elasticsearch.yml << 'EOF'
cluster.name: fgedu-logs
node.name: fgedu-elastic1
node.roles: [master, data, ingest]
# 网络配置
network.host: 192.168.1.10
http.port: 9200
transport.port: 9300
# 集群发现
discovery.seed_hosts: ["192.168.1.10", "192.168.1.11", "192.168.1.12"]
cluster.initial_master_nodes: ["fgedu-elastic1", "fgedu-elastic2", "fgedu-elastic3"]
# 内存配置
bootstrap.memory_lock: true
# 路径配置
path.data: /data/elasticsearch
path.logs: /var/log/elasticsearch
# 索引配置 (通过索引模板设置, 不能写在 elasticsearch.yml 中):
#   index.number_of_shards: 5
#   index.number_of_replicas: 1
#   index.refresh_interval: 30s
# 线程池配置
thread_pool:
  search:
    size: 30
    queue_size: 1000
  write:
    size: 30
    queue_size: 1000
# 网关配置
gateway:
  recover_after_nodes: 2
  recover_after_time: 5m
  expected_nodes: 3
# 安全配置
xpack.security.enabled: true
xpack.security.transport.ssl.enabled: true
xpack.security.transport.ssl.verification_mode: certificate
xpack.security.transport.ssl.keystore.path: elastic-certificates.p12
xpack.security.transport.ssl.truststore.path: elastic-certificates.p12
# 监控配置
xpack.monitoring.enabled: true
xpack.monitoring.collection.enabled: true
EOF

# Configure the JVM: heap fixed at 16 GB (Xms == Xmx avoids resize pauses),
# G1 GC tuned for large heaps, heap dump on OOM for post-mortem analysis.
[root@fgedu-elastic1 ~]# cat > /etc/elasticsearch/jvm.options.d/performance.options << 'EOF'
-Xms16g
-Xmx16g
-XX:+UseG1GC
-XX:G1HeapRegionSize=16m
-XX:InitiatingHeapOccupancyPercent=30
-XX:MaxGCPauseMillis=200
-XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=/var/log/elasticsearch/heapdump.hprof
EOF

# Create the index lifecycle (ILM) policy:
# hot (rollover at 50 GB / 1 day) -> warm at 7d (forcemerge + shrink)
# -> cold at 30d (freeze) -> delete at 90d.
[root@fgedu-elastic1 ~]# curl -X PUT "http://localhost:9200/_ilm/policy/fgedu-policy" \
  -u elastic:Elastic@123 -H 'Content-Type: application/json' -d '{
  "policy": {
    "phases": {
      "hot": {
        "min_age": "0ms",
        "actions": {
          "rollover": { "max_size": "50gb", "max_age": "1d" },
          "set_priority": { "priority": 100 }
        }
      },
      "warm": {
        "min_age": "7d",
        "actions": {
          "forcemerge": { "max_num_segments": 1 },
          "shrink": { "number_of_shards": 1 },
          "set_priority": { "priority": 50 }
        }
      },
      "cold": {
        "min_age": "30d",
        "actions": {
          "freeze": {},
          "set_priority": { "priority": 0 }
        }
      },
      "delete": {
        "min_age": "90d",
        "actions": { "delete": {} }
      }
    }
  }
}'

Part02-Logstash管道优化

2.1 高性能日志处理

# Configure multiple Logstash pipelines. All quotes below are plain ASCII
# (" and ') — the curly quotes in the published copy would make both the
# YAML and the Logstash config unparsable.
[root@fgedu-logstash ~]# cat > /etc/logstash/pipelines.yml << 'EOF'
- pipeline.id: nginx-pipeline
  path.config: "/etc/logstash/conf.d/nginx-pipeline.conf"
  pipeline.workers: 4
  pipeline.batch.size: 1000
  pipeline.batch.delay: 50
- pipeline.id: syslog-pipeline
  path.config: "/etc/logstash/conf.d/syslog-pipeline.conf"
  pipeline.workers: 2
  pipeline.batch.size: 500
  pipeline.batch.delay: 100
- pipeline.id: app-pipeline
  path.config: "/etc/logstash/conf.d/app-pipeline.conf"
  pipeline.workers: 4
  pipeline.batch.size: 1000
  pipeline.batch.delay: 50
EOF

# Configure the Nginx log pipeline: Beats input over TLS -> grok parse of
# the combined log format (plus a trailing rt= request-time field) ->
# date/geoip/useragent enrichment -> secured Elasticsearch output with ILM.
# The user-field separator in the grok pattern is an ASCII '-'; nginx logs
# never contain the en-dash that appeared in the published copy.
[root@fgedu-logstash ~]# cat > /etc/logstash/conf.d/nginx-pipeline.conf << 'EOF'
input {
  beats {
    port => 5044
    ssl => true
    ssl_certificate => "/etc/logstash/certs/logstash.crt"
    ssl_key => "/etc/logstash/certs/logstash.key"
    client_inactivity_timeout => 3600
  }
}

filter {
  grok {
    match => {
      "message" => '%{IPORHOST:client_ip} - %{USERNAME:user} \[%{HTTPDATE:timestamp}\] "%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:httpversion}" %{NUMBER:status} %{NUMBER:bytes} "%{DATA:referrer}" "%{DATA:agent}" rt=%{NUMBER:request_time}'
    }
    overwrite => ["message"]
  }

  date {
    match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
    target => "@timestamp"
  }

  geoip {
    source => "client_ip"
    target => "geoip"
    fields => ["city_name", "country_name", "location"]
  }

  useragent {
    source => "agent"
    target => "useragent"
  }

  mutate {
    convert => {
      "status" => "integer"
      "bytes" => "integer"
      "request_time" => "float"
    }
    remove_field => ["timestamp", "agent"]
  }

  if [status] >= 500 {
    mutate {
      add_tag => ["error"]
    }
  }
}

output {
  elasticsearch {
    hosts => ["https://192.168.1.10:9200", "https://192.168.1.11:9200"]
    user => "logstash_writer"
    password => "Logstash@123"
    # NOTE: with ILM enabled the rollover alias names the write target;
    # "index" is kept for clarity but ignored by the output plugin.
    index => "fgedu-nginx-%{+YYYY.MM.dd}"
    ilm_enabled => true
    ilm_rollover_alias => "fgedu-nginx"
    ilm_policy => "fgedu-policy"
    template => "/etc/logstash/templates/nginx-template.json"
    template_name => "fgedu-nginx"
  }
}
EOF

# Configure the Logstash JVM. jvm.options requires one flag per line —
# the single-line form in the published copy would be rejected on startup.
[root@fgedu-logstash ~]# cat > /etc/logstash/jvm.options << 'EOF'
-Xms8g
-Xmx8g
-XX:+UseG1GC
-XX:G1HeapRegionSize=16m
-XX:InitiatingHeapOccupancyPercent=30
EOF

# Restart Logstash to load the new pipelines and JVM settings.
[root@fgedu-logstash ~]# systemctl restart logstash

Part03-Filebeat配置

3.1 日志采集配置

# Install Filebeat.
[root@fgedu-web ~]# yum install -y filebeat

# Configure Filebeat: four log inputs (nginx access/error, php-fpm,
# application logs), multiline joining for stack traces, metadata
# processors, and TLS-secured output to Logstash. YAML is indentation-
# sensitive, so the config is written out in proper multi-line form.
[root@fgedu-web ~]# cat > /etc/filebeat/filebeat.yml << 'EOF'
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/nginx/access.log
  fields:
    type: nginx-access
    env: production
  fields_under_root: true
  # Lines not starting with an IPv4 address belong to the previous event.
  multiline:
    pattern: '^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'
    negate: true
    match: after
- type: log
  enabled: true
  paths:
    - /var/log/nginx/error.log
  fields:
    type: nginx-error
    env: production
  fields_under_root: true
- type: log
  enabled: true
  paths:
    - /var/log/php-fpm/*.log
  fields:
    type: php-fpm
    env: production
  fields_under_root: true
- type: log
  enabled: true
  paths:
    - /var/www/fgedu/storage/logs/*.log
  fields:
    type: application
    env: production
  fields_under_root: true
  # Application log entries start with '[timestamp]'; join continuation lines.
  multiline:
    pattern: '^\['
    negate: true
    match: after

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml

processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - drop_fields:
      fields: ["agent.ephemeral_id", "agent.id", "agent.name", "ecs"]
      ignore_missing: true

output.logstash:
  hosts: ["192.168.1.13:5044"]
  ssl.enabled: true
  ssl.certificate_authorities: ["/etc/filebeat/certs/ca.crt"]
  ssl.certificate: "/etc/filebeat/certs/filebeat.crt"
  ssl.key: "/etc/filebeat/certs/filebeat.key"
  loadbalance: true

logging:
  level: info
  to_files: true
  files:
    path: /var/log/filebeat
    name: filebeat.log
    keepfiles: 7
    permissions: 0644

monitoring:
  enabled: true
  elasticsearch:
    hosts: ["https://192.168.1.10:9200"]
    username: beats_system
    password: Beats@123
EOF

# Enable and start Filebeat.
[root@fgedu-web ~]# systemctl enable filebeat --now

# Verify the configuration and the output connection.
[root@fgedu-web ~]# filebeat test config
Config OK
[root@fgedu-web ~]# filebeat test output
logstash: 192.168.1.13:5044...
  connection...
    parse host... OK
    dns lookup... OK
    addresses: 192.168.1.13
  dial up... OK
  TLS... OK
  talk to server... OK

Part04-日志分析实战

4.1 日志查询分析

# Query server errors (HTTP 500) from the last hour, newest first.
# All quotes are plain ASCII — the curly quotes in the published copy
# break both shell quoting and the JSON payloads.
[root@fgedu-elastic1 ~]# curl -X GET "http://localhost:9200/fgedu-nginx-*/_search" \
  -u elastic:Elastic@123 -H 'Content-Type: application/json' -d '{
  "query": {
    "bool": {
      "must": [
        {"range": {"@timestamp": {"gte": "now-1h"}}},
        {"term": {"status": 500}}
      ]
    }
  },
  "size": 10,
  "sort": [{"@timestamp": "desc"}]
}' | jq '.hits.hits[]._source'

# Count documents per HTTP status code.
[root@fgedu-elastic1 ~]# curl -X GET "http://localhost:9200/fgedu-nginx-*/_search" \
  -u elastic:Elastic@123 -H 'Content-Type: application/json' -d '{
  "size": 0,
  "aggs": {
    "status_codes": {
      "terms": {
        "field": "status",
        "size": 10
      }
    }
  }
}' | jq '.aggregations.status_codes'

# Top 10 client IPs by request count.
[root@fgedu-elastic1 ~]# curl -X GET "http://localhost:9200/fgedu-nginx-*/_search" \
  -u elastic:Elastic@123 -H 'Content-Type: application/json' -d '{
  "size": 0,
  "aggs": {
    "top_ips": {
      "terms": {
        "field": "client_ip",
        "size": 10
      }
    }
  }
}' | jq '.aggregations.top_ips'

# Create the daily log-analysis report script. Variables are quoted, the
# repeated curl boilerplate is factored into a helper, and today's index
# name is computed once.
[root@fgedu-elastic1 ~]# cat > /usr/local/bin/log-analysis.sh << 'EOF'
#!/bin/bash
# log-analysis.sh — daily summary report from today's fgedu-nginx index:
# total volume, error breakdown, top client IPs, response-time stats.
set -euo pipefail

ES_HOST="localhost:9200"
ES_USER="elastic"
ES_PASS="Elastic@123"
# Today's daily index, e.g. fgedu-nginx-2024.01.31
INDEX="fgedu-nginx-$(date +%Y.%m.%d)"

# Run a _search against today's index with the given JSON body ($1).
es_search() {
  curl -s -u "$ES_USER:$ES_PASS" "http://$ES_HOST/$INDEX/_search" \
    -H 'Content-Type: application/json' -d "$1"
}

echo "=== 日志分析报告 ==="
echo "分析时间: $(date)"
echo ""

echo "1. 今日日志总量"
curl -s -u "$ES_USER:$ES_PASS" "http://$ES_HOST/$INDEX/_count" | jq '.count'
echo ""

echo "2. 今日错误统计"
es_search '{
  "size": 0,
  "query": {"range": {"status": {"gte": 400}}},
  "aggs": {"errors": {"terms": {"field": "status"}}}
}' | jq '.aggregations.errors'
echo ""

echo "3. 今日访问TOP10 IP"
es_search '{
  "size": 0,
  "aggs": {"top_ips": {"terms": {"field": "client_ip", "size": 10}}}
}' | jq '.aggregations.top_ips'
echo ""

echo "4. 今日响应时间分析"
es_search '{
  "size": 0,
  "aggs": {
    "avg_time": {"avg": {"field": "request_time"}},
    "max_time": {"max": {"field": "request_time"}},
    "slow_requests": {"filter": {"range": {"request_time": {"gt": 1}}}}
  }
}' | jq '.aggregations'
echo ""
echo "=== 分析完成 ==="
EOF
[root@fgedu-elastic1 ~]# chmod +x /usr/local/bin/log-analysis.sh

风哥针对日志分析建议:

  • 配置合理的日志保留策略
  • 建立日志分类和标签
  • 实施敏感信息脱敏
  • 配置日志告警规则
  • 定期归档历史日志

本文由风哥教程整理发布,仅用于学习测试使用,转载注明出处:http://www.fgedu.net.cn/10327.html

联系我们

在线咨询:点击这里给我发消息

微信号:itpux-com

工作日:9:30-18:30,节假日休息