平台实现前的说明(踩坑记录在文章末尾):实现日志采集、处理与展示。
服务器日志
/var/log/audit/audit.log:审计日志,跟用户相关的日志。
/var/log/cron:记录与系统定时任务相关的日志。
/var/log/messages:记录系统中主要信息的日志。
/var/log/secure:记录验证和授权方面信息的日志,如:ssh登录、su切换用户、sudo授权等。
/var/log/yum.log:记录yum安装软件信息。
vim /etc/filebeat/filebeat.yml
# Install Filebeat 7.4.1 from the local RPM package
rpm -ivh filebeat-7.4.1-x86_64.rpm
# Edit the main Filebeat configuration
vim /etc/filebeat/filebeat.yml
# Kibana endpoint used by `filebeat setup` to load dashboards
setup.kibana:
  host: "10.10.10.9:5601"

# Initial output: ship events straight to Elasticsearch
# (replaced later by output.kafka once Kafka is added to the chain)
output.elasticsearch:
  hosts: ["10.10.10.9:9200"]
# Load index templates and Kibana dashboards into the stack
filebeat setup
# Enable the system and auditd log modules
filebeat modules enable system
filebeat modules enable auditd
# Install the modules' ingest pipelines into Elasticsearch
filebeat setup --pipelines --modules system
filebeat setup --pipelines --modules auditd
vim /etc/filebeat/modules.d/system.yml
- module: system
  # syslog fileset -> /var/log/messages (general system messages)
  syslog:
    enabled: true
    var.paths: ["/var/log/messages"]
  # auth fileset -> /var/log/secure (ssh logins, su, sudo)
  auth:
    enabled: true
    var.paths: ["/var/log/secure"]
vim /etc/filebeat/modules.d/auditd.yml
# Collect the Linux audit log through Filebeat's auditd module
- module: auditd
log:
enabled: true
var.paths: ["/var/log/audit/audit.log"]
vim /etc/filebeat/filebeat.yml
# Route Filebeat module events to Kafka instead of Elasticsearch
output.kafka:
  hosts: ["10.10.10.9:9092"]
  topics:
    # One topic per module fileset, e.g. system-syslog, system-auth, auditd-log;
    # the condition must be nested under the topic list item to apply to it
    - topic: '%{[service.type]}-%{[fileset.name]}'
      when.has_fields: ["@metadata.pipeline"]
# Validate the configuration, then run Filebeat in the foreground
filebeat test config -c /etc/filebeat/filebeat.yml
filebeat -e
vim /etc/logstash/conf.d/system.conf
# system.conf: consume Filebeat system/auditd events from Kafka and index
# them into Elasticsearch through the module ingest pipelines.
input {
kafka {
bootstrap_servers => "10.10.10.9:9092"
topics => ["system-auth","system-syslog","auditd-log"]
codec => json
}
}
output {
# NOTE(review): these branches rely on [@metadata][topic] being present on
# the event; the kafka input only adds topic metadata when decorate_events
# is enabled (and then under [@metadata][kafka]) -- confirm against the
# running setup.
if [@metadata][pipeline] and [@metadata][topic] == "auditd-log" {
elasticsearch {
hosts => ["10.10.10.9:9200"]
# Daily index, e.g. filebeat-7.4.1-audit-log-2019.11.01
index => "%{[@metadata][beat]}-%{[@metadata][version]}-audit-log-%{+YYYY.MM.dd}"
# Parse via the module's ingest pipeline installed by `filebeat setup`
pipeline => "%{[@metadata][pipeline]}"
}
}
else if [@metadata][pipeline] and [@metadata][topic] == "system-syslog" {
elasticsearch {
hosts => ["10.10.10.9:9200"]
index => "%{[@metadata][beat]}-%{[@metadata][version]}-system-syslog-%{+YYYY.MM.dd}"
pipeline => "%{[@metadata][pipeline]}"
}
}# The "system-auth" branch follows the same pattern and is elided here.
}
# Run Logstash with every pipeline file under conf.d
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d
Web日志
vim /usr/share/filebeat/module/nginx/access/ingest/default.json
vim /etc/filebeat/filebeat.yml
filebeat.inputs:
  - type: log
    paths:
      - /opt/nginx/logs/access.log
    # Custom marker field used by the Logstash output conditional
    fields:
      nginx: access
    # nginx is configured to emit JSON logs; lift keys to the event root
    json.keys_under_root: true
    json.overwrite_keys: true

output.kafka:
  hosts: ["10.10.10.9:9092"]
  topics:
    # Module-collected logs keep their service/fileset topic
    - topic: '%{[service.type]}-%{[fileset.name]}'
      when.has_fields: ["@metadata.pipeline"]
    # Raw nginx access input goes to its own topic
    - topic: 'nginx-access'
      when.has_fields: ["fields.nginx"]
vim /etc/logstash/conf.d/nginx.conf
# nginx.conf: consume raw nginx access events from Kafka and index them
# into a daily nginx-access-* index.
input {
kafka {
bootstrap_servers => "10.10.10.9:9092"
topics => ["nginx-access"]
codec => json
}
}
output {
# 'fields.nginx: access' is set by the Filebeat log input for nginx
if [fields][nginx] == "access" {
elasticsearch {
hosts => ["10.10.10.9:9200"]
index => "nginx-access-%{+YYYY.MM.dd}"
}
}
}
堡垒机日志
vim /etc/logstash/conf.d/jumpserver.conf
# jumpserver.conf: pull JumpServer session and command records out of MySQL
# with the JDBC input and index them into daily Elasticsearch indices.
input {
jdbc {
jdbc_driver_library => "/usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.36.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_paging_enabled => "true"
jdbc_connection_string => "jdbc:mysql://10.10.10.6:3306/jumpserver"
jdbc_user => "root"
jdbc_password => "root"
# Poll once per minute
schedule => "*/1 * * * *"
# NOTE(review): 481 minutes = 8h01m -- presumably compensating a UTC vs
# UTC+8 offset between date_start and now(); confirm the server timezone.
statement => "select * from terminal_session where date_start >= SUBDATE(now(),interval 481 minute)"
# Tag used by the output conditional below
type => "terminal_session"
}
jdbc {
jdbc_driver_library => "/usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.36.jar"
jdbc_driver_class => "com.mysql.jdbc.Driver"
jdbc_paging_enabled => "true"
jdbc_connection_string => "jdbc:mysql://10.10.10.6:3306/jumpserver"
jdbc_user => "root"
jdbc_password => "root"
schedule => "*/1 * * * *"
# timestamp is stored as a Unix epoch, hence FROM_UNIXTIME before comparing
statement => "select id,user,asset,system_user,input,session,timestamp,org_id from terminal_command where FROM_UNIXTIME(timestamp) >= SUBDATE(now(),interval 1 minute);"
type => "terminal_command"
}
}
output {
# Route each JDBC stream to its own daily index by the type tag set above
if [type] == "terminal_session" {
elasticsearch {
hosts => ["10.10.10.9:9200"]
index => "jumpserver-session-%{+YYYY.MM.dd}"
}
}
else if [type] == "terminal_command" {
elasticsearch {
hosts => ["10.10.10.9:9200"]
index => "jumpserver-command-%{+YYYY.MM.dd}"
}
}
}
入侵检测系统日志
# Build/runtime dependencies for Bro (Zeek)
yum install cmake.x86_64 gcc.x86_64 gcc-c++.x86_64 flex.x86_64 bison.x86_64 libpcap.x86_64 libpcap-devel.x86_64 openssl-devel.x86_64 python-devel.x86_64 swig.x86_64 zlib-devel.x86_64 -y

# Add the openSUSE Bro repository, then install Bro
wget http://download.opensuse.org/repositories/network:bro/CentOS_7/network:bro.repo -O /etc/yum.repos.d/network:bro.repo
yum install bro.x86_64 -y

# Put the Bro binaries on PATH
echo 'export PATH=/opt/bro/bin:$PATH' >> /etc/profile
source /etc/profile

# Switch Bro's logs to JSON so Filebeat's zeek module can parse them
vim /opt/bro/share/bro/site/local.bro
# Append at the end of the file:
@load tuning/json-logs
redef LogAscii::json_timestamps = JSON::TS_ISO8601;
redef LogAscii::use_json = T;

# Install the zeek module's ingest pipelines, then run Filebeat
filebeat setup --pipelines --modules zeek
filebeat -e
# Extended kafka input for system.conf once Zeek logs also flow through Kafka
input {
kafka {
bootstrap_servers => "10.10.10.9:9092"
topics => ["system-auth","system-syslog","auditd-log","zeek-connection","zeek-dns","zeek-http","zeek-files"]
codec => json
}
}# The output section stays the same; only the topic conditions need updating
vim /etc/filebeat/filebeat.yml
filebeat.modules:
  - module: wazuh
    # Ship alerts only; skip the full event archives
    alerts:
      enabled: true
    archives:
      enabled: false

# Load the Wazuh index template shipped with the Wazuh Filebeat module
setup.template.json.enabled: true
setup.template.json.path: '/etc/filebeat/wazuh-template.json'
setup.template.json.name: 'wazuh'
setup.template.overwrite: true
setup.ilm.enabled: false

output.kafka:
  hosts: ["10.10.10.9:9092"]
  topics:
    - topic: '%{[service.type]}-%{[fileset.name]}'
      when.has_fields: ["@metadata.pipeline"]
vim /etc/logstash/conf.d/wazuh.conf
# wazuh.conf: consume Wazuh alerts from Kafka and index them into the
# wazuh-alerts-3.x-* daily indices read by the Kibana Wazuh app.
input {
  kafka {
    bootstrap_servers => "10.10.10.9:9092"
    topics => ["wazuh-alert"]
    codec => json
  }
}
output {
  # This pipeline subscribes to the single "wazuh-alert" topic, so every
  # event here is a Wazuh alert and no topic conditional is needed.
  # (The original guarded on [@metadata][topic] == "auditd-log", a leftover
  # copied from system.conf that could never match this input.)
  elasticsearch {
    hosts => ["10.10.10.9:9200"]
    index => "wazuh-alerts-3.x-%{+YYYY.MM.dd}"
  }
}
收集方式 --> 存储 --> 展示。
因为篇幅原因,不想写得太冗长影响阅读。后续还会有一些番外篇:踩坑记录。
vim /etc/hosts
10.10.10.9 big_data
vim /usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-patterns-core-4.1.2/patterns/grok-patterns
# 将 HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z-]{0,62}))*(\.?|\b) 修改为如下内容
# (注意:两处字符类中各多了一个下划线,共两个,用于匹配带下划线的主机名)
HOSTNAME \b(?:[0-9A-Za-z][0-9A-Za-z_-]{0,62})(?:\.(?:[0-9A-Za-z][0-9A-Za-z_-]{0,62}))*(\.?|\b)
# Install the JDBC input plugin and the MySQL JDBC driver Logstash will load
/usr/share/logstash/bin/logstash-plugin install logstash-input-jdbc
wget https://repo1.maven.org/maven2/mysql/mysql-connector-java/5.1.36/mysql-connector-java-5.1.36.jar -O /usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.36.jar
chown logstash:logstash /usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.36.jar
chmod 644 /usr/share/logstash/logstash-core/lib/jars/mysql-connector-java-5.1.36.jar

# Install the Wazuh app into Kibana (version must match Wazuh 3.10.2 / Kibana 7.4.1)
sudo -u kibana /usr/share/kibana/bin/kibana-plugin install https://packages.wazuh.com/wazuhapp/wazuhapp-3.10.2_7.4.1.zip