
Deploying an ELK + Filebeat Cluster Logging System

1. Deployment Environment

192.168.236.179  es1  Elasticsearch  Logstash  Kibana  Filebeat  JDK

192.168.236.180  es2  Elasticsearch  Filebeat  JDK

192.168.236.181  es3  Elasticsearch  Filebeat  JDK

2. Install the JDK and Tune System Parameters (all servers)

tar xf jdk-1.8.0_linux-x64_bin.tar.gz -C /usr/local/

vim /etc/profile.d/java.sh  #set the environment variables; if a JVM environment variable already exists on this server, remove it first

export JAVA_HOME=/usr/local/jdk-1.8.0/

export PATH=$PATH:$JAVA_HOME/bin

export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

source /etc/profile.d/java.sh

[root@es1 ~]# java -version

java version "1.8.0_181"

Java(TM) SE Runtime Environment (build 1.8.0_181-b13)

Java HotSpot(TM) 64-Bit Server VM (build 25.181-b13, mixed mode)

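The kernel and ulimit tuning that sysctl -p applies below was only shown as a screenshot in the original post. As a rough sketch of the settings Elasticsearch typically requires (common values, not necessarily the exact ones used here):

# /etc/security/limits.conf -- raise the open-file and memory-lock limits
* soft nofile 65536
* hard nofile 65536
* soft memlock unlimited
* hard memlock unlimited

# /etc/sysctl.conf -- Elasticsearch needs a large mmap count
vm.max_map_count = 262144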

[root@es1 ~]# sysctl -p

[root@es1 ~]# reboot

3. Install and Configure the ES Cluster (the 179 server is used as the example)

[root@es1 ~]# tar xf elasticsearch-7.6.2-linux-x86_64.tar.gz -C /usr/local/

[root@es1 ~]# cd /usr/local/

[root@es1 local]# mv elasticsearch-7.6.2 elasticsearch

[root@es1 local]# cd /usr/local/elasticsearch/config/

[root@es1 config]# pwd

/usr/local/elasticsearch/config

[root@es1 config]# grep "^[a-z]" elasticsearch.yml

cluster.name: my-elk  #cluster name, identical on every node

node.name: node-1  #this node's name, must be unique within the cluster (use node-2 / node-3 on the other servers)

path.data: /data/es/data

path.logs: /data/es/logs

bootstrap.memory_lock: true

http.port: 9200

discovery.seed_hosts: ["192.168.236.179", "192.168.236.180", "192.168.236.181"]  #cluster host IPs

cluster.initial_master_nodes: ["192.168.236.179", "192.168.236.180", "192.168.236.181"]  #nodes eligible to be elected master on the first cluster startup

discovery.zen.minimum_master_nodes: 2  #require at least two live nodes before a master can be elected (a legacy 6.x setting; 7.x ignores it)

gateway.recover_after_nodes: 2  #start shard recovery only after at least two nodes have joined
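Two things are worth adding here. bootstrap.memory_lock: true only takes effect if the memlock limits from the tuning step are in place, and the JVM heap is normally pinned in config/jvm.options; also, the curl test below reaches the node on its LAN IP, which requires network.host to be set even though it does not appear in the grep output above. A minimal sketch with placeholder values (1 GB heap; adjust the address per node):

# /usr/local/elasticsearch/config/jvm.options -- set min and max heap to the same size
-Xms1g
-Xmx1g

# extra line for elasticsearch.yml (use each node's own address)
network.host: 192.168.236.179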


[root@es1 config]#  mkdir -pv /data/es/data

[root@es1 config]#  mkdir -pv /data/es/logs

[root@es1 config]#  useradd elastic

[root@es1 config]#  chown -R elastic:elastic /data/es/

[root@es1 config]#  chown -R elastic:elastic /usr/local/elasticsearch/

1) Start the service

[root@es1 config]# su - elastic

[elastic@es1 ~]$ cd /usr/local/elasticsearch

[elastic@es1 elasticsearch]$ nohup ./bin/elasticsearch > /tmp/elastic.log &

[elastic@es1 elasticsearch]$ tail -f /tmp/elastic.log


[elastic@es1 ~]$ netstat -nltp |grep -E "9200|9300"

[elastic@es1 ~]$ curl http://192.168.236.179:9200/

{

  "name" : "node-1",

  "cluster_name" : "my-elk",

  "cluster_uuid" : "YUSDGz9fT2q0Kz0NdoppVg",

  "version" : {

    "number" : "7.6.2",

    "build_flavor" : "default",

    "build_type" : "tar",

    "build_hash" : "ef48eb35cf30adf4db14086e8aabd07ef6fb113f",

    "build_date" : "2020-03-26T06:34:37.794943Z",

    "build_snapshot" : false,

    "lucene_version" : "8.4.0",

    "minimum_wire_compatibility_version" : "6.8.0",

    "minimum_index_compatibility_version" : "6.0.0-beta1"

  },

  "tagline" : "You Know, for Search"

}


[root@es1 local]# scp -r elasticsearch root@192.168.236.180:/usr/local/

[root@es1 local]# scp -r elasticsearch root@192.168.236.181:/usr/local/

*On the other two nodes, create the data and log directories, create the ordinary user elastic and grant it ownership of both paths, change node.name in elasticsearch.yml to node-2 and node-3 respectively, and then start Elasticsearch as the elastic user.*
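Once all three nodes are running, you can confirm that they have formed one cluster:

[root@es1 ~]# curl http://192.168.236.179:9200/_cat/nodes?v

[root@es1 ~]# curl http://192.168.236.179:9200/_cluster/health?pretty

The health output should report "number_of_nodes" : 3.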


4. Install Kibana

[root@es1 ~]# tar xf kibana-7.6.2-linux-x86_64.tar.gz -C /usr/local/

[root@es1 ~]# cd /usr/local/

[root@es1 local]# ln -sv kibana-7.6.2-linux-x86_64/ kibana

[root@es1 local]# cd kibana/config

[root@es1 config]# grep "^[a-z]" /usr/local/kibana/config/kibana.yml

server.port: 5601  #listening port, 5601 is the default

i18n.locale: "zh-CN"  #7.x supports a Chinese UI, set as needed
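The grep output above shows only two keys. For Kibana to be reachable from other machines and to query the ES cluster, server.host and elasticsearch.hosts usually need to be set as well; a minimal sketch (addresses taken from the environment in section 1, not from the original screenshot):

server.host: "192.168.236.179"

elasticsearch.hosts: ["http://192.168.236.179:9200", "http://192.168.236.180:9200", "http://192.168.236.181:9200"]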


Start the service

[root@es1 ~]# nohup /usr/local/kibana/bin/kibana --allow-root > /tmp/kibana.log &

5. Install and Configure Filebeat

[root@es1 ~]# tar xf filebeat-7.6.2-linux-x86_64.tar.gz -C /usr/local/

[root@es1 ~]# cd /usr/local/

[root@es1 local]# mv filebeat-7.6.2-linux-x86_64 filebeat

[root@es1 local]# cd /usr/local/filebeat/

[root@es1 filebeat]# vim filebeat.yml

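The filebeat.yml used in the post was shown only as a screenshot. A minimal sketch of a configuration that fits the rest of this setup -- reading the JSON-formatted nginx access log (see the nginx step below) and shipping it straight to the ES cluster -- with all values assumed rather than copied from the screenshot:

filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /opt/nginx/logs/access.log
  json.keys_under_root: true    #the access log is JSON, lift its fields to the top level
  json.overwrite_keys: true

output.elasticsearch:
  hosts: ["192.168.236.179:9200", "192.168.236.180:9200", "192.168.236.181:9200"]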

[root@es1 filebeat]# nohup /usr/local/filebeat/filebeat -e -c /usr/local/filebeat/filebeat.yml > /tmp/filebeat.log &
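After Filebeat has run for a while, a quick way to confirm that events are arriving in the cluster (the exact index name depends on the output configuration):

[root@es1 filebeat]# curl http://192.168.236.179:9200/_cat/indices?v

With the default Elasticsearch output, a filebeat-7.6.2-* index should appear in the listing.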


1) Install nginx and change the nginx access-log format to JSON (a sketch follows below)
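The actual log_format directive was shown as a screenshot in the original post. A rough sketch of a JSON access-log format (field names are illustrative; escape=json requires nginx 1.11.8 or later):

# inside the http {} block of nginx.conf
log_format json_log escape=json '{"time_local":"$time_local",'
                                '"remote_addr":"$remote_addr",'
                                '"request":"$request",'
                                '"status":"$status",'
                                '"body_bytes_sent":"$body_bytes_sent",'
                                '"http_referer":"$http_referer",'
                                '"http_user_agent":"$http_user_agent"}';

access_log /opt/nginx/logs/access.log json_log;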


6. Install Logstash

[root@es2 ~]# tar xf logstash-7.6.2.tar.gz -C /usr/local/

[root@es2 ~]# mv /usr/local/logstash-7.6.2 /usr/local/logstash

[root@es2 ~]# cd /usr/local/logstash/config/

[root@es2 config]# more logstash_nginx.conf

input {
  file {
    path => "/opt/nginx/logs/access.log"
    type => "nginxlog"
    start_position => "beginning"
  }
}

filter {
  if [type] == "nginxlog" {
    grok {
      match => ["message", "%{COMMONAPACHELOG}"]
    }
    date {
      match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
    }
  }
}

output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["192.168.236.179:9200", "192.168.236.180:9200", "192.168.236.181:9200"]
  }
}
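Note that %{COMMONAPACHELOG} matches the default (non-JSON) access-log format; if the log has already been switched to JSON as in section 5, a json filter or codec would be used instead of grok. Before starting the pipeline in the background it can also be syntax-checked:

[root@es2 config]# /usr/local/logstash/bin/logstash -f /usr/local/logstash/config/logstash_nginx.conf --config.test_and_exit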

[root@es2 ~]# nohup /usr/local/logstash/bin/logstash -f /usr/local/logstash/config/logstash_nginx.conf > /tmp/logstash.log &

Open the Kibana web UI at http://192.168.236.179:5601 and create an index pattern for the new indices to browse the logs.

On top of this setup, you can go on to deploy a distributed logging system as an ELK + Filebeat + Kafka cluster.
