vlambda博客
学习文章列表

Log4j配合kafka上报数据

背景

大数据发展到一定程度后,对任务、文件等资源的治理越来越迫切,对各种数据表的治理是其中的一部分。通过统计 HDFS 文件的使用情况,筛选出长时间不使用或使用率低的文件,从而制定相应的治理策略。原先是通过 Flume 程序采集 HDFS 文件使用情况并上报到 Kafka,网上搜罗一圈后发现可以通过 Log4j 直接把日志上报到 Kafka,遂做尝试,记录一下。

pom.xml

<dependencies>
  <!-- NOTE(review): Log4j 2.9.1 is affected by CVE-2021-44228 (Log4Shell);
       upgrade all org.apache.logging.log4j artifacts to >= 2.17.1 before
       using this in production. Versions kept as in the original article. -->
  <!-- log4j2 binding for the slf4j API -->
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-slf4j-impl</artifactId>
    <version>2.9.1</version>
  </dependency>
  <!-- slf4j core API -->
  <dependency>
    <groupId>org.slf4j</groupId>
    <artifactId>slf4j-api</artifactId>
    <version>1.7.25</version>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-api</artifactId>
    <version>2.9.1</version>
  </dependency>
  <dependency>
    <groupId>org.apache.logging.log4j</groupId>
    <artifactId>log4j-core</artifactId>
    <version>2.9.1</version>
  </dependency>
  <!-- Kafka producer client used by the log4j2 KafkaAppender -->
  <dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.4.1</version>
  </dependency>
  <dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.12</artifactId>
    <version>2.0.1</version>
  </dependency>
  <dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.47</version>
  </dependency>
</dependencies>

log4j2.xml

<?xml version="1.0" encoding="UTF-8"?>
<Configuration status="info">
  <Appenders>
    <!-- Console appender: formatted log lines to stdout -->
    <Console name="Console" target="SYSTEM_OUT">
      <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
    </Console>
    <!-- Kafka appender: publishes each log event to the "topic" topic.
         Replace the property value with the real broker list, e.g. host1:9092,host2:9092 -->
    <Kafka name="Kafka" topic="topic">
      <PatternLayout pattern="%date %message"/>
      <Property name="bootstrap.servers">bootstrap.servers</Property>
    </Kafka>
  </Appenders>
  <Loggers>
    <Root level="info">
      <AppenderRef ref="Kafka"/>
      <AppenderRef ref="Console"/>
    </Root>
    <!-- keep the Kafka client's own logging out of the Kafka appender
         (avoid recursive logging) -->
    <Logger name="org.apache.kafka" level="INFO"/>
  </Loggers>
</Configuration>

测试类 log4j2kafka.java

public class log4j2kafka {
   private static Logger logger = LoggerFactory.getLogger(log4j2kafka.class);

   public static void main(String[] args) throws InterruptedException {

       for (int i = 0; i <= 100; i++) {
           logger.info("This is Message [" + i + "] from log4j producer .. ");
      }
  }
}

执行命令查询结果:

bin/kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 --topic topic_name --from-beginning