
About Java: A Generic API Statistics Component (with Source Code)

Reposted from: https://blog.csdn.net/appearb…

Contents

1. Requirements

2. Design approach

3. Architecture diagram

4. Implementation

1. Define the interceptor

2. Service that collects API parameters

3. The annotation that consumers import

5. Syncing to ClickHouse

6. Using the component in a business system

7. Conclusion


1. Requirements

Record how a service's API endpoints are called, including the common fields: domain, IP, API path, query parameters, success flag, request time, elapsed time, error message, and API name, plus business-defined custom fields.

Package it as a reusable component that is minimally intrusive to the business system.

2. Design approach

A custom Spring interceptor collects the common fields (and business-defined fields) into a ThreadLocal; after the API call completes, the record is sent asynchronously to Kafka and then consumed into ClickHouse for analysis.

A service that wants statistics only needs to depend on this component, add a single annotation, and configure the Kafka address.

3. Architecture diagram

Flow: Spring interceptor → ThreadLocal → async send to Kafka → consume into ClickHouse for analysis.

4. Implementation

1. Define the interceptor

Before the API call, initialize the record and extract the IP, domain, request time, request parameters, and so on from the request.

After the call, set the elapsed time, error message, and so on. The thread's ThreadLocal entry must then be removed, because Spring reuses request threads from a thread pool; without the cleanup, a later request handled by the same thread would see this thread's stale ThreadLocal data.

package cn.xianbin.apistat.interceptor;

import cn.xianbin.apistat.service.ApiStatService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

@Component
@Slf4j
public class ApiStatInterceptor implements HandlerInterceptor {

    @Autowired
    private ApiStatService apiStatService;

    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
        apiStatService.before(request);
        return true;
    }

    @Override
    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) {
        apiStatService.after(ex);
        // Remove the ThreadLocal entry so the pooled request thread does not carry stale data
        ApiStatService.threadLocal.remove();
    }

} 

2. Service that collects API parameters

package cn.xianbin.apistat.service;

import cn.xianbin.apistat.bean.ApiStatBean;
import cn.xianbin.apistat.utils.IpUtil;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;

@Slf4j
@Service
public class ApiStatService {

    private static DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS");

    public static ThreadLocal<ApiStatBean> threadLocal = new ThreadLocal<>();

    @Resource(name = "apiStatKafkaTemplate")
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Called from business code to set the custom API name
     */
    public static void apiName(String apiName) {
        ApiStatBean apiStatBean = threadLocal.get();
        if (apiStatBean == null) {
            log.warn("api stat not init");
            return;
        }
        apiStatBean.setApi_name(apiName);
    }

    public void before(HttpServletRequest request) {
        LocalDateTime now = LocalDateTime.now();
        ApiStatBean apiStatBean = ApiStatBean.builder()
                .ip(IpUtil.getIP(request))
                .domain(domain(request))
                .path(request.getRequestURI())
                .query_param(request.getQueryString())
                .startTime(System.currentTimeMillis())
                .start_time(now.format(dateTimeFormatter))
                .build();
        threadLocal.set(apiStatBean);
    }

    public void after(Exception ex) {
        ApiStatBean apiStatBean = threadLocal.get();
        apiStatBean.setCost_time(System.currentTimeMillis() - apiStatBean.getStartTime());
        if (ex == null) {
            apiStatBean.setIs_success(1);
        } else {
            apiStatBean.setError(ex.getMessage());
            apiStatBean.setIs_success(0);
        }
        log();
    }

    public void log() {
        String invokeLog = JSONObject.toJSONString(threadLocal.get());
        log.debug("asyncSend={}", invokeLog);
        kafkaTemplate.send("api_stat_test", invokeLog);
    }

    private String domain(HttpServletRequest request) {
        return String.format("%s://%s:%s", request.getScheme(), request.getServerName(), request.getServerPort());
    }

} 
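
The ApiStatBean used above is not shown in the post. A minimal sketch, assuming Lombok accessors/builder and snake_case field names so the fastjson output matches the ClickHouse columns (the real class in the repo may differ, and the @JSONField exclusion of the helper field is an assumption):

package cn.xianbin.apistat.bean;

import com.alibaba.fastjson.annotation.JSONField;
import lombok.Builder;
import lombok.Getter;
import lombok.Setter;

@Getter
@Setter
@Builder
public class ApiStatBean {
    private String ip;
    private String domain;
    private String path;
    private String query_param;
    private String start_time;   // formatted request time, e.g. "2021-01-01 12:00:00.000"
    private String api_name;     // business-defined API name
    private Integer is_success;  // 1 = success, 0 = failure
    private Long cost_time;      // elapsed time in milliseconds
    private String error;

    /** Helper for computing cost_time; assumed to be excluded from the Kafka/JSON payload. */
    @JSONField(serialize = false)
    private Long startTime;
}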

3. The annotation that consumers import

Use Spring's @Import annotation to pull in our configuration class, and @ComponentScan to scan our component classes.

package cn.xianbin.apistat;

import cn.xianbin.apistat.config.ApiStatConfig;
import org.springframework.context.annotation.Import;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Import(ApiStatConfig.class)
public @interface EnableApiStat {
}

package cn.xianbin.apistat.config;

import cn.xianbin.apistat.interceptor.ApiStatInterceptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

@ComponentScan("cn.dgg.bigdata.apistat")
@Configuration
public class ApiStatConfig implements WebMvcConfigurer {

    @Autowired
    private ApiStatInterceptor apiStatHandlerInterceptor;

    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        registry.addInterceptor(apiStatHandlerInterceptor).addPathPatterns("/**");
    }
} 
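
The apiStatKafkaTemplate bean injected by ApiStatService is not shown in the post. A minimal sketch of how it might be wired up, assuming it reads the apiStat.kafka.bootstrap-servers property from section 6 (the class name ApiStatKafkaConfig is made up here):

package cn.xianbin.apistat.config;

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;

@Configuration
public class ApiStatKafkaConfig {

    @Value("${apiStat.kafka.bootstrap-servers}")
    private String bootstrapServers;

    /** Dedicated producer for api-stat messages, independent of the host app's own Kafka setup. */
    @Bean(name = "apiStatKafkaTemplate")
    public KafkaTemplate<String, String> apiStatKafkaTemplate() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new KafkaTemplate<>(new DefaultKafkaProducerFactory<>(props));
    }
}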

5. Syncing to ClickHouse

1. Create the Kafka engine table

CREATE TABLE api_stat.kafka_api_stat (
    `ip` String,
    `api_name` String,
    `domain` String,
    `path` String,
    `query_param` String,
    `start_time` DateTime64 (3, 'Asia/Shanghai'),
    `is_success` UInt8,
    `cost_time` Int32,
    `error` String
  ) ENGINE = Kafka('localhost:9092', 'topic', 'group1', 'JSONEachRow');

2. Create the ClickHouse target table

CREATE TABLE api_stat.api_stat (
    `ip` String,
    `api_name` String,
    `domain` String,
    `path` String,
    `query_param` String,
    `start_time` DateTime64 (3, 'Asia/Shanghai'),
    `is_success` UInt8,
    `cost_time` Int32,
    `error` String
  ) ENGINE = MergeTree()
  PARTITION BY toYYYYMM(start_time) 
  ORDER BY (domain, path, start_time);

3. Create the materialized view from the Kafka table to the target table

CREATE MATERIALIZED VIEW api_stat.consumer_api_stat TO api_stat.api_stat
    AS SELECT ip, api_name, domain, path, query_param, start_time, is_success, cost_time, error FROM api_stat.kafka_api_stat;

6. Using the component in a business system

1. Add the dependency:

<dependency>
  <groupId>cn.dgg.bigdata</groupId>
  <artifactId>apistat</artifactId>
  <version>1.0-SNAPSHOT</version>
</dependency>

2. Configure the Kafka address

apiStat:
  kafka:
    bootstrap-servers: localhost:9092

3. Add the annotation @EnableApiStat (see the sketch below)
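
Putting it together, a consuming service might look like the following sketch. The application and controller names are invented for illustration; the component only requires the @EnableApiStat annotation, and the call to ApiStatService.apiName(...) is the optional hook for the business-defined API name.

package com.example.demo;   // hypothetical consuming application

import cn.xianbin.apistat.EnableApiStat;
import cn.xianbin.apistat.service.ApiStatService;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@EnableApiStat              // imports ApiStatConfig and registers the interceptor
@SpringBootApplication
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}

@RestController
class OrderController {

    @GetMapping("/orders")
    public String listOrders() {
        // Optional: set the business-defined api_name for this request's stat record
        ApiStatService.apiName("listOrders");
        return "ok";
    }
}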

7. Conclusion

Source code: https://github.com/ostarsier/apistat
