使用flink-cdc技术(2.3.0)解析binlog实现实时数据大屏。
一、项目环境
该项目主要为一个数据大屏,采用了flink-cdc技术(2.3.0),flink版本为(1.14.6),利用它自动获取并解析MySQL的binlog,将对应的数据实时解析出来用于大屏展示。
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-base -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-base</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.ververica/flink-connector-mysql-cdc -->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.ververica/flink-sql-connector-mysql-cdc -->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>
<version>2.3.0</version>
</dependency>
该文章主要参考了我自己的文章【十一分钟上手Flink CDC】:https://blog.51cto.com/lenglingx/11242672。
同样这篇就是“使用mysql-binlog-connector-java技术(0.29.2)解析binlog实现实时数据大屏”的Flink-CDC的技术实现。
同样使用了多线程,不过线程池是放在了程序代码里。上篇把mysql-binlog-connector-java技术的代码放到单独的sdk包目录,这篇我把flink-cdc技术的代码放到dese包目录。
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.cqsym</groupId>
<artifactId>newbig</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>8</maven.compiler.source>
<maven.compiler.target>8</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>2.6.15</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<!--lombok -->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-json</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-devtools</artifactId>
<optional>true</optional>
<!-- optional=true,依赖不会传递,该项目依赖devtools;之后依赖myboot项目的项目如果想要使用devtools,需要重新引入 -->
<scope>runtime</scope>
</dependency>
<!-- jackson-datatype-jsr310 -->
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jsr310</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-jdbc -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-data-jpa -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/mysql/mysql-connector-java -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.30</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.boot/spring-boot-starter-data-redis -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-base -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-base</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.ververica/flink-connector-mysql-cdc -->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-connector-mysql-cdc</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.ververica/flink-sql-connector-mysql-cdc -->
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-streaming-java -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-clients -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-runtime-web -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime-web_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table-runtime -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-runtime_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-scala-bridge_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner_2.12</artifactId>
<version>1.14.6</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>32.1.3-jre</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.alibaba/fastjson -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.83</version>
</dependency>
</dependencies>
<build>
<finalName>newbig</finalName>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>2.6.15</version>
<configuration>
<fork>true</fork>
<includeSystemScope>true</includeSystemScope>
<!--fork : 如果没有该项配置,可能devtools不会起作用,即应用不会restart -->
<!--这里写上main方法所在类的路径-->
<mainClass>com.cqsym.newbig.Application</mainClass>
</configuration>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.2</version>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.11.0</version>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>3.3.1</version>
<configuration>
<encoding>UTF-8</encoding>
<!-- 不对后缀为p12、cer、pem、pfx的证书文件做资源过滤(变量替换) -->
<nonFilteredFileExtensions>
<nonFilteredFileExtension>p12</nonFilteredFileExtension>
<nonFilteredFileExtension>cer</nonFilteredFileExtension>
<nonFilteredFileExtension>pem</nonFilteredFileExtension>
<nonFilteredFileExtension>pfx</nonFilteredFileExtension>
</nonFilteredFileExtensions>
</configuration>
</plugin>
</plugins>
</build>
</project>
application-dev.properties
#-----------------自定义信息配置---------------------
com.imddy.layuiadmin.title=BOOT
com.imddy.layuiadmin.description=学习一下BOOT是什么
#-----------------自定义信息配置---------------------
executeTask=2
#----------数据库基础配置--------------------
#spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver
#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/layuiadmin2?useUnicode=true&useSSL=false&characterEncoding=UTF-8&serverTimezone=UTC
#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/layuiadmin2?useUnicode=true&useSSL=false&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&zeroDateTimeBehavior=convertToNull
#spring.datasource.url=jdbc:mysql://127.0.0.1:3306/twmshelp?useUnicode=true&useSSL=false&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
#spring.datasource.username=root
#spring.datasource.password=root__
#spring.datasource.name=HikaraPool-100
#----------数据库连接池基础配置--------------------
spring.datasource.type=com.zaxxer.hikari.HikariDataSource
# 指定连接池的名称 - 默认自动生成
spring.datasource.hikari.pool-name=HikaraPool-1
# 如果你的驱动程序支持JDBC4,强烈建议不要设置此属性。
#spring.datasource.hikari.connection-test-query=select 1
# 连接超时时间 - 默认值:30秒。
spring.datasource.hikari.connection-timeout=30000
# 连接池中允许闲置的最长时间 - 默认值:10分钟
spring.datasource.hikari.idle-timeout=600000
# 一个连接生命时长(毫秒),超时而没被使用则被释放 - 默认值:30分钟
spring.datasource.hikari.max-lifetime=1800000
# 连接池中允许的最大连接数,包括闲置和使用中的连接 - 默认值:10
spring.datasource.hikari.maximum-pool-size=100
# 连接池中允许的最小空闲连接数 - 默认值:10。
spring.datasource.hikari.minimum-idle=10
# 连接被测试活动的最长时间 - 默认值:5秒。
spring.datasource.hikari.validation-timeout=5000
spring.jpa.show-sql=true
spring.jpa.open-in-view=false
spring.datasource.primary.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.primary.url=jdbc:mysql://127.0.0.1:3306/newbig?useUnicode=true&useSSL=false&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
spring.datasource.primary.username=root
spring.datasource.primary.password=root__
# 指定为HikariDataSource
spring.datasource.primary.type=com.zaxxer.hikari.HikariDataSource
# hikari连接池配置 对应 HikariConfig 配置属性类
spring.datasource.primary.hikari.pool-name=HikariCP-Primary
#最小空闲连接数
spring.datasource.primary.hikari.minimum-idle=5
# 空闲连接存活最大时间,默认10分钟
spring.datasource.primary.hikari.idle-timeout=600000
# 连接池最大连接数,默认是10
spring.datasource.primary.hikari.maximum-pool-size=10
# 此属性控制从池返回的连接的默认自动提交行为,默认值:true
spring.datasource.primary.hikari.auto-commit=true
# 此属性控制池中连接的最长生命周期,值0表示无限生命周期,默认30分钟
spring.datasource.primary.hikari.max-lifetime=1800000
# 数据库连接超时时间,默认30秒
spring.datasource.primary.hikari.connection-timeout=30000
# 连接测试query,如果你的驱动程序支持JDBC4,强烈建议不要设置此属性。
#spring.datasource.primary.hikari.connection-test-query=SELECT 1
spring.datasource.second.driver-class-name=com.mysql.cj.jdbc.Driver
spring.datasource.second.url=jdbc:mysql://192.168.203.150:3306/twms?useUnicode=true&useSSL=false&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai
spring.datasource.second.username=aliyun_root
spring.datasource.second.password=root__
spring.datasource.second.host=192.168.203.150
spring.datasource.second.port=3306
# 指定为HikariDataSource
spring.datasource.second.type=com.zaxxer.hikari.HikariDataSource
# hikari连接池配置 对应 HikariConfig 配置属性类
spring.datasource.second.hikari.pool-name=HikariCP-Second
#最小空闲连接数
spring.datasource.second.hikari.minimum-idle=5
# 空闲连接存活最大时间,默认10分钟
spring.datasource.second.hikari.idle-timeout=600000
# 连接池最大连接数,默认是10
spring.datasource.second.hikari.maximum-pool-size=10
# 此属性控制从池返回的连接的默认自动提交行为,默认值:true
spring.datasource.second.hikari.auto-commit=true
# 此属性控制池中连接的最长生命周期,值0表示无限生命周期,默认30分钟
spring.datasource.second.hikari.max-lifetime=1800000
# 数据库连接超时时间,默认30秒
spring.datasource.second.hikari.connection-timeout=30000
# 连接测试query,如果你的驱动程序支持JDBC4,强烈建议不要设置此属性。
#spring.datasource.second.hikari.connection-test-query=SELECT 1
## redis 配置
spring.redis.host=127.0.0.1
spring.redis.port=6379
spring.redis.database=0
spring.redis.timeout=10s
spring.redis.lettuce.pool.min-idle=0
spring.redis.lettuce.pool.max-idle=8
spring.redis.lettuce.pool.max-active=8
spring.redis.lettuce.pool.max-wait=-1ms
这里配置了redis,但实际项目中没有使用。
NbigscreenController.java
package com.cqsym.newbig.controller;
import com.cqsym.newbig.primary.entity.TownerBase;
import com.cqsym.newbig.service.BinLogService;
import com.cqsym.newbig.service.TownerBaseService;
import com.cqsym.newbig.service.XianluMingchengService;
import com.cqsym.newbig.service.ZaituDingdanNumSerivce;
import com.cqsym.newbig.service.ZuoyeNumService;
import com.cqsym.newbig.utils.AjaxResult;
import com.cqsym.newbig.vo.DaquNumVo;
import com.cqsym.newbig.vo.OwnerBaseNumVo;
import com.cqsym.newbig.vo.XianluMingchengVo;
import com.cqsym.newbig.vo.ZaituDingdanVo;
import com.cqsym.newbig.vo.ZhanbiVo;
import com.cqsym.newbig.vo.ZuoyeShishiQingkuangVo;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.List;
import java.util.Map;
/**
 * HTTP endpoints for the real-time big-screen dashboard.
 *
 * <p>Exposes on/off control of the binlog (Flink CDC) listener plus a set of
 * read-only endpoints returning cached, pre-aggregated operation statistics.
 * Every handler returns an {@link AjaxResult} envelope serialized as JSON.
 */
@Controller
@RequestMapping("/nbigscreen")
public class NbigscreenController {

    private static final Logger log = LoggerFactory.getLogger(NbigscreenController.class);

    @Autowired
    private BinLogService binLogService;
    @Autowired
    private ZuoyeNumService zuoyeNumService;
    @Autowired
    private XianluMingchengService xianluMingchengService;
    @Autowired
    private ZaituDingdanNumSerivce zaituDingdanNumSerivce;
    @Autowired
    private TownerBaseService townerBaseService;

    /** Simple liveness/echo endpoint. */
    @ResponseBody
    @RequestMapping("/index")
    public AjaxResult index() {
        log.info("index... ");
        return AjaxResult.success("index");
    }

    /** Starts the binlog listening job; on failure the exception message is returned. */
    @ResponseBody
    @RequestMapping("/on")
    public AjaxResult on() {
        log.info("on... ");
        try {
            binLogService.start();
        } catch (Exception exception) {
            return AjaxResult.error(exception.getMessage());
        }
        return AjaxResult.success();
    }

    /** Stops the binlog listening job; on failure the exception message is returned. */
    @ResponseBody
    @RequestMapping("/off")
    public AjaxResult off() {
        log.info("off... ");
        try {
            binLogService.stop();
        } catch (Exception exception) {
            return AjaxResult.error(exception.getMessage());
        }
        return AjaxResult.success();
    }

    /** Returns the cached year/month/day operation counters. */
    @ResponseBody
    @RequestMapping("/getZuoyeNum")
    public AjaxResult getZuoyeNum() {
        log.info("getZuoyeNum... ");
        // Wildcard generics instead of a raw Map; the service exposes a cache view.
        Map<?, ?> map = zuoyeNumService.getZuoyeNum();
        return AjaxResult.success(map);
    }

    /** Returns per-office (banshichu) operation counts, sorted descending. */
    @ResponseBody
    @RequestMapping("/getBanshichuZuoyeNum")
    public AjaxResult getBanshichuZuoyeNum() {
        log.info("getBanshichuZuoyeNum... ");
        List<OwnerBaseNumVo> list = zuoyeNumService.getBanshichuZuoyeNum();
        return AjaxResult.success(list);
    }

    /** Returns per-region (daqu) operation counts, sorted descending. */
    @ResponseBody
    @RequestMapping("/getDaquZuoyeNum")
    public AjaxResult getDaquZuoyeNum() {
        log.info("getDaquZuoyeNum... ");
        List<DaquNumVo> list = zuoyeNumService.getDaquZuoyeNum();
        return AjaxResult.success(list);
    }

    /** Returns the rolling list of most recent operations. */
    @ResponseBody
    @RequestMapping("/getShishiZuoyeQingkuang")
    public AjaxResult getShishiZuoyeQingkuang() {
        log.info("getShishiZuoyeQingkuang... ");
        List<ZuoyeShishiQingkuangVo> list = zuoyeNumService.getShishiZuoyeQingkuang();
        return AjaxResult.success(list);
    }

    /** Returns the configured route (xianlu) names. */
    @ResponseBody
    @RequestMapping("/getXianluMingcheng")
    public AjaxResult getXianluMingcheng() {
        log.info("getXianluMingcheng... ");
        List<XianluMingchengVo> list = xianluMingchengService.getXianluMingcheng();
        return AjaxResult.success(list);
    }

    /** Returns per-day in-transit order counts. */
    @ResponseBody
    @RequestMapping("/getZaituDingdanNumForEveryDay")
    public AjaxResult getZaituDingdanNumForEveryDay() throws JsonProcessingException {
        log.info("getZaituDingdanNumForEveryDay... ");
        List<ZaituDingdanVo> list = zaituDingdanNumSerivce.getZaituDingdanNumForEveryDay();
        return AjaxResult.success(list);
    }

    /** Returns all owner-base master records. */
    @ResponseBody
    @RequestMapping("/getTOwnerBaseAll")
    public AjaxResult getTOwnerBaseAll() {
        log.info("getTOwnerBaseAll... ");
        List<TownerBase> list = townerBaseService.findAll();
        return AjaxResult.success(list);
    }

    /** Returns the operation-type share (zhanbi) breakdown for the pie chart. */
    @ResponseBody
    @RequestMapping("/getZuoyeZhanbi")
    public AjaxResult getZuoyeZhanbi() {
        log.info("getZuoyeZhanbi... ");
        List<ZhanbiVo> list = zuoyeNumService.getZuoyeZhanbi();
        return AjaxResult.success(list);
    }
}
二、各种Service
ZuoyeNumService.java
package com.cqsym.newbig.service;
import com.cqsym.newbig.config.DataSourceRepository;
import com.cqsym.newbig.constant.ZuoyeNumConstants;
import com.cqsym.newbig.enums.OperateTypeEnum;
import com.cqsym.newbig.vo.DaquNumVo;
import com.cqsym.newbig.vo.DaquOwnerBaseNumVo;
import com.cqsym.newbig.vo.OwnerBaseNumVo;
import com.cqsym.newbig.vo.ZhanbiVo;
import com.cqsym.newbig.vo.ZuoyeShishiQingkuangVo;
import com.cqsym.newbig.vo.ZuoyeShishiQingkuangVo1;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAdjusters;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static com.cqsym.newbig.cache.AppMap.BanshichuDaquCacheMap;
import static com.cqsym.newbig.cache.AppMap.BanshichuZuoyeNumCacheMap;
import static com.cqsym.newbig.cache.AppMap.OwnerBaseIdNameCacheName;
import static com.cqsym.newbig.cache.AppMap.ZuoyeNumCacheMap;
import static com.cqsym.newbig.cache.AppMap.ZuoyeShishiQingkuang;
@Service
public class ZuoyeNumService {
private static final Logger log = LoggerFactory.getLogger(ZuoyeNumService.class);
@Autowired
@Qualifier(value="secondDataSourceRepository")
private DataSourceRepository secondDataSourceRepository;
@Autowired
private ObjectMapper objectMapper;
/**
 * Bootstraps all cached counters: resets the counter map, then loads the
 * day/month/year totals per operation type and the company-wide totals.
 * Intended to run once before the big screen starts serving requests.
 */
public void init() {
    log.info("ZuoyeNumService init() 开始 ... ");
    log.info("initZuoyeNumCacheMap ... ");
    initZuoyeNumCacheMap();
    log.info("getDayZuoyeNum ... ");
    getDayZuoyeNum();
    log.info("getMonthZuoyeNum ... ");
    getMonthZuoyeNum();
    log.info("getYearZuoyeNum ... ");
    getYearZuoyeNum();
    log.info("getGongsiYearNum ... ");
    getGongsiYearNum();
    log.info("getGongsiMonthNum ... ");
    getGongsiMonthNum();
    log.info("getGongsiDayNum ... ");
    getGongsiDayNum();
    // Fixed typo in the completion log message ("complate" -> "complete").
    log.info("ZuoyeNumService init() complete完成 ... ");
}
/**
 * Resets every big-screen counter in ZuoyeNumCacheMap to zero.
 * Keys come from ZuoyeNumConstants: a year/month/day total for the company
 * overall (Gongsi) and for each operation type — Jiecheduanbo (接车短驳),
 * Zhuangche (装车), Xieche (卸车), Peisong (配送), Gongluzhifa (公路直发),
 * Yikuduanbo (移库), Jiagu (加固), Jiegu (解固).
 */
public void initZuoyeNumCacheMap() {
    ZuoyeNumCacheMap.clear();
    // Company-wide totals.
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiDayTotal, 0L);
    // Per-operation-type totals, year/month/day each.
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguDayTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguYearTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguMonthTotal, 0L);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguDayTotal, 0L);
}
/**
 * Loads year-to-date counts per operation type into ZuoyeNumCacheMap.
 * Rows whose tddOperationType is not one of the known codes are ignored.
 */
public void getYearZuoyeNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfYear = LocalDateTime.now().withDayOfYear(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
    List<Map<String, Object>> rows = getZuoyeNum(startOfYear.format(formatter));
    if (rows == null || rows.isEmpty()) {
        return; // query failed or no completed operations yet this year
    }
    for (Map<String, Object> row : rows) {
        String operationType = row.get("tddOperationType").toString();
        Object num = row.get("operationNum");
        if ("1".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("2".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("3".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("4".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("5".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("10".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("11".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboYearTotal, Long.valueOf(String.valueOf(num)));
        } else if ("15".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaYearTotal, Long.valueOf(String.valueOf(num)));
        }
    }
}
/**
 * Loads month-to-date counts per operation type into ZuoyeNumCacheMap.
 * Rows whose tddOperationType is not one of the known codes are ignored.
 */
public void getMonthZuoyeNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfMonth = LocalDateTime.now().with(TemporalAdjusters.firstDayOfMonth()).withHour(0).withMinute(0).withSecond(0).withNano(0);
    List<Map<String, Object>> rows = getZuoyeNum(startOfMonth.format(formatter));
    if (rows == null || rows.isEmpty()) {
        return; // query failed or no completed operations yet this month
    }
    for (Map<String, Object> row : rows) {
        String operationType = row.get("tddOperationType").toString();
        Object num = row.get("operationNum");
        if ("1".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("2".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("3".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("4".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("5".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("10".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("11".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboMonthTotal, Long.valueOf(String.valueOf(num)));
        } else if ("15".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaMonthTotal, Long.valueOf(String.valueOf(num)));
        }
    }
}
/**
 * Loads today's counts per operation type into ZuoyeNumCacheMap.
 * Rows whose tddOperationType is not one of the known codes are ignored.
 */
public void getDayZuoyeNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfDay = LocalDateTime.now().withHour(0).withMinute(0).withSecond(0).withNano(0);
    List<Map<String, Object>> rows = getZuoyeNum(startOfDay.format(formatter));
    if (rows == null || rows.isEmpty()) {
        return; // query failed or no completed operations yet today
    }
    for (Map<String, Object> row : rows) {
        String operationType = row.get("tddOperationType").toString();
        Object num = row.get("operationNum");
        if ("1".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiecheduanboDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("2".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.ZhuangcheDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("3".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.XiecheDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("4".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JiaguDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("5".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.JieguDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("10".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.PeisongDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("11".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.YikuduanboDayTotal, Long.valueOf(String.valueOf(num)));
        } else if ("15".equals(operationType)) {
            ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongluzhifaDayTotal, Long.valueOf(String.valueOf(num)));
        }
    }
}
/**
 * Counts completed operations (tdd_status = 2, dr = 0) since the given
 * timestamp, grouped by operation type, against the second data source.
 *
 * @param zuoyeshijian inclusive lower bound for tdd_operation_end_time,
 *                     formatted "yyyy-MM-dd HH:mm:ss"
 * @return one row per tddOperationType with its operationNum; an empty list
 *         (never null) when the query fails — the failure is only logged
 */
private List<Map<String, Object>> getZuoyeNum(String zuoyeshijian) {
    String sql = "SELECT tdd_operation_type AS tddOperationType, COUNT(1) AS operationNum " +
            "FROM t_dispatch_detail " +
            "WHERE tdd_status = 2 " +
            "AND tdd_operation_end_time >= ? " +
            "AND dr = 0 " +
            "GROUP BY tdd_operation_type ";
    try {
        return secondDataSourceRepository.queryForListMap(sql, zuoyeshijian);
    } catch (Exception e) {
        log.error("getZuoyeNum error", e);
        // Return an empty list instead of null so callers need no null check.
        return new ArrayList<>();
    }
}
/** Refreshes the company-wide year-to-date operation total in the cache. */
public void getGongsiYearNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfYear = LocalDateTime.now().withDayOfYear(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
    Long total = getGongsiNum(startOfYear.format(formatter));
    log.info("gongsiYearNum:{}", total);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiYearTotal, total);
}
/** Refreshes the company-wide month-to-date operation total in the cache. */
public void getGongsiMonthNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfMonth = LocalDateTime.now().with(TemporalAdjusters.firstDayOfMonth()).withHour(0).withMinute(0).withSecond(0).withNano(0);
    Long total = getGongsiNum(startOfMonth.format(formatter));
    log.info("gongsiMonthNum:{}", total);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiMonthTotal, total);
}
/** Refreshes the company-wide today-so-far operation total in the cache. */
public void getGongsiDayNum() {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime startOfDay = LocalDateTime.now().withHour(0).withMinute(0).withSecond(0).withNano(0);
    Long total = getGongsiNum(startOfDay.format(formatter));
    log.info("gongsiDayNum:{}", total);
    ZuoyeNumCacheMap.put(ZuoyeNumConstants.GongsiDayTotal, total);
}
/**
 * Counts all completed operations (tdd_status = 2, dr = 0) since the given
 * timestamp, company-wide.
 *
 * @param zuoyeshijian inclusive lower bound for tdd_operation_end_time,
 *                     formatted "yyyy-MM-dd HH:mm:ss"
 * @return the count, or 0 when the query returns nothing
 */
private Long getGongsiNum(String zuoyeshijian) {
    String sql = "SELECT COUNT(1) AS operationNum " +
            "FROM t_dispatch_detail " +
            "WHERE tdd_status = 2 " +
            "AND tdd_operation_end_time >= ? " +
            "AND dr = 0";
    Map map = secondDataSourceRepository.queryForMap(sql, zuoyeshijian);
    if (map == null) {
        return 0L;
    }
    // COUNT(*) may come back as Long, Integer, BigInteger or BigDecimal
    // depending on the JDBC driver — avoid the previous blind (Long) cast.
    Object num = map.get("operationNum");
    return (num instanceof Number) ? ((Number) num).longValue() : 0L;
}
/**
 * Seeds BanshichuZuoyeNumCacheMap with this year's completed-operation count
 * per owner base (office), read from the second data source.
 */
public void initBanshichuZuoyeNum() {
    DateTimeFormatter datetimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
    LocalDateTime yearStart = LocalDateTime.now().withDayOfYear(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
    String yearStartStr = yearStart.format(datetimeFormatter);
    String sql = "SELECT tdd_owner_base AS tddOwnerBase, COUNT(*) AS totalNum " +
            "FROM t_dispatch_detail " +
            "WHERE tdd_status = 2 " +
            "AND tdd_operation_end_time >= ? " +
            "AND dr = 0 " +
            "GROUP BY tdd_owner_base ";
    List<OwnerBaseNumVo> ownerBaseNumVoList = secondDataSourceRepository.queryForList(sql, OwnerBaseNumVo.class, yearStartStr);
    Map<Integer, Long> ownerBaseNumVoMap = new HashMap<>();
    if (ownerBaseNumVoList != null) {
        for (OwnerBaseNumVo vo : ownerBaseNumVoList) {
            ownerBaseNumVoMap.put(vo.getTddOwnerBase(), vo.getTotalNum());
        }
    }
    // Log the map once; the original serialized the same map a second time via
    // writeValueAsString inside a try/catch, which was redundant.
    log.info("ownerBaseNumVoMap:{}", objectMapper.valueToTree(ownerBaseNumVoMap));
    BanshichuZuoyeNumCacheMap.putAll(ownerBaseNumVoMap);
}
/**
 * Builds the per-office (banshichu) count list from the cache, resolving each
 * owner-base id to its display name, sorted by count descending.
 */
public List<OwnerBaseNumVo> getBanshichuZuoyeNum() {
    Map<Integer, Long> countsByBase = BanshichuZuoyeNumCacheMap.asMap();
    List<OwnerBaseNumVo> result = new ArrayList<>();
    if (countsByBase != null) {
        for (Map.Entry<Integer, Long> entry : countsByBase.entrySet()) {
            OwnerBaseNumVo vo = new OwnerBaseNumVo();
            vo.setTddOwnerBase(entry.getKey());
            vo.setTotalNum(entry.getValue());
            vo.setTddOwnerBaseName(OwnerBaseIdNameCacheName.get(String.valueOf(entry.getKey())));
            result.add(vo);
        }
    }
    result.sort(Comparator.comparing(OwnerBaseNumVo::getTotalNum).reversed());
    return result;
}
/**
 * Aggregates the cached per-office counts up to region (daqu) level:
 * resolves each owner base to its region name, drops bases with no region,
 * sums counts per region, and returns the list sorted by total descending.
 */
public List<DaquNumVo> getDaquZuoyeNum() {
    Map<Integer, Long> countsByBase = BanshichuZuoyeNumCacheMap.asMap();
    List<DaquOwnerBaseNumVo> perBase = new ArrayList<>();
    if (countsByBase != null) {
        for (Map.Entry<Integer, Long> entry : countsByBase.entrySet()) {
            DaquOwnerBaseNumVo vo = new DaquOwnerBaseNumVo();
            vo.setTddOwnerBase(entry.getKey());
            vo.setTotalNum(entry.getValue());
            String baseName = OwnerBaseIdNameCacheName.get(String.valueOf(entry.getKey()));
            vo.setTddOwnerBaseName(baseName);
            vo.setDaquName((String) BanshichuDaquCacheMap.get(baseName));
            perBase.add(vo);
        }
    }
    Map<String, List<DaquOwnerBaseNumVo>> byDaqu = perBase.stream()
            .filter(vo -> vo.getDaquName() != null)
            .collect(Collectors.groupingBy(DaquOwnerBaseNumVo::getDaquName));
    List<DaquNumVo> result = new ArrayList<>();
    for (Map.Entry<String, List<DaquOwnerBaseNumVo>> entry : byDaqu.entrySet()) {
        DaquNumVo daquNumVo = new DaquNumVo();
        daquNumVo.setDaquName(entry.getKey());
        long regionTotal = 0L;
        for (DaquOwnerBaseNumVo vo : entry.getValue()) {
            regionTotal += vo.getTotalNum();
        }
        daquNumVo.setTotalNum(regionTotal);
        result.add(daquNumVo);
    }
    result.sort(Comparator.comparing(DaquNumVo::getTotalNum).reversed());
    return result;
}
/**
 * Returns the live view of all cached operation counters.
 * The previous implementation allocated a HashMap that was immediately
 * discarded by the reassignment — return the cache view directly.
 */
public Map getZuoyeNum() {
    return ZuoyeNumCacheMap.asMap();
}
/**
 * Seeds the real-time activity queue (ZuoyeShishiQingkuang) with the 50 most
 * recently completed dispatch rows from the second data source, mapping each
 * row to a display VO (office name, operation-type enum, end time, VIN).
 *
 * NOTE(review): ORDER BY tdd_id DESC means rows are offered newest-first;
 * confirm that is the intended display order for the rolling list.
 */
public void initShishiZuoyeQingkuang() {
    // Latest 50 completed (tdd_status = 2) and not soft-deleted (dr = 0) rows.
    String sql = "SELECT tdd_owner_base, tdd_operation_type, tdd_vin, tdd_operation_end_time " +
            "FROM t_dispatch_detail " +
            "WHERE tdd_status = 2 " +
            "AND dr = 0 " +
            "ORDER BY tdd_id DESC " +
            "LIMIT 50 ";
    List<ZuoyeShishiQingkuangVo1> zuoyeShishiQingkuangVo1List = secondDataSourceRepository.queryForList(sql, ZuoyeShishiQingkuangVo1.class);
    if (zuoyeShishiQingkuangVo1List != null && zuoyeShishiQingkuangVo1List.size() > 0) {
        zuoyeShishiQingkuangVo1List.forEach(zuoyeShishiQingkuangVo1 -> {
            ZuoyeShishiQingkuangVo zuoyeShishiQingkuangVo = new ZuoyeShishiQingkuangVo();
            // Resolve the owner-base id to its display name via the id->name cache.
            String ownerBaseName = OwnerBaseIdNameCacheName.get(String.valueOf(zuoyeShishiQingkuangVo1.getTddOwnerBase()));
            zuoyeShishiQingkuangVo.setOwnerBaseName(ownerBaseName);
            // Map the numeric operation-type code to its enum for display.
            OperateTypeEnum operateTypeEnum = OperateTypeEnum.fromValue(zuoyeShishiQingkuangVo1.getTddOperationType());
            zuoyeShishiQingkuangVo.setOperateType(operateTypeEnum);
            zuoyeShishiQingkuangVo.setOperateTime(zuoyeShishiQingkuangVo1.getTddOperationEndTime());
            zuoyeShishiQingkuangVo.setVin(zuoyeShishiQingkuangVo1.getTddVin());
            ZuoyeShishiQingkuang.offer(zuoyeShishiQingkuangVo);
        });
    }
}
/**
 * Returns a snapshot of the real-time activity queue as a list,
 * in the queue's current iteration order.
 */
public List<ZuoyeShishiQingkuangVo> getShishiZuoyeQingkuang() {
    List<ZuoyeShishiQingkuangVo> snapshot = new ArrayList<>();
    ZuoyeShishiQingkuang.iterator().forEachRemaining(snapshot::add);
    return snapshot;
}
/**
 * Builds the year-to-date job share breakdown for the dashboard pie chart.
 * One entry per tracked operation type, plus a final "其他" (other) entry
 * that is the company-wide yearly total minus all tracked categories.
 *
 * @return the share list in fixed chart order
 * @throws RuntimeException if the result cannot be serialized for logging
 */
public List<ZhanbiVo> getZuoyeZhanbi() {
    // Display name and counter key, paired by index, in the fixed order the
    // chart expects. (Replaces the previous copy-pasted per-entry blocks and
    // the unused nameList.)
    String[] names = {"接车短驳", "装车", "卸车", "配送", "公路直发", "移库", "加固", "解固"};
    String[] keys = {
            ZuoyeNumConstants.JiecheduanboYearTotal,
            ZuoyeNumConstants.ZhuangcheYearTotal,
            ZuoyeNumConstants.XiecheYearTotal,
            ZuoyeNumConstants.PeisongYearTotal,
            ZuoyeNumConstants.GongluzhifaYearTotal,
            ZuoyeNumConstants.YikuduanboYearTotal,
            ZuoyeNumConstants.JiaguYearTotal,
            ZuoyeNumConstants.JieguYearTotal
    };
    List<ZhanbiVo> zhanbiVoList = new ArrayList<>();
    long categorizedSum = 0L;
    for (int i = 0; i < names.length; i++) {
        ZhanbiVo vo = new ZhanbiVo();
        vo.setName(names[i]);
        long value = ZuoyeNumCacheMap.get(keys[i]);
        vo.setValue(value);
        categorizedSum += value;
        zhanbiVoList.add(vo);
    }
    // "其他" = company-wide yearly total minus every explicitly tracked category.
    ZhanbiVo other = new ZhanbiVo();
    other.setName("其他");
    other.setValue(ZuoyeNumCacheMap.get(ZuoyeNumConstants.GongsiYearTotal) - categorizedSum);
    zhanbiVoList.add(other);
    String json = "";
    try {
        json = objectMapper.writeValueAsString(zhanbiVoList);
    } catch (JsonProcessingException e) {
        log.error("json转换异常:" + e.getMessage());
        throw new RuntimeException(e);
    }
    log.info("zhanbiVoList: " + json);
    return zhanbiVoList;
}
}
ZaituDingdanNumSerivce.java
package com.cqsym.newbig.service;
import com.cqsym.newbig.config.DataSourceRepository;
import com.cqsym.newbig.vo.ZaituDingdanVo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import static com.cqsym.newbig.cache.AppMap.ZaituDingdanNumForEveryDayCacheList;
@Service
public class ZaituDingdanNumSerivce {
    private static final Logger log = LoggerFactory.getLogger(ZaituDingdanNumSerivce.class);
    @Autowired
    @Qualifier(value="secondDataSourceRepository")
    private DataSourceRepository secondDataSourceRepository;
    @Autowired
    private ObjectMapper objectMapper;

    /**
     * Loads the year-to-date cross-province in-transit order counts
     * (to_status = 3, different start/delivery province) and replaces the
     * cached list used by the dashboard.
     */
    public void initZaituDingdanNumForEveryDay() {
        log.info("initZaituDingdanNumForEveryDay... ");
        DateTimeFormatter datetimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        LocalDateTime yearStart = LocalDateTime.now().withDayOfYear(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
        String yearStartStr = yearStart.format(datetimeFormatter);
        String sql = "SELECT to_start_province, to_delivery_province , COUNT(*) AS count_num " +
                "FROM t_order " +
                "WHERE to_status = 3 " +
                "AND to_order_date >= ? " +
                "AND to_start_province != to_delivery_province " +
                "GROUP BY to_start_province, to_delivery_province " +
                "ORDER BY count_num DESC ";
        List<ZaituDingdanVo> zaituDingdanVoList = secondDataSourceRepository.queryForList(sql, ZaituDingdanVo.class, yearStartStr);
        String json = "";
        try {
            json = objectMapper.writeValueAsString(zaituDingdanVoList);
        } catch (JsonProcessingException e) {
            // Log the full stack trace instead of printStackTrace(); a
            // serialization failure only affects the debug output below,
            // not the cache refresh.
            log.error("zaituDingdanVoList json serialization failed", e);
        }
        log.info("zaituDingdanVoList: " + json);
        // NOTE(review): clear() + addAll() is not atomic; concurrent readers
        // may briefly observe an empty/partial list — confirm acceptable.
        ZaituDingdanNumForEveryDayCacheList.clear();
        ZaituDingdanNumForEveryDayCacheList.addAll(zaituDingdanVoList);
    }

    /**
     * Returns the cached counts with province names normalized for display
     * (administrative suffixes such as 省/市/自治区 stripped).
     *
     * @return display-ready copies of the cached entries
     * @throws JsonProcessingException if the debug log serialization fails
     */
    public List<ZaituDingdanVo> getZaituDingdanNumForEveryDay() throws JsonProcessingException {
        log.info("getZaituDingdanNumForEveryDay... ");
        List<ZaituDingdanVo> result = new ArrayList<>();
        for (ZaituDingdanVo cached : ZaituDingdanNumForEveryDayCacheList) {
            ZaituDingdanVo vo = new ZaituDingdanVo();
            vo.setCountNum(cached.getCountNum());
            vo.setToStartProvince(normalizeProvince(cached.getToStartProvince()));
            vo.setToDeliveryProvince(normalizeProvince(cached.getToDeliveryProvince()));
            result.add(vo);
        }
        log.info("zaituDingdanVoList: " + objectMapper.writeValueAsString(ZaituDingdanNumForEveryDayCacheList));
        return result;
    }

    /** Strips administrative suffixes/qualifiers from a province name for display. */
    private String normalizeProvince(String province) {
        return province.replace("省", "").replace("市", "").replace("自治区", "")
                .replace("维吾尔", "").replace("壮族", "").replace("回族", "");
    }
}
XianluMingchengService.java
package com.cqsym.newbig.service;
import com.cqsym.newbig.config.DataSourceRepository;
import com.cqsym.newbig.vo.XianluMingchengVo;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static com.cqsym.newbig.cache.AppMap.XianluMingchengCacheMap;
@Service
public class XianluMingchengService {
    private static final Logger log = LoggerFactory.getLogger(XianluMingchengService.class);
    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    @Qualifier(value="secondDataSourceRepository")
    private DataSourceRepository secondDataSourceRepository;

    /** Startup entry point; seeds the route-name counter cache. */
    public void init() {
        log.info("XianluMingchengService init() ... ");
        initXianluMingcheng();
    }

    /**
     * Loads year-to-date order counts per (start, delivery) province pair and
     * stores them in XianluMingchengCacheMap keyed as "start--delivery".
     */
    public void initXianluMingcheng() {
        DateTimeFormatter datetimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        LocalDateTime yearStart = LocalDateTime.now().withDayOfYear(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
        String yearStartStr = yearStart.format(datetimeFormatter);
        String sql = "SELECT to_start_province AS toStartProvince, to_delivery_province AS toDeliveryProvince, COUNT(*) AS toTotalNum " +
                "FROM t_order " +
                "WHERE dr = 0 " +
                "AND to_status != 5 " +
                "AND createtime >= ? " +
                "AND to_start_province IS NOT NULL " +
                "AND to_delivery_province IS NOT NULL " +
                "GROUP BY to_start_province,to_delivery_province " +
                "ORDER BY toTotalNum DESC ";
        List<XianluMingchengVo> list = secondDataSourceRepository.queryForList(sql, XianluMingchengVo.class, yearStartStr);
        if (list != null && !list.isEmpty()) {
            for (XianluMingchengVo vo : list) {
                log.info(vo.getToStartProvince() + "--" + vo.getToDeliveryProvince() + "--" + vo.getToTotalNum());
                XianluMingchengCacheMap.put(vo.getToStartProvince() + "--" + vo.getToDeliveryProvince(), vo.getToTotalNum());
            }
        }
        String mapStr = "";
        try {
            mapStr = objectMapper.writeValueAsString(XianluMingchengCacheMap);
        } catch (Exception exception) {
            // Log with the stack trace; printStackTrace() bypasses the logging
            // system and loses the trace in production.
            log.error("XianluMingchengService getXianluMingcheng() 发生了异常! ", exception);
        }
        log.info("XianluMingchengCacheMap: {}", mapStr);
    }

    /**
     * Returns the cached route totals sorted descending by count, with
     * administrative suffixes stripped from route names for display.
     */
    public List<XianluMingchengVo> getXianluMingcheng() {
        Map<String, Long> map = XianluMingchengCacheMap.asMap();
        List<XianluMingchengVo> list = new ArrayList<>();
        map.forEach((key, value) -> {
            XianluMingchengVo vo = new XianluMingchengVo();
            String displayKey = key.replace("省", "").replace("市", "").replace("自治区", "")
                    .replace("维吾尔", "").replace("壮族", "").replace("回族", "");
            log.info("key:{},value:{}", displayKey, value);
            vo.setXianluMingcheng(displayKey);
            vo.setToTotalNum(value);
            list.add(vo);
        });
        List<XianluMingchengVo> sorted = list.stream()
                .sorted((o1, o2) -> o2.getToTotalNum().compareTo(o1.getToTotalNum()))
                .collect(Collectors.toList());
        log.info("list1:{}", sorted);
        return sorted;
    }
}
TownerBaseService.java
package com.cqsym.newbig.service;
import com.cqsym.newbig.primary.entity.TownerBase;
import com.cqsym.newbig.primary.repository.TownerBaseRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/** Thin read-only service over the owner-base (办事处) JPA repository. */
@Service
public class TownerBaseService {
private static final Logger log = LoggerFactory.getLogger(TownerBaseService.class);
@Autowired
private TownerBaseRepository townerBaseRepository;
/**
 * Returns all owner-base rows.
 *
 * @return every TownerBase entity in the primary database
 */
public List<TownerBase> findAll() {
return townerBaseRepository.findAll();
}
}
BinLogService.java
package com.cqsym.newbig.service;
import com.cqsym.newbig.dese.DataChangeInfo;
import com.cqsym.newbig.dese.DataChangeSink;
import com.cqsym.newbig.dese.MysqlDeserialization;
import com.ververica.cdc.connectors.mysql.source.MySqlSource;
import com.ververica.cdc.connectors.mysql.table.StartupOptions;
import org.apache.flink.api.common.JobExecutionResult;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.DataStreamSink;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@Service
public class BinLogService {
    private static final Logger log = LoggerFactory.getLogger(BinLogService.class);
    private StreamExecutionEnvironment env;
    private JobExecutionResult jobExecutionResult;
    @Value("${spring.datasource.second.host}")
    private String host;
    @Value("${spring.datasource.second.port}")
    private Integer port;
    @Value("${spring.datasource.second.username}")
    private String username;
    @Value("${spring.datasource.second.password}")
    private String password;

    /**
     * Builds a Flink MySQL CDC source over twms.t_order and
     * twms.t_dispatch_detail, wires it to DataChangeSink and runs the job.
     * NOTE: env.execute() blocks the calling thread until the job terminates.
     */
    public void start() {
        log.info("BinLogService start ... ");
        MySqlSource<DataChangeInfo> source = MySqlSource.<DataChangeInfo>builder()
                .hostname(host)
                .port(port)
                .databaseList("twms")
                .tableList("twms.t_order", "twms.t_dispatch_detail")
                .username(username)
                .password(password)
                // latest: stream only from the current binlog position.
                // (initial would first snapshot the tables, then stream increments.)
                .startupOptions(StartupOptions.latest())
                .deserializer(new MysqlDeserialization())
                .includeSchemaChanges(true)
                .serverTimeZone("GMT+8")
                .build();
        Configuration configuration = new Configuration();
        // Expose the Flink web UI / REST endpoint on port 8081.
        configuration.setInteger(RestOptions.PORT, 8081);
        env = StreamExecutionEnvironment.getExecutionEnvironment(configuration);
        env.enableCheckpointing(5000);
        DataStreamSink<DataChangeInfo> sink = env.fromSource(source, WatermarkStrategy.noWatermarks(), "MySQL Source")
                .addSink(new DataChangeSink());
        try {
            log.info("开始执行 env.execute() ... ");
            jobExecutionResult = env.execute("BinLogService1");
        } catch (Exception e) {
            // Log at error with the stack trace; the previous info-level message
            // plus printStackTrace() hid failures from log aggregation.
            log.error("env.execute() 执行异常 ... ", e);
        }
    }

    public void stop() {
        // TODO: cancel the running Flink job. env.execute() blocks, so proper
        // cancellation needs the JobClient from executeAsync(); the stored
        // JobExecutionResult offers no cancel handle.
        log.info("BinLogService stop ... ");
    }
}
三、这里有2个业务相关的RUNNABLE
TDispatchDetailListenerTaskRunnable.java
package com.cqsym.newbig.listener;
import com.cqsym.newbig.cache.AppMap;
import com.cqsym.newbig.constant.AppConstants;
import com.cqsym.newbig.constant.ZuoyeNumConstants;
import com.cqsym.newbig.dese.DataChangeInfo;
import com.cqsym.newbig.enums.OperateTypeEnum;
import com.cqsym.newbig.vo.ZuoyeShishiQingkuangVo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.HashMap;
import java.util.Map;
/**
 * Worker task that applies one twms.t_dispatch_detail binlog change event to
 * the in-memory dashboard caches (job counters, real-time activity queue,
 * per-owner-base counters).
 */
public class TDispatchDetailListenerTaskRunnable implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(TDispatchDetailListenerTaskRunnable.class);
    private ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
    private DataChangeInfo dataChangeInfo;

    public TDispatchDetailListenerTaskRunnable(DataChangeInfo dataChangeInfo) {
        this.dataChangeInfo = dataChangeInfo;
    }

    @Override
    public void run() {
        log.info("--------TDispatchDetailListenerTaskRunnable---run------");
        log.info(dataChangeInfo.toString());
        handleBinlogRowData(dataChangeInfo);
    }

    /**
     * Routes the change event: inserts (1) and updates (2) whose tdd_status is
     * "2" (completed) update the dashboard; deletes (3) are only logged.
     * The insert and update paths were previously duplicated verbatim — they
     * now share parseAfterData()/handleCompletedRow().
     */
    private void handleBinlogRowData(DataChangeInfo dataChangeInfo) {
        if (!(dataChangeInfo.getDatabase().equals("twms") && dataChangeInfo.getTableName().equals("t_dispatch_detail"))) {
            log.error("数据库名:{},表名:{},不是twms.t_dispatch_detail", dataChangeInfo.getDatabase(), dataChangeInfo.getTableName());
            return;
        }
        log.info("TDispatchDetailListenerTaskRunnable dataChangeInfo: " + dataChangeInfo);
        if (dataChangeInfo.getOperatorType().equals(1)) {
            log.info("TDispatchDetailListenerTaskRunnable 新增 afterData:{}", dataChangeInfo.getAfterData());
            handleCompletedRow(parseAfterData(dataChangeInfo.getAfterData()));
        } else if (dataChangeInfo.getOperatorType().equals(2)) {
            log.info("TDispatchDetailListenerTaskRunnable 修改 afterData:{}", dataChangeInfo.getAfterData());
            handleCompletedRow(parseAfterData(dataChangeInfo.getAfterData()));
        } else if (dataChangeInfo.getOperatorType().equals(3)) {
            log.info("TDispatchDetailListenerTaskRunnable 删除 afterData:{}", dataChangeInfo.getAfterData());
        }
    }

    /** Parses the JSON after-image into a column→value map; empty map on failure. */
    private Map parseAfterData(String afterData) {
        Map row = new HashMap();
        try {
            row = objectMapper.readValue(afterData, Map.class);
        } catch (JsonProcessingException e) {
            log.info("afterData转换成对象发生异常:" + e.getMessage());
        }
        return row;
    }

    /** Applies all dashboard updates for a row, if its status marks the job completed. */
    private void handleCompletedRow(Map row) {
        log.info("TDispatchDetailListenerTaskRunnable row: " + row);
        if (row.get("tdd_status").toString().equals("2")) {
            log.info("作业tdd_operation_type:{}", row.get("tdd_operation_type").toString());
            // Update day/month/year job counters.
            handleZuoyeNum(row);
            // Push the event onto the real-time activity queue.
            handleZuoyeShishiQiankuang(row);
            // Credit the job to its owner base (办事处).
            handleZuoyeNumAddToOwnerBase(row);
        }
    }

    /**
     * Increments the day/month/year counters for the row's operation type.
     * The company-wide (Gongsi) totals are incremented for every type —
     * including unknown ones — so they are bumped once after the switch
     * instead of being repeated in each case as before.
     */
    private void handleZuoyeNum(Map row) {
        switch (row.get("tdd_operation_type").toString()) {
            case "1":
                incrementDayMonthYear(ZuoyeNumConstants.JiecheduanboDayTotal,
                        ZuoyeNumConstants.JiecheduanboMonthTotal,
                        ZuoyeNumConstants.JiecheduanboYearTotal);
                break;
            case "2":
                incrementDayMonthYear(ZuoyeNumConstants.ZhuangcheDayTotal,
                        ZuoyeNumConstants.ZhuangcheMonthTotal,
                        ZuoyeNumConstants.ZhuangcheYearTotal);
                break;
            case "3":
                incrementDayMonthYear(ZuoyeNumConstants.XiecheDayTotal,
                        ZuoyeNumConstants.XiecheMonthTotal,
                        ZuoyeNumConstants.XiecheYearTotal);
                break;
            case "4":
                incrementDayMonthYear(ZuoyeNumConstants.JiaguDayTotal,
                        ZuoyeNumConstants.JiaguMonthTotal,
                        ZuoyeNumConstants.JiaguYearTotal);
                break;
            case "5":
                incrementDayMonthYear(ZuoyeNumConstants.JieguDayTotal,
                        ZuoyeNumConstants.JieguMonthTotal,
                        ZuoyeNumConstants.JieguYearTotal);
                break;
            case "10":
                incrementDayMonthYear(ZuoyeNumConstants.PeisongDayTotal,
                        ZuoyeNumConstants.PeisongMonthTotal,
                        ZuoyeNumConstants.PeisongYearTotal);
                break;
            case "11":
                incrementDayMonthYear(ZuoyeNumConstants.YikuduanboDayTotal,
                        ZuoyeNumConstants.YikuduanboMonthTotal,
                        ZuoyeNumConstants.YikuduanboYearTotal);
                break;
            case "15":
                incrementDayMonthYear(ZuoyeNumConstants.GongluzhifaDayTotal,
                        ZuoyeNumConstants.GongluzhifaMonthTotal,
                        ZuoyeNumConstants.GongluzhifaYearTotal);
                break;
            default:
                // Unknown types only count toward the company totals below.
                break;
        }
        incrementDayMonthYear(ZuoyeNumConstants.GongsiDayTotal,
                ZuoyeNumConstants.GongsiMonthTotal,
                ZuoyeNumConstants.GongsiYearTotal);
    }

    /** Bumps one counter triple (day, month, year). */
    private void incrementDayMonthYear(String dayKey, String monthKey, String yearKey) {
        handleZuoyeCacheNum(dayKey);
        handleZuoyeCacheNum(monthKey);
        handleZuoyeCacheNum(yearKey);
    }

    private void handleZuoyeCacheNum(String str) {
        AppMap.ZuoyeNumCacheMap.incrementAndGet(str);
    }

    /**
     * Builds a real-time activity entry from the row and offers it to the
     * queue. (The previous dead locals derived from tdd_operation_end_time and
     * the large commented-out time-difference check have been removed; the
     * entry is stamped with the wall clock since the event is near-real-time.)
     */
    private void handleZuoyeShishiQiankuang(Map row) {
        log.info("作业实时情况after row:{}", row.toString());
        String ownerBaseId = row.get("tdd_owner_base").toString();
        String operationType = row.get("tdd_operation_type").toString();
        String vin = row.get("tdd_vin").toString();
        ZuoyeShishiQingkuangVo vo = new ZuoyeShishiQingkuangVo();
        vo.setOwnerBaseName(AppMap.OwnerBaseIdNameCacheName.get(ownerBaseId));
        vo.setOperateType(OperateTypeEnum.fromValue(Integer.valueOf(operationType)));
        vo.setVin(vin);
        vo.setOperateTime(LocalDateTime.now());
        log.info("作业实时情况1:" + vo);
        String json = "";
        try {
            json = objectMapper.writeValueAsString(vo);
        } catch (JsonProcessingException e) {
            log.info("作业实时情况2报错:报错信息" + e.getMessage());
        }
        log.info("作业实时情况2:" + json);
        AppMap.ZuoyeShishiQingkuang.offer(vo);
    }

    /** Credits the completed job to its owner base's counter, if present. */
    private void handleZuoyeNumAddToOwnerBase(Map row) {
        log.info("after row:{}", row.toString());
        if (row.get("tdd_owner_base") != null) {
            handleZuoyeNumAddToOwnerBase(Integer.valueOf(row.get("tdd_owner_base").toString()));
        }
    }

    private void handleZuoyeNumAddToOwnerBase(Integer str) {
        AppMap.BanshichuZuoyeNumCacheMap.incrementAndGet(str);
    }
}
TOrderListenerTaskRunnable.java
package com.cqsym.newbig.listener;
import com.cqsym.newbig.cache.AppMap;
import com.cqsym.newbig.dese.DataChangeInfo;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
/**
 * Worker task that applies one twms.t_order binlog change event to the route
 * (线路) counter cache.
 */
public class TOrderListenerTaskRunnable implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(TOrderListenerTaskRunnable.class);
    private ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
    private DataChangeInfo dataChangeInfo;

    public TOrderListenerTaskRunnable(DataChangeInfo dataChangeInfo) {
        this.dataChangeInfo = dataChangeInfo;
    }

    @Override
    public void run() {
        log.info("--------TOrderListenerTaskRunnable---run------");
        log.info(dataChangeInfo.toString());
        handleBinlogRowData(dataChangeInfo);
    }

    /**
     * Inserts (1) bump the "start--delivery" route counter; updates (2) and
     * deletes (3) are only logged.
     */
    private void handleBinlogRowData(DataChangeInfo dataChangeInfo) {
        if (!(dataChangeInfo.getDatabase().equals("twms") && dataChangeInfo.getTableName().equals("t_order"))) {
            log.error("数据库名:{},表名:{},不是twms.t_order", dataChangeInfo.getDatabase(), dataChangeInfo.getTableName());
            return;
        }
        log.info("TOrderListenerTaskRunnable dataChangeInfo: " + dataChangeInfo);
        if (dataChangeInfo.getOperatorType().equals(1)) {
            String afterData = dataChangeInfo.getAfterData();
            log.info("TOrderListenerTaskRunnable 新增 afterData:{}", afterData);
            Map row = new HashMap();
            try {
                row = objectMapper.readValue(afterData, Map.class);
            } catch (JsonProcessingException e) {
                log.info("afterData转换成对象发生异常:" + e.getMessage());
            }
            log.info("TOrderListenerTaskRunnable row: " + row);
            String start = row.get("to_start_province").toString();
            String delivery = row.get("to_delivery_province").toString();
            String routeKey = start + "--" + delivery;
            Long num = (Long) AppMap.XianluMingchengCacheMap.get(routeKey);
            // BUGFIX: the original condition was `num == null && num.equals(0)`,
            // which throws NPE when num is null and is otherwise always false
            // (Long.equals(Integer) never matches), so the "new route" branch
            // was unreachable. A route is new when no counter exists yet or it
            // is still zero.
            if (num == null || num.longValue() == 0L) {
                num = AppMap.XianluMingchengCacheMap.addAndGet(routeKey, 1L);
                log.info("新增了一条线路: 【{}--{}】 数量为{}。 ", start, delivery, num);
            } else {
                num = AppMap.XianluMingchengCacheMap.incrementAndGet(routeKey);
                log.info("线路: 【{}--{}】 数量为{}。 ", start, delivery, num);
            }
        } else if (dataChangeInfo.getOperatorType().equals(2)) {
            String afterData = dataChangeInfo.getAfterData();
            log.info("TOrderListenerTaskRunnable 修改 afterData:{}", afterData);
            Map row = new HashMap();
            try {
                row = objectMapper.readValue(afterData, Map.class);
            } catch (JsonProcessingException e) {
                log.info("afterData转换成对象发生异常:" + e.getMessage());
            }
            log.info("TOrderListenerTaskRunnable row: " + row);
        } else if (dataChangeInfo.getOperatorType().equals(3)) {
            String afterData = dataChangeInfo.getAfterData();
            log.info("TOrderListenerTaskRunnable 删除 afterData:{}", afterData);
        }
    }
}
四、这里是Flink-CDC技术,我把所有用到的都放到了dese包里了。
dese包图片
AggregationDataChangSinkUtils.java
这个主要用来实现对mysql-binlog解析后,按照数据库名和表名选择不同的处理方式。这里虽然使用了静态类,但是在多线程环境下,需要在每个处理业务的地方都调用一下initRegister(),不然会获取不到对应的listener接口。
package com.cqsym.newbig.dese;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Static registry mapping "db:table" keys to table listeners, plus the shared
 * worker pool used to dispatch change events.
 */
public class AggregationDataChangSinkUtils {
    private static final Logger log = LoggerFactory.getLogger(AggregationDataChangSinkUtils.class);
    // ConcurrentHashMap: register()/getListener() run on the sink's worker
    // threads (see DataChangeSinkRunnable), so a plain HashMap risked lost
    // updates and visibility problems. Fully qualified to avoid touching the
    // import block. Declared final — nothing reassigns the reference.
    public static final Map<String, Ilistener> listenerMap = new java.util.concurrent.ConcurrentHashMap<>();

    /** Utility class — no instances. */
    private AggregationDataChangSinkUtils() {
    }

    /** Builds the registry key for a database/table pair. */
    public static String genKey(String dbName, String tableName) {
        return dbName + ":" + tableName;
    }

    /** Registers (or replaces) the listener for the given database/table. */
    public static void register(String _dbName, String _tableName,
                                Ilistener ilistener) {
        log.info("register : {}-{}", _dbName, _tableName);
        listenerMap.put(genKey(_dbName, _tableName), ilistener);
    }

    /** Returns the listener for the database/table pair, or null if none. */
    public static Ilistener getListener(String _dbName, String _tableName) {
        return listenerMap.get(genKey(_dbName, _tableName));
    }

    /** Returns the listener for a pre-built "db:table" key, or null if none. */
    public static Ilistener getListener(String getKey) {
        return listenerMap.get(getKey);
    }

    /** Registers the listeners for the two tables this project observes. */
    public static void initRegister() {
        register("twms", "t_dispatch_detail", new TDispatchDetailListener());
        register("twms", "t_order", new TOrderListener());
    }

    /** Logs the currently registered listeners (debug aid). */
    public static void printRegister() {
        Ilistener ilistener1 = AggregationDataChangSinkUtils.getListener("twms", "t_order");
        Ilistener ilistener2 = AggregationDataChangSinkUtils.getListener("twms", "t_dispatch_detail");
        log.info("ilistener1:" + ilistener1);
        log.info("ilistener2:" + ilistener2);
    }

    public static final ThreadPoolTaskExecutor aggregationListenerThreadPoolTaskExecutor;
    static {
        aggregationListenerThreadPoolTaskExecutor = new ThreadPoolTaskExecutor();
        aggregationListenerThreadPoolTaskExecutor.setCorePoolSize(40);
        aggregationListenerThreadPoolTaskExecutor.setMaxPoolSize(100);
        aggregationListenerThreadPoolTaskExecutor.setQueueCapacity(500);
        aggregationListenerThreadPoolTaskExecutor.setKeepAliveSeconds(60);
        aggregationListenerThreadPoolTaskExecutor.setThreadNamePrefix("aggregationListenerThreadPoolTaskExecutor--");
        // CallerRunsPolicy: on saturation the submitting thread runs the task,
        // applying back-pressure instead of dropping events.
        aggregationListenerThreadPoolTaskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
        aggregationListenerThreadPoolTaskExecutor.initialize();
    }
}
DataChangeInfo.java
flink-cdc获取到的binlog解析后的数据。
package com.cqsym.newbig.dese;
import lombok.Data;
/** Parsed binlog change event produced by MysqlDeserialization. */
@Data
public class DataChangeInfo {
/**
 * Change type: 0 snapshot read, 1 insert, 2 update, 3 delete,
 * 4 operation that truncated the source table.
 */
private Integer operatorType;
/**
 * Row image before the change (JSON).
 */
private String beforeData;
/**
 * Row image after the change (JSON).
 */
private String afterData;
/**
 * Effective row data: the before-image for deletes, otherwise the after-image.
 */
private String data;
/**
 * Binlog file name the event was read from.
 */
private String fileName;
/**
 * Current read position within the binlog file.
 */
private Integer filePos;
/**
 * Database name.
 */
private String database;
/**
 * Table name.
 */
private String tableName;
/**
 * Change timestamp (epoch milliseconds, Debezium ts_ms).
 */
private Long operatorTime;
}
DataChangeSink.java
这个是cdc用于处理数据的sink。
package com.cqsym.newbig.dese;
import com.cqsym.newbig.utils.SpringContextUtils;
import com.cqsym.newbig.utils.SpringbootContextUtils;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * Flink sink that hands each parsed CDC record to a shared worker pool so the
 * sink thread never blocks on business processing.
 * (Removed: dead commented-out Spring-context lookups and an unused
 * ObjectMapper field.)
 */
public class DataChangeSink implements SinkFunction<DataChangeInfo> {
    private static final Logger log = LoggerFactory.getLogger(DataChangeSink.class);
    // Static so the pool is shared by all sink instances and is not part of
    // Flink's serialized sink state (SinkFunction must be Serializable).
    public static final ThreadPoolTaskExecutor aggregationListenerThreadPoolTaskExecutor;
    static {
        aggregationListenerThreadPoolTaskExecutor = new ThreadPoolTaskExecutor();
        aggregationListenerThreadPoolTaskExecutor.setCorePoolSize(40);
        aggregationListenerThreadPoolTaskExecutor.setMaxPoolSize(100);
        aggregationListenerThreadPoolTaskExecutor.setQueueCapacity(500);
        aggregationListenerThreadPoolTaskExecutor.setKeepAliveSeconds(60);
        aggregationListenerThreadPoolTaskExecutor.setThreadNamePrefix("aggregationListenerThreadPoolTaskExecutor--");
        // CallerRunsPolicy: on saturation the Flink sink thread runs the task,
        // applying back-pressure instead of dropping events.
        aggregationListenerThreadPoolTaskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
        aggregationListenerThreadPoolTaskExecutor.initialize();
    }

    /**
     * Submits the record to the worker pool for asynchronous dispatch.
     *
     * @param dataChangeInfo parsed binlog change event
     * @param context        Flink sink context (unused)
     */
    @Override
    public void invoke(DataChangeInfo dataChangeInfo, Context context) throws JsonProcessingException {
        log.info("DataChangeSink invoke dataChangeInfo: " + dataChangeInfo);
        aggregationListenerThreadPoolTaskExecutor.submit(new DataChangeSinkRunnable(dataChangeInfo));
    }
}
DataChangeSinkRunnable.java
这里需要执行
AggregationDataChangSinkUtils.initRegister();
AggregationDataChangSinkUtils.printRegister();
不然获取不到注册的Listener
package com.cqsym.newbig.dese;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Dispatch task: looks up the listener registered for the event's
 * database/table and forwards the change to it.
 */
public class DataChangeSinkRunnable implements Runnable {
    private static final Logger log = LoggerFactory.getLogger(DataChangeSinkRunnable.class);
    private static ObjectMapper objectMapper = new ObjectMapper().registerModule(new JavaTimeModule());
    private DataChangeInfo dataChangeInfo;

    public DataChangeSinkRunnable(DataChangeInfo dataChangeInfo) {
        this.dataChangeInfo = dataChangeInfo;
    }

    @Override
    public void run() {
        // Re-register on every run: without this the static registry can be
        // empty when observed from this worker thread (see the class notes).
        AggregationDataChangSinkUtils.initRegister();
        AggregationDataChangSinkUtils.printRegister();
        log.info("DataChangeSinkRunnable run:");
        String db = dataChangeInfo.getDatabase();
        String table = dataChangeInfo.getTableName();
        log.info("DataChangeSinkRunnable run:{}-{}", db, table);
        Ilistener listener = AggregationDataChangSinkUtils.getListener(db, table);
        log.info("DataChangeSinkRunnable run:ilistener: {}", listener);
        log.info("DataChangeSinkRunnable run:dataChangeInfo: {}", dataChangeInfo);
        if (listener == null) {
            log.warn("No listener registered for: {}:{}", db, table);
        } else {
            listener.onEvent(dataChangeInfo);
        }
    }
}
Ilistener.java
Ilistener for 为了后续扩展不同的实现,为了不同的表实现不同的功能。
package com.cqsym.newbig.dese;
/**
 * Listener contract for table-specific handling of parsed binlog change
 * events. Implement one per (database, table) pair and register it via
 * AggregationDataChangSinkUtils so different tables get different behavior.
 */
public interface Ilistener {
void onEvent(DataChangeInfo dataChangeInfo);
}
MysqlDeserialization.java
MysqlDeserialization主要是用于解析mysql-binlog的。
package com.cqsym.newbig.dese;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.ImmutableMap;
import com.ververica.cdc.debezium.DebeziumDeserializationSchema;
import io.debezium.data.Envelope;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.util.Collector;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Map;
import java.util.Optional;
public class MysqlDeserialization implements DebeziumDeserializationSchema<DataChangeInfo> {
private static final Logger log = LoggerFactory.getLogger(MysqlDeserialization.class);
// Field names inside the Debezium change-event Struct / source block.
public static final String TS_MS = "ts_ms";
public static final String BIN_FILE = "file";
public static final String POS = "pos";
public static final String BEFORE = "before";
public static final String AFTER = "after";
public static final String SOURCE = "source";
/**
 * Maps the Debezium operation name (READ CREATE UPDATE DELETE TRUNCATE) to
 * the numeric change type used by DataChangeInfo:
 * 0 snapshot read, 1 insert, 2 update, 3 delete, 4 table truncated.
 */
private static final Map<String, Integer> OPERATION_MAP = ImmutableMap.of(
"READ", 0,
"CREATE", 1,
"UPDATE", 2,
"DELETE", 3,
"TRUNCATE", 4);
@Override
public void deserialize(SourceRecord sourceRecord, Collector<DataChangeInfo> collector) throws Exception {
String topic = sourceRecord.topic();
String[] fields = topic.split("\\.");
String database = fields[1];
String tableName = fields[2];
Struct struct = (Struct) sourceRecord.value();
final Struct source = struct.getStruct(SOURCE);
DataChangeInfo dataChangeInfo = new DataChangeInfo();
// 获取操作类型 READ CREATE UPDATE DELETE TRUNCATE;
Envelope.Operation operation = Envelope.operationFor(sourceRecord);
String type = operation.toString().toUpperCase();
int eventType = OPERATION_MAP.get(type);
// fixme 一般情况是无需关心其之前之后数据的,直接获取最新的数据即可,但这里为了演示,都进行输出
dataChangeInfo.setBeforeData(getJsonObject(struct, BEFORE).toJSONString());
dataChangeInfo.setAfterData(getJsonObject(struct, AFTER).toJSONString());
if (eventType == 3) {
dataChangeInfo.setData(getJsonObject(struct, BEFORE).toJSONString());
} else {
dataChangeInfo.setData(getJsonObject(struct, AFTER).toJSONString());
}
dataChangeInfo.setOperatorType(eventType);
dataChangeInfo.setFileName(Optional.ofNullable(source.get(BIN_FILE)).map(Object::toString).orElse(""));
dataChangeInfo.setFilePos(
Optional.ofNullable(source.get(POS))
.map(x -> Integer.parseInt(x.toString()))
.orElse(0)
);
dataChangeInfo.setDatabase(database);
dataChangeInfo.setTableName(tableName);
dataChangeInfo.setOperatorTime(Optional.ofNullable(struct.get(TS_MS))
.map(x -> Long.parseLong(x.toString())).orElseGet(System::currentTimeMillis));
// 输出数据
collector.collect(dataChangeInfo);
}
@Override
public TypeInformation<DataChangeInfo> getProducedType() {
return TypeInformation.of(DataChangeInfo.class);
}
/**
* 从元素数据获取出变更之前或之后的数据
*
* @param value value
* @param fieldElement fieldElement
* @return JSONObject
*/
private JSONObject getJsonObject(Struct value, String fieldElement) {
Struct element = value.getStruct(fieldElement);
JSONObject jsonObject = new JSONObject();
if (element != null) {
Schema afterSchema = element.schema();
List<Field> fieldList = afterSchema.fields();
for (Field field : fieldList) {
Object afterValue = element.get(field);
jsonObject.put(field.name(), afterValue);
}
}
return jsonObject;
}
}
TDispatchDetailListener.java
这个类有 2 个功能:一是把事件提交到其他线程池进行真正的业务处理;二是在 Sink 中可以根据获取到的数据库名和表名,分发给不同的 listener。
package com.cqsym.newbig.dese;
import com.cqsym.newbig.listener.TDispatchDetailListenerTaskRunnable;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.ThreadPoolExecutor;
public class TDispatchDetailListener implements Ilistener {
    private static final Logger log = LoggerFactory.getLogger(TDispatchDetailListener.class);

    /**
     * Dedicated pool for t_dispatch_detail events. CallerRunsPolicy makes the
     * submitting thread run the task itself when the queue is full, providing
     * back-pressure instead of dropping events.
     * (Fix: made final; removed the unused ObjectMapper field.)
     */
    private static final ThreadPoolTaskExecutor tDispatchDetailListenerThreadPoolTaskExecutor = new ThreadPoolTaskExecutor();

    static {
        tDispatchDetailListenerThreadPoolTaskExecutor.setCorePoolSize(20);
        tDispatchDetailListenerThreadPoolTaskExecutor.setMaxPoolSize(500);
        tDispatchDetailListenerThreadPoolTaskExecutor.setQueueCapacity(500);
        tDispatchDetailListenerThreadPoolTaskExecutor.setKeepAliveSeconds(60);
        tDispatchDetailListenerThreadPoolTaskExecutor.setThreadNamePrefix("tDispatchDetailListenerThreadPoolTaskExecutor--");
        tDispatchDetailListenerThreadPoolTaskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
        tDispatchDetailListenerThreadPoolTaskExecutor.initialize(); // initialize the pool
    }

    /**
     * Hands the change event off to the worker pool for asynchronous processing.
     *
     * @param dataChangeInfo the parsed binlog change event
     */
    @Override
    public void onEvent(DataChangeInfo dataChangeInfo) {
        log.info("--------TDispatchDetailListener---onEvent------");
        log.info("{}", dataChangeInfo);
        // Fix: the original used submit() and discarded the returned Future, so
        // any exception thrown by the task was silently swallowed. execute()
        // lets failures reach the executor's uncaught-exception handling
        // (and matches the sibling TOrderListener).
        tDispatchDetailListenerThreadPoolTaskExecutor.execute(new TDispatchDetailListenerTaskRunnable(dataChangeInfo));
    }
}
TOrderListener.java
package com.cqsym.newbig.dese;
import com.cqsym.newbig.listener.TOrderListenerTaskRunnable;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import java.util.concurrent.ThreadPoolExecutor;
public class TOrderListener implements Ilistener {
    private static final Logger log = LoggerFactory.getLogger(TOrderListener.class);

    /**
     * Dedicated pool for t_order events. CallerRunsPolicy applies back-pressure:
     * when the queue is full, the submitting thread runs the task itself.
     * (Fix: made final; removed the unused ObjectMapper field.)
     */
    private static final ThreadPoolTaskExecutor tOrderListenerThreadPoolTaskExecutor = new ThreadPoolTaskExecutor();

    static {
        tOrderListenerThreadPoolTaskExecutor.setCorePoolSize(20);
        tOrderListenerThreadPoolTaskExecutor.setMaxPoolSize(500);
        tOrderListenerThreadPoolTaskExecutor.setQueueCapacity(500);
        tOrderListenerThreadPoolTaskExecutor.setKeepAliveSeconds(60);
        tOrderListenerThreadPoolTaskExecutor.setThreadNamePrefix("tOrderListenerThreadPoolTaskExecutor--");
        tOrderListenerThreadPoolTaskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.CallerRunsPolicy());
        tOrderListenerThreadPoolTaskExecutor.initialize(); // initialize the pool
    }

    /**
     * Hands the change event off to the worker pool for asynchronous processing.
     *
     * @param dataChangeInfo the parsed binlog change event
     */
    @Override
    public void onEvent(DataChangeInfo dataChangeInfo) {
        log.info("--------TOrderListener---onEvent------");
        // Fix: parameterized logging instead of eager toString().
        log.info("{}", dataChangeInfo);
        tOrderListenerThreadPoolTaskExecutor.execute(new TOrderListenerTaskRunnable(dataChangeInfo));
    }
}
五、flink-cdc技术封装的dese包里,主要使用步骤如下
前面的 dese 包的调用主要由 binLogService 类的 start 方法启动。
六、介绍下用到的3个缓存工具
AppCacheMap.java
好像这个项目这里没有使用到。
package com.cqsym.newbig.cache;
import java.util.HashMap;
import java.util.Map;
public class AppCacheMap {
    /**
     * Shared application cache. Fix: the original exposed a raw, unsynchronized
     * HashMap even though the rest of this project accesses caches from listener
     * thread pools; a synchronized wrapper keeps HashMap's null tolerance while
     * making individual get/put calls thread-safe. Generics added (raw types removed).
     */
    public static final Map<Object, Object> cache =
            java.util.Collections.synchronizedMap(new HashMap<>());

    /**
     * Returns the cached value for the given key, or null when absent.
     *
     * @param key lookup key
     * @return cached value or null
     */
    public static Object get(Object key) {
        return cache.get(key);
    }

    /**
     * Stores a value under the given key, replacing any previous entry.
     *
     * @param key   cache key
     * @param value value to cache
     */
    public static void put(Object key, Object value) {
        cache.put(key, value);
    }
}
AppMap.java
package com.cqsym.newbig.cache;
import com.cqsym.newbig.vo.ZaituDingdanVo;
import com.cqsym.newbig.vo.ZuoyeShishiQingkuangVo;
import com.google.common.util.concurrent.AtomicLongMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class AppMap {
    /** 办事处 name -> 大区 name; populated once by the static initializer below. */
    public static Map<String, String> BanshichuDaquCacheMap = new HashMap<>();
    /** Per-办事处 (by id) operation counters; AtomicLongMap is thread-safe. */
    public static final AtomicLongMap<Integer> BanshichuZuoyeNumCacheMap = AtomicLongMap.create();
    /** Operation counters keyed by a string dimension. */
    public static final AtomicLongMap<String> ZuoyeNumCacheMap = AtomicLongMap.create();
    /** Counters keyed by 线路名称 (route name). */
    public static final AtomicLongMap<String> XianluMingchengCacheMap = AtomicLongMap.create();
    /** Rolling window (max 50) of the latest real-time operation records. */
    public static final FixedSizeConcurrentQueue<ZuoyeShishiQingkuangVo> ZuoyeShishiQingkuang = new FixedSizeConcurrentQueue<ZuoyeShishiQingkuangVo>(50);
    /** Owner base id -> name lookup. NOTE(review): plain HashMap — confirm single-threaded access. */
    public static Map<String, String> OwnerBaseIdNameCacheName = new HashMap<>();
    /** Per-day in-transit order counts for the dashboard. */
    public static List<ZaituDingdanVo> ZaituDingdanNumForEveryDayCacheList = new ArrayList<>();

    static {
        initBanshichuDaquCacheMap();
    }

    /** Seeds the 办事处 -> 大区 mapping. (Fix: corrected method-name typo "DAqu" -> "Daqu"; added diamonds on raw HashMap constructions.) */
    private static void initBanshichuDaquCacheMap() {
        BanshichuDaquCacheMap.put("包头作业点", "华北");
        BanshichuDaquCacheMap.put("保定办事处", "华北");
        BanshichuDaquCacheMap.put("北京办事处", "华北");
        BanshichuDaquCacheMap.put("成都办事处", "西南");
        BanshichuDaquCacheMap.put("定州办事处", "华北");
        BanshichuDaquCacheMap.put("福州作业点", "华南");
        BanshichuDaquCacheMap.put("格尔木作业点", "西北");
    }
}
FixedSizeConcurrentQueue.java
FixedSizeConcurrentQueue 是基于 ConcurrentLinkedQueue 封装的固定长度队列(Queue),可以在多线程环境下使用。
package com.cqsym.newbig.cache;
import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;
/**
 * A bounded FIFO queue built on ConcurrentLinkedQueue: when full, the oldest
 * element is evicted to make room. The bound is best-effort under concurrency
 * (size() and poll() are not atomic together), matching the original design.
 */
public class FixedSizeConcurrentQueue<T> {
    private final ConcurrentLinkedQueue<T> queue;
    private final int maxSize;

    /**
     * @param maxSize maximum number of elements retained; must be positive
     * @throws IllegalArgumentException if maxSize is not positive (the original
     *         accepted it and then failed obscurely on the first add)
     */
    public FixedSizeConcurrentQueue(int maxSize) {
        if (maxSize <= 0) {
            throw new IllegalArgumentException("maxSize must be positive: " + maxSize);
        }
        this.maxSize = maxSize;
        this.queue = new ConcurrentLinkedQueue<>();
    }

    /**
     * Evicts head elements until there is room for one more element.
     * Fix: the original threw IllegalStateException when poll() returned null,
     * but under concurrency another thread may legitimately drain the queue
     * between the size() check and poll(); a null poll just means eviction is
     * done. Also deduplicates the loop previously copied in add() and offer().
     */
    private void evictToCapacity() {
        while (queue.size() >= maxSize) {
            if (queue.poll() == null) {
                break; // queue drained concurrently — nothing left to evict
            }
        }
    }

    /** Adds the element, evicting the oldest entries if the queue is full. */
    public boolean add(T element) {
        evictToCapacity();
        return queue.add(element);
    }

    /** Offers the element, evicting the oldest entries if the queue is full. */
    public boolean offer(T element) {
        evictToCapacity();
        return queue.offer(element);
    }

    public T poll() {
        return queue.poll();
    }

    public T peek() {
        return queue.peek();
    }

    public int size() {
        return queue.size();
    }

    public boolean isEmpty() {
        return queue.isEmpty();
    }

    public boolean contains(Object o) {
        return queue.contains(o);
    }

    public boolean remove(Object o) {
        return queue.remove(o);
    }

    /** NOTE(review): bulk add bypasses capacity eviction, as in the original. */
    public boolean addAll(Collection<? extends T> c) {
        return queue.addAll(c);
    }

    public Iterator<T> iterator() {
        return queue.iterator();
    }

    public Object[] toArray() {
        return queue.toArray();
    }

    /** Fix: renamed the method type parameter (was {@code <T>}, shadowing the class parameter). */
    public <E> E[] toArray(E[] a) {
        return queue.toArray(a);
    }

    @Override
    public String toString() {
        return queue.toString();
    }
}