Scenario:
User request messages arrive through Kafka at the Flink compute engine; Flink takes each request, queries a third-party data source, and once processing finishes writes the result back to Kafka.
Two kinds of third-party data sources are covered here:
1. A MySQL data source
2. A third-party HTTP API
Project repository: https://github.com/1105748319/flinkDemoTest.git
Straight to the code.
The main class:

package com.flink;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
/**
 * TestMainMysql
 * @author ly
 * @date 2020/11/20
 */
public class TestMainMysql {
   protected static StreamExecutionEnvironment env;
   protected static StreamTableEnvironment tEnv;
   public static void main(String[] args) throws Exception {
      // Initialize the Flink streaming environment
      env = StreamExecutionEnvironment.getExecutionEnvironment();
      // Initialize the stream Table environment (Blink planner, streaming mode)
      tEnv = StreamTableEnvironment.create(
         env,
         EnvironmentSettings.newInstance()
            .useBlinkPlanner()
            .inStreamingMode()
            .build()
      );
      // The source is Kafka, so enable checkpointing (every 1000 ms); the connector commits its offsets back to the consumer group when a checkpoint completes
      env.enableCheckpointing(1000);
      // Create a table named UserScores (fields: requestId, recordCount) over the Kafka topic
      String createTable = String.format(
         "CREATE TABLE UserScores (requestId STRING,recordCount STRING)\n" +
            //"CREATE TABLE UserScores (requestId STRING, dataList ARRAY<ROW(orderNo STRING, money FLOAT, name STRING, zoneCode STRING, zoneName STRING)>)\n"+
            "WITH (\n" +
            "  'connector' = 'kafka',\n" +
            "  'topic' = 'topic.flink.mysql',\n" +
            "  'properties.bootstrap.servers' = '127.0.0.1:9092',\n" +
            "  'properties.group.id' = 'testGroup1',\n" +
            "  'format' = 'json',\n" +
            "  'scan.startup.mode' = 'group-offsets'\n" +
            ")");
      TableResult tableResult = tEnv.executeSql(createTable);
      // Define the query over UserScores
      Table table = tEnv.sqlQuery("SELECT * FROM UserScores ");
      // Read the Kafka-backed table as an append-only stream of Rows
      DataStream<Row> infoDataStream1 = tEnv.toAppendStream(table, Row.class);
      // Each record from Kafka drives a third-party lookup in the custom sink
      infoDataStream1.addSink(new HttpGetData());
      env.execute();
   }
}
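
For reference, a message on topic.flink.mysql that matches the DDL above would look like this (the values are illustrative only):

{"requestId": "req-001", "recordCount": "20"}

The 'json' format matches fields by name, so the keys must be exactly requestId and recordCount; recordCount is declared as STRING here and parsed to an int later in the sink.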

The third-party data processing class (a custom sink):

package com.flink;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import org.apache.flink.types.Row;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.codehaus.jettison.json.JSONObject;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
/**
 * HttpGetData
 * @author ly
 * @date 2020/11/20
 */
public class HttpGetData extends RichSinkFunction<Row> {
   private Connection connection;
   private PreparedStatement preparedStatement;
   private Producer<String, String> producer;
   @Override
   public void open(Configuration parameters) throws Exception {
      // Initialize the Kafka producer
      super.open(parameters);
      Properties props = new Properties();
      props.put("bootstrap.servers", "127.0.0.1:9092");
      props.put("acks", "all");
      props.put("retries", 0);
      props.put("batch.size", 16384);
      props.put("linger.ms", 1);
      props.put("buffer.memory", 33554432);
      props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
      props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
      producer = new KafkaProducer<String, String>(props);
      // Initialize the MySQL connection
      String className = "com.mysql.jdbc.Driver";
      Class.forName(className);
      String url = "jdbc:mysql://127.0.0.1:3306/flink";
      //String url = "jdbc:mysql://1127.0.0.1:3306/flink";
      String user = "root";
      String password = "root";
      connection = DriverManager.getConnection(url, user, password);
      //String sql = "select sum(money) as money from new_table_1 limit ?";
      String sql = "select  sum(a.money) as money from (select  orderno,money from new_table limit ?) a";
      //String sql = "select  sum(a.money) as money from (select  order_no,money from new_table_1 limit ?, ?) a group by order_no";
      preparedStatement = connection.prepareStatement(sql);
   }
   @Override
   public void close() throws Exception {
      if (preparedStatement != null) {
         preparedStatement.close();
      }
      if (connection != null) {
         connection.close();
      }
      super.close();
   }
   @Override
   public void invoke(Row value, Context context) throws Exception {
      long startMillis = System.currentTimeMillis();
      try {
         // The Kafka record's fields are the lookup parameters; Row.toString() yields comma-separated field values
         String[] split = value.toString().split(",");
      /* MySQL variant - pair this with the two-parameter SQL commented out in open()
         (enabling it also needs java.util.Random and java.sql.ResultSet imports):
         // First query parameter
         preparedStatement.setInt(1, new Random().nextInt(100));
         // Second query parameter
         preparedStatement.setInt(2, Integer.parseInt(split[1]));
         // Run the aggregation query
         ResultSet resultSet = preparedStatement.executeQuery();
         Float money = null;
         // Read the aggregated result
         while (resultSet.next()) {
            money = resultSet.getFloat("money");
         }
         long endMillis = System.currentTimeMillis();
         System.out.println("Aggregation took " + (endMillis - startMillis) + " ms");
         JSONObject event = new JSONObject();
         event.put("f0", split[0]);
         event.put("f1", money);
         event.put("f2", endMillis - startMillis);
         // Write the result back to Kafka
         producer.send(new ProducerRecord<String, String>("topic.flink.mysql.response1", split[0], event.toString()));
      */
         // HTTP variant: query a REST endpoint as the data source
         String loginUrl =
            "http://127.0.0.1:8091/kafkaMsg/mysql/getData?recordCount=" + Integer.parseInt(
               split[1]);
         // Call the endpoint
         String result1 = HttpUtil.get(loginUrl);
         // Parse the JSON array response into Info objects
         List<Info> userInfos = JsonUtil.fromJsonArray(result1, Info.class);
         // Group by order number and sum the money per group
         Map<String, Double> collect = userInfos.stream()
            .collect(Collectors.groupingBy(
               Info::getOrderNo,
               Collectors.summingDouble(Info::getMoney)));
         long endMillis = System.currentTimeMillis();
         JSONObject event = new JSONObject();
         event.put("f0", split[0]);
         event.put("f1", collect.toString());
         event.put("f2", endMillis - startMillis);
         System.out.println("recordCount=" + split[1] + ", elapsed=" + (endMillis - startMillis) + " ms");
         // Write the result back to Kafka
         producer.send(new ProducerRecord<String, String>(
            "topic.flink.mysql.response",
            split[0],
            event.toString()));
      } catch (Exception e) {
         e.printStackTrace();
      }
      //System.out.println("DeviceMap>>>>>>" + DeviceMap);
      // DeviceMap.clear();
   }
}
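
The Info POJO used above is not reproduced in this post; see the repository for the authoritative version. A minimal sketch consistent with how it is used here (getOrderNo/getMoney in the stream aggregation, plus the extra fields named in the commented-out DDL) would be:

package com.flink;
/**
 * Info - sketch of the DTO deserialized from the HTTP response.
 * The field set is inferred from usage in this post; the repository version may differ.
 */
public class Info {
   private String orderNo;
   private double money;
   private String name;
   private String zoneCode;
   private String zoneName;
   public String getOrderNo() { return orderNo; }
   public void setOrderNo(String orderNo) { this.orderNo = orderNo; }
   public double getMoney() { return money; }
   public void setMoney(double money) { this.money = money; }
   public String getName() { return name; }
   public void setName(String name) { this.name = name; }
   public String getZoneCode() { return zoneCode; }
   public void setZoneCode(String zoneCode) { this.zoneCode = zoneCode; }
   public String getZoneName() { return zoneName; }
   public void setZoneName(String zoneName) { this.zoneName = zoneName; }
}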

The remaining utility classes:

package com.flink;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.config.Registry;
import org.apache.http.config.RegistryBuilder;
import org.apache.http.conn.socket.ConnectionSocketFactory;
import org.apache.http.conn.socket.PlainConnectionSocketFactory;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.util.CharsetUtils;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
/**
 * HttpUtil
 * @author ly
 * @date 2020/04/20
 */
public class HttpUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(HttpUtil.class);
    private static PoolingHttpClientConnectionManager cm;
    private static RequestConfig requestConfig;
    public static final String CHAR_SET = "UTF-8";
    /** Maximum total connections: 400. */
    private static int MAX_CONNECTION_NUM = 400;
    /** Maximum connections per route: 80. */
    private static int MAX_PER_ROUTE = 80;
    /** Timeout for acquiring a connection and connecting to the server (milliseconds). */
    private static int SERVER_REQUEST_TIME_OUT = 60000;
    /** Timeout for waiting on the server's response data (milliseconds). */
    private static int SERVER_RESPONSE_TIME_OUT = 90000;
    static {
        try {
            cm = getPoolingHttpClientConnectionManager();
            requestConfig = RequestConfig.custom().setConnectionRequestTimeout(SERVER_REQUEST_TIME_OUT).setConnectTimeout(SERVER_REQUEST_TIME_OUT).setSocketTimeout(SERVER_RESPONSE_TIME_OUT).build();
        } catch (Exception e) {
            LOGGER.error("", e);
        }
        /** Periodically evict expired and idle connections. */
        new Timer().schedule(new TimerTask() {
            @Override
            public void run() {
                // Close expired connections
                cm.closeExpiredConnections();
                // Close connections idle for more than 30 seconds
                cm.closeIdleConnections(30, TimeUnit.SECONDS);
            }
        }, 50000, 50000);
    }
    public static PoolingHttpClientConnectionManager getPoolingHttpClientConnectionManager () {
        try {
            SSLContextBuilder sslContextBuilder = new SSLContextBuilder();
            sslContextBuilder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
            SSLConnectionSocketFactory socketFactory = new SSLConnectionSocketFactory(sslContextBuilder.build());
            Registry<ConnectionSocketFactory> socketFactoryRegistry = RegistryBuilder.<ConnectionSocketFactory> create()
                    .register("https", socketFactory)
                    .register("http", new PlainConnectionSocketFactory())
                    .build();
            PoolingHttpClientConnectionManager clientConnectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry);
            clientConnectionManager.setMaxTotal(MAX_CONNECTION_NUM);
            clientConnectionManager.setDefaultMaxPerRoute(MAX_PER_ROUTE);
            return clientConnectionManager;
        } catch (Exception e) {
            LOGGER.error("", e);
            return null;
        }
    }
    /**
     * @description Obtain an HTTP client backed by the shared connection pool
     * @return
     */
    private static CloseableHttpClient getHttpClient(RequestConfig config) {
        CloseableHttpClient httpClient = HttpClients.custom().setDefaultRequestConfig(config).setConnectionManager(cm).build();
        return httpClient;
    }
    /**
     * HTTP POST request (form-encoded)
     * @param url
     * @param header
     * @param params
     * @return
     */
    public static HttpBean getHttpPostBean(String url, Map<String, String> header, Map<String, String> params) {
        HttpPost post = new HttpPost(url);
        post.setConfig(requestConfig);
        CloseableHttpResponse response = null;
        HttpBean httpBean = null;
        try {
            if (header != null && !header.isEmpty()) {
                for (String key : header.keySet()) {
                    post.addHeader(key, header.get(key));
                }
            }
            List<NameValuePair> paramList = new ArrayList<>();
            if (params != null && params.size() > 0) {
                Iterator<Entry<String, String>> iterator = params.entrySet().iterator();
                while (iterator.hasNext()) {
                    Entry<String, String> elem = iterator.next();
                    paramList.add(new BasicNameValuePair(elem.getKey(), elem.getValue()));
                }
            }
            UrlEncodedFormEntity paramEntity = new UrlEncodedFormEntity(paramList, CharsetUtils.get(CHAR_SET));
            post.setEntity(paramEntity);
            response = getHttpClient(requestConfig).execute(post);
            HttpEntity entity = response.getEntity();
            httpBean = new HttpBean();
            Header[] headers = response.getAllHeaders();
            httpBean.setResponseHeader(headers);
            if (entity != null) {
                // Pass the charset explicitly; EntityUtils defaults to ISO-8859-1
                String content = EntityUtils.toString(entity, CHAR_SET);
                httpBean.setResponseContent(content);
            }
        } catch (Exception e) {
            LOGGER.error("", e);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (Exception e) {
                    LOGGER.error("", e);
                }
            }
            post.abort();
        }
        return httpBean;
    }
    /**
     * HTTP POST request
     * @param url
     * @param header
     * @param params
     * @return
     */
    public static String post(String url, Map<String, String> header, Map<String, String> params) {
        HttpBean httpBean = getHttpPostBean(url, header, params);
        return httpBean != null ? httpBean.getResponseContent() : null;
    }
    /**
     * HTTP POST request (no custom headers)
     * @param url
     * @param params
     * @return
     */
    public static String post(String url, Map<String, String> params) {
        HttpBean httpBean = getHttpPostBean(url, null, params);
        return httpBean != null ? httpBean.getResponseContent() : null;
    }
    /**
     * HTTP GET request
     * @param url
     * @return
     */
    public static String get(String url) {
        return get(url, null, null, null);
    }
    /**
     * HTTP GET request with explicit charset
     * @param url
     * @param charSet
     * @return
     */
    public static String get(String url, String charSet) {
        return get(url, null, null, charSet);
    }
    /**
     * HTTP GET request with headers
     * @param url
     * @param header
     * @return
     */
    public static String get(String url, Map<String, String> header) {
        return get(url, header, null, null);
    }
    public static HttpBean getHttpGetBean(String url) {
        HttpGet get = new HttpGet(url);
        get.setConfig(requestConfig);
        CloseableHttpResponse response = null;
        String content = null;
        HttpBean httpBean = null;
        try {
            response = getHttpClient(requestConfig).execute(get);
            HttpEntity entity = response.getEntity();
            if (entity != null) {
                content = EntityUtils.toString(entity, CHAR_SET);
            }
            httpBean = new HttpBean();
            Header[] headers = response.getAllHeaders();
            httpBean.setResponseHeader(headers);
            httpBean.setResponseContent(content);
        } catch (Exception e) {
            LOGGER.error("", e);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (Exception e) {
                    LOGGER.error("", e);
                }
            }
            get.abort();
        }
        return httpBean;
    }
    /**
     * HTTP GET request with headers, query parameters, and charset
     * @param url
     * @param header
     * @param params
     * @param charSet
     * @return
     */
    public static String get(String url, Map<String, String> header, Map<String, String> params, String charSet) {
        if (params != null && !params.isEmpty()) {
            String str = "";
            for (String key : params.keySet()) {
                str += ("".equals(str) ? "?" : "&") + key + "=" + params.get(key);
            }
            url += str;
        }
        HttpGet get = new HttpGet(url);
        get.setConfig(requestConfig);
        CloseableHttpResponse response = null;
        String content = null;
        try {
            if (header != null && !header.isEmpty()) {
                for (String key : header.keySet()) {
                    get.addHeader(key, header.get(key));
                }
            }
            response = getHttpClient(requestConfig).execute(get);
            HttpEntity entity = response.getEntity();
            if (entity != null) {
                content = EntityUtils.toString(entity, charSet == null ? CHAR_SET : charSet);
            }
        } catch (Exception e) {
            LOGGER.error("", e);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (Exception e) {
                    LOGGER.error("", e);
                }
            }
            get.abort();
        }
        return content;
    }
    /**
     * @description HTTP POST
     * @param url
     * @param msg
     * @return
     */
    public static String post(String url, String msg) {
        HttpPost postMethod = new HttpPost(url);
        postMethod.setConfig(requestConfig);
        CloseableHttpResponse response = null;
        String content = null;
        try {
            // Encode the request body as UTF-8
            HttpEntity stringEntity = new StringEntity(msg, CharsetUtils.get("UTF-8"));
            postMethod.setEntity(stringEntity);
            postMethod.addHeader("Content-Type", "application/json;charset=UTF-8");
            response = getHttpClient(requestConfig).execute(postMethod);
            HttpEntity entity = response.getEntity();
            if (entity != null) {
                // Pass the charset explicitly; EntityUtils defaults to ISO-8859-1
                content = EntityUtils.toString(entity, "UTF-8");
            }
        } catch (Exception e) {
            LOGGER.error("", e);
        } finally {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    LOGGER.error("", e);
                }
            }
            postMethod.abort();
        }
        return content;
    }
    public static String combineQuery(Map<String, String> params) {
        if (params != null && !params.isEmpty()) {
            StringBuilder sb = new StringBuilder();
            for (String key : params.keySet()) {
                sb.append("".equals(sb.toString()) ? "?" : "&").append(key).append("=").append(params.get(key));
            }
            return sb.toString();
        } else {
            return "";
        }
    }
    public static class HttpBean {
        private String responseContent;
        private Header[] responseHeader;
        public String getResponseContent() {
            return responseContent;
        }
        public void setResponseContent(String responseContent) {
            this.responseContent = responseContent;
        }
        public Header[] getResponseHeader() {
            return responseHeader;
        }
        public void setResponseHeader(Header[] responseHeader) {
            this.responseHeader = responseHeader;
        }
    }
    public static void main(String[] args) {
        try {
            // Build request headers
            Map<String, String> header = new HashMap<>(16);
            header.put("Content-Type", "application/json; charset=utf-8");
            // Build request parameters
            Map<String, String> params = new HashMap<>(16);
            //params.put("recordCount", "10");
            // Fetch test data
            String loginUrl = "http://192.168.120.192:8090/kafkaMsg/mysql/getData?recordCount=20";
            String result1 = HttpUtil.get(loginUrl);
            System.out.println(result1);
            List<Info> userInfos = JsonUtil.fromJsonArray(result1, Info.class);
            System.out.println(userInfos.size());
        } catch (Exception e) {
            LOGGER.error("", e);
        }
    }
}
package com.flink;
import com.google.gson.*;
import com.google.gson.stream.JsonReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JsonUtil
 * @author ly
 * @date 2020/04/20
 */
public class JsonUtil {
    private static final Gson GSON = new GsonBuilder().disableHtmlEscaping().setDateFormat("yyyy-MM-dd HH:mm:ss").create();
    private static final JsonParser JSON_PARSER = new JsonParser();
    /**
     * @description Parse a JSON string into an object
     * @param json
     * @param clazz
     * @return
     */
    public static <T> T fromJson(String json, Class<T> clazz) throws Exception {
        T dto = null;
        try {
            dto = GSON.fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            throw new Exception("数据异常");
        }
        return dto;
    }
    /**
     * Parse a JSON string into an object in lenient mode
     * @param json
     * @param clazz
     * @param <T>
     * @return
     * @throws Exception
     */
    public static <T> T fromJsonInLenient(String json, Class<T> clazz) throws Exception {
        T dto = null;
        try {
            JsonReader reader = new JsonReader(new StringReader(json));
            reader.setLenient(true);
            dto = GSON.fromJson(reader, clazz);
        } catch (JsonSyntaxException e) {
            throw new Exception("数据解析异常");
        }
        return dto;
    }
    /**
     * @description Parse a JSON array string into a List
     * @param jsonArray
     * @param clazz
     * @return
     */
    public static <T> List<T> fromJsonArray(String jsonArray, Class<T> clazz) throws Exception {
        if (org.apache.commons.lang3.StringUtils.isBlank(jsonArray)) {
            return null;
        }
        try {
            JsonArray array = strToJsonArray(jsonArray);
            if (array == null || array.size() == 0) {
                return null;
            }
            List<T> list = new ArrayList<>();
            for (int i = 0; i < array.size(); i++) {
                T dto = GSON.fromJson(array.get(i), clazz);
                list.add(dto);
            }
            return list;
        } catch (JsonSyntaxException e) {
            throw new Exception("数据异常");
        }
    }
    /**
     * @description Parse a JsonElement into an object
     * @param json
     * @param clazz
     * @return
     */
    public static <T> T fromJson(JsonElement json, Class<T> clazz) throws Exception {
        T dto = null;
        try {
            dto = GSON.fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            throw new Exception("数据异常");
        }
        return dto;
    }
    /**
     * @description Parse a JsonElement into an object using the given date format
     * @param json
     * @param clazz
     * @param format
     * @return
     */
    public static <T> T fromJson(JsonElement json, Class<T> clazz, String format) throws Exception {
        T dto = null;
        try {
            Gson gson = new GsonBuilder().disableHtmlEscaping().setDateFormat(format).create();
            dto = gson.fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            throw new Exception("数据异常");
        }
        return dto;
    }
    /**
     * @description Parse a JSON string into an object using the given date format
     * @param json
     * @param clazz
     * @param dateFormat
     * @return
     */
    public static <T> T fromJson(String json, Class<T> clazz, String dateFormat) throws Exception {
        T dto = null;
        try {
            Gson gson = new GsonBuilder().disableHtmlEscaping().setDateFormat(dateFormat).create();
            dto = gson.fromJson(json, clazz);
        } catch (JsonSyntaxException e) {
            throw new Exception("数据异常");
        }
        return dto;
    }
    public static JsonObject strToJson(String str) {
        if (org.apache.commons.lang3.StringUtils.isBlank(str)) {
            return null;
        }
        try {
            JsonObject json = JSON_PARSER.parse(str).getAsJsonObject();
            return json;
        } catch (Exception e) {
            return null;
        }
    }
    /**
     * Parse a string into a JsonObject in lenient mode
     * @param str
     * @return
     */
    public static JsonObject strToJsonInLenient(String str) {
        if (org.apache.commons.lang3.StringUtils.isBlank(str)) {
            return null;
        }
        try {
            JsonReader reader = new JsonReader(new StringReader(str));
            reader.setLenient(true);
            JsonObject json = JSON_PARSER.parse(reader).getAsJsonObject();
            return json;
        } catch (Exception e) {
            return null;
        }
    }
    public static JsonArray strToJsonArray(String str) {
        if (org.apache.commons.lang3.StringUtils.isBlank(str)) {
            return null;
        }
        try {
            JsonArray json = JSON_PARSER.parse(str).getAsJsonArray();
            return json;
        } catch (Exception e) {
            return null;
        }
    }
    /**
     * @description getStringFromJsonByKey
     * @param json
     * @param key
     * @return
     */
    public static String getString(JsonObject json, String key) {
        if (json == null) {
            return null;
        }
        if (key == null) {
            return null;
        }
        if (json.has(key)) {
            return json.get(key).getAsString();
        }
        return null;
    }
    /**
     * @description getDouble
     * @param json
     * @param key
     * @return
     */
    public static Double getDouble(JsonObject json, String key) {
        if (json == null || org.apache.commons.lang3.StringUtils.isBlank(key)) {
            return null;
        }
        if (json.has(key)) {
            return json.get(key).getAsDouble();
        }
        return null;
    }
    /**
     * @description getInteger
     * @param json
     * @param key
     * @return
     */
    public static Integer getInteger(JsonObject json, String key) {
        if (json == null || org.apache.commons.lang3.StringUtils.isBlank(key)) {
            return null;
        }
        if (json.has(key)) {
            return json.get(key).getAsInt();
        }
        return null;
    }
    /**
     * @description getLong
     * @param json
     * @param key
     * @return
     */
    public static Long getLong(JsonObject json, String key) {
        if (json == null || org.apache.commons.lang3.StringUtils.isBlank(key)) {
            return null;
        }
        if (json.has(key)) {
            return json.get(key).getAsLong();
        }
        return null;
    }
    /**
     * @description getJsonObjectFromJsonByKey
     * @param json
     * @param key
     * @return
     */
    public static JsonObject getJsonObject(JsonObject json, String key) {
        if (json == null) {
            return null;
        }
        if (json.has(key)) {
            try {
                return json.get(key).getAsJsonObject();
            } catch (Exception e) {
                return null;
            }
        }
        return null;
    }
    /**
     * @description getJsonArrayFromJsonByKey
     * @param json
     * @param key
     * @return
     */
    public static JsonArray getJsonArray(JsonObject json, String key) {
        if (json == null) {
            return null;
        }
        if (json.has(key)) {
            try {
                return json.get(key).getAsJsonArray();
            } catch (Exception e) {
                return null;
            }
        }
        return null;
    }
    /**
     * @description Serialize an object to a JSON string
     * @param obj
     * @return
     */
    public static String toJson(Object obj){
        if (obj == null) {
            return null;
        }
        String result = null;
        try {
            result = GSON.toJson(obj);
        } catch (Exception e) {
            return null;
        }
        return result;
    }
    /**
     * @description Serialize an object to a JSON string using the given date format
     * @param obj
     * @param dateFormat
     * @return
     */
    public static String toJson(Object obj, String dateFormat){
        Gson gson = new GsonBuilder().disableHtmlEscaping().setDateFormat(dateFormat).create();
        if (obj == null) {
            return null;
        }
        String result = null;
        try {
            result = gson.toJson(obj);
        } catch (Exception e) {
            return null;
        }
        return result;
    }
    /**
     * @description Parse a JSON string into a JsonArray
     * @param json
     * @return
     */
    public static JsonArray toJsonArray(String json) throws Exception {
        JsonArray ja = null;
        try {
            ja = JSON_PARSER.parse(json).getAsJsonArray();
        } catch (Exception e) {
            throw new Exception("json格式不对");
        }
        return ja;
    }
    /**
     * Convert an object to a JsonObject
     * @param obj
     * @return
     */
    public static JsonObject objToJson(Object obj) {
        if (obj == null) {
            return null;
        }
        try {
            String json = toJson(obj);
            return JSON_PARSER.parse(json).getAsJsonObject();
        } catch (Exception e) {
            return null;
        }
    }
    /**
     * Convert an object to a JsonArray
     * @param obj
     * @return
     */
    public static JsonArray objToJsonArray(Object obj) {
        if (obj == null) {
            return null;
        }
        try {
            String json = toJson(obj);
            return strToJsonArray(json);
        } catch (Exception e) {
            return null;
        }
    }
    public static void main(String[] args) {
        List<Object> list = new ArrayList<>();
        System.out.println(toJson(list));
        Map<String, Object> map = new HashMap<>();
        map.put("fff", "fff");
        System.out.println(JsonUtil.objToJson(map).toString());
        JsonObject json = JsonUtil.objToJson(map);
        json.addProperty("fff1", "1234");
        json.addProperty("fff2", "1234");
        json.remove("fff");
        System.out.println(json.toString());
    }
}

pom.xml

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.example</groupId>
   <artifactId>flinkDemoTest</artifactId>
   <version>1.0-SNAPSHOT</version>
   <properties>
      <java.version>1.8</java.version>
   </properties>
   <dependencies>
      <!-- Table ecosystem -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-api-java-bridge_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- Table ecosystem -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-api-java</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-kafka -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-connector-kafka_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-json -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-json</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-csv -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-csv</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-api-scala-bridge_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-jdbc -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-jdbc_2.11</artifactId>
         <version>1.10.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-planner_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-planner-blink_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- Flink core -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-streaming-scala_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-clients_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- Test dependencies -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-test-utils_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-core</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-java</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-scala_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-table -->
      <!--  <dependency>
           <groupId>org.apache.flink</groupId>
           <artifactId>flink-table</artifactId>
           <version>${project.version}</version>
        </dependency>-->
      <!--<dependency>
         <groupId>org.apache.logging.log4j</groupId>
         <artifactId>log4j-core</artifactId>
         <version>2.8.2</version>
      </dependency>
      <dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
         <version>1.2.17</version>
      </dependency>-->
      <dependency>
         <groupId>com.corundumstudio.socketio</groupId>
         <artifactId>netty-socketio</artifactId>
         <version>1.7.7</version>
      </dependency>
      <dependency>
         <groupId>joda-time</groupId>
         <artifactId>joda-time</artifactId>
         <version>2.9.9</version>
      </dependency>
      <dependency>
         <groupId>mysql</groupId>
         <artifactId>mysql-connector-java</artifactId>
         <version>5.1.45</version>
      </dependency>
      <dependency>
         <groupId>org.codehaus.jettison</groupId>
         <artifactId>jettison</artifactId>
         <version>1.3.7</version>
      </dependency>
      <!--<dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
         <version>1.2.11</version>
      </dependency>
-->
      <!-- Flink Dependency -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-connector-hive_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <!-- Hive Dependency -->
      <dependency>
         <groupId>org.apache.hive</groupId>
         <artifactId>hive-exec</artifactId>
         <version>3.1.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-common</artifactId>
         <version>3.1.2</version>
         <!--            <scope>provided</scope>-->
      </dependency>
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-table-common</artifactId>
         <version>1.11.1</version>
      </dependency>
      <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-connector-jdbc -->
      <dependency>
         <groupId>org.apache.flink</groupId>
         <artifactId>flink-connector-jdbc_2.11</artifactId>
         <version>1.11.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
         <version>3.1.2</version>
         <!--<scope>provided</scope>-->
      </dependency>
      <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-yarn-client</artifactId>
         <version>3.1.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-core</artifactId>
         <version>3.1.2</version>
      </dependency>
      <dependency>
         <groupId>org.apache.hive</groupId>
         <artifactId>hive-jdbc</artifactId>
         <version>2.1.0</version>
      </dependency>
   </dependencies>
   <!--<build>
      <plugins>
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.1.0</version>
            <configuration>
               <createDependencyReducedPom>false</createDependencyReducedPom>
            </configuration>
            <executions>
               <execution>
                  <phase>package</phase>
                  <goals>
                     <goal>shade</goal>
                  </goals>
                  <configuration>
                     <transformers>
                        <transformer
                           implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                           <!–When packaging, replace this with the corresponding main class–>
                           <mainClass>com.flink.BachTste</mainClass>
                        </transformer>
                     </transformers>
                     <filters>
                        <filter>
                           <artifact>*:*:*:*</artifact>
                           <excludes>
                              <exclude>META-INF/*.SF</exclude>
                              <exclude>META-INF/*.DSA</exclude>
                              <exclude>META-INF/*.RSA</exclude>
                           </excludes>
                        </filter>
                     </filters>
                  </configuration>
               </execution>
            </executions>
         </plugin>
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <configuration>
               <source>8</source>
               <target>8</target>
               <encoding>utf8</encoding>
            </configuration>
         </plugin>
      </plugins>
   </build>-->
   <build>
      <plugins>
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.1.0</version>
         </plugin>
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-jar-plugin</artifactId>
            <version>2.6</version>
            <configuration>
               <archive>
                  <manifest>
                     <addClasspath>true</addClasspath>
                     <classpathPrefix>lib/</classpathPrefix>
                     <mainClass>com.flink.TestMainMysql</mainClass>
                  </manifest>
               </archive>
            </configuration>
         </plugin>
         <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-dependency-plugin</artifactId>
            <version>2.10</version>
            <executions>
               <execution>
                  <id>copy-dependencies</id>
                  <phase>package</phase>
                  <goals>
                     <goal>copy-dependencies</goal>
                  </goals>
                  <configuration>
                     <outputDirectory>${project.build.directory}/lib</outputDirectory>
                  </configuration>
               </execution>
            </executions>
         </plugin>
      </plugins>
   </build>
</project>
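
To build and run locally, something like the following should work (assuming Maven is installed, Kafka is reachable at 127.0.0.1:9092, and the example HTTP service is up; the jar name follows Maven's artifactId-version convention):

mvn clean package
java -jar target/flinkDemoTest-1.0-SNAPSHOT.jar

The jar plugin records com.flink.TestMainMysql as the Main-Class with a lib/ classpath prefix, and the dependency plugin copies all dependencies into target/lib, so running the jar directly starts the job in a local Flink environment.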