This article introduces an approach and methods for exporting very large volumes of data.

Why exports are slow:

Querying too much data in one go makes the SQL slow, and processing that much data for export is also slow.

Below I walk through batched querying and batched exporting.

Contents

Batched query methods

1: Split the data into chunks by row count

2: Split the query time range into one-day slices

Batched query + batched export example


Batched query methods

1: Split the data into chunks by row count

//total row count
int count = testService.selectSize(test);
//chunk size: 100,000 rows per batch
int num = 100000;
//number of full batches
int cycles = count / num;
//leftover rows
int remainder = count % num;
String sql = "";
//collected results of all batches
List<Test> getDownloadList = new ArrayList<>();
//query the full batches one by one
for (int i = 0; i < cycles; i++) {
    //sql = "select .... from test where .... order by create_time" + " limit " + (i * num) + "," + num;
    //LIMIT offset
    test.setFront(i * num);
    //LIMIT row count
    test.setAfter(num);
    List<Test> testList = testService.selectAll(test);
    getDownloadList.addAll(testList);
}
if (remainder > 0) {
    //last partial batch: the offset is num * cycles and the row count is the leftover
    test.setFront(num * cycles);
    test.setAfter(remainder);
    //sql = "select .... from test where .... order by create_time" + " limit " + (num * cycles) + "," + remainder;
    List<Test> testList = testService.selectAll(test);
    getDownloadList.addAll(testList);
}
//export step
.....
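Accumulating every batch in getDownloadList still keeps the entire result set in memory before exporting. A minimal sketch of the elided export step, assuming the exportCSVFile helper shown later in this article and a hypothetical toRow(Test) mapper, writes each batch to the CSV as soon as it is queried:

// Sketch (assumption): export each batch inside the loop, reusing the
// exportCSVFile helper from the PoiUtils section below. toRow(...) is a
// hypothetical mapper from a Test entity to one CSV row of strings.
String absolutePath = getDefaultBaseDir() + "/temp_export.csv"; // hypothetical file name
String[] title = {"id", "name", "create_time"};                 // hypothetical header
for (int i = 0; i < cycles; i++) {
    test.setFront(i * num);
    test.setAfter(num);
    List<Test> batch = testService.selectAll(test);
    List<List<String>> rows = new ArrayList<>();
    for (Test t : batch) {
        rows.add(toRow(t));
    }
    //i == 0 makes exportCSVFile write the header row first
    PoiUtils.exportCSVFile(title, rows, i, absolutePath);
}
//the remainder batch (if any) is written the same way with index cycles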

2: Split the query time range into one-day slices

Long beginTime = ...;    //second-level Unix timestamp (one day = 86400 seconds)
Long endTime = ...;
List<Test> getDownloadList = new ArrayList<>();
//split the query window into one-day slices; the map keeps the slices in ascending time order
Map<String, String> timeMap = new LinkedHashMap<>();
if (endTime - beginTime > 86400) {
    //number of full one-day slices
    int l = (int) ((endTime - beginTime) / 86400);
    for (int i = 1; i <= l; i++) {
        timeMap.put(DateUtils.longToString(beginTime + (i - 1) * 86400), DateUtils.longToString(beginTime + (i * 86400)));
    }
    //if the window is not a whole number of days, add the remaining partial slice
    if (beginTime + l * 86400 != endTime) {
        timeMap.put(DateUtils.longToString(beginTime + (l * 86400)), DateUtils.longToString(endTime));
    }
} else {
    //window shorter than one day
    timeMap.put(DateUtils.longToString(beginTime), DateUtils.longToString(endTime));
}
//if the exported data should be shown in descending order, reverse the slice order
List<String> times = new ArrayList<>(timeMap.keySet());
Collections.reverse(times);
for (String beginTimeStr : times) {
    String endTimeStr = timeMap.get(beginTimeStr);
    //sql: .....
    List<Test> testList = ......;
    getDownloadList.addAll(testList);
}
//export step
.....
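The per-slice query elided in the loop above would look roughly like this, assuming create_time is stored in a format comparable with the "yyyy-MM-dd HH:mm:ss" strings produced by longToString; the projection, table, and query method are placeholders:

// Sketch (assumption): one query per one-day slice; selectBySql is a
// hypothetical query method, and the column/table names are placeholders.
for (String beginTimeStr : times) {
    String endTimeStr = timeMap.get(beginTimeStr);
    String sliceSql = "select .. from test"
            + " where create_time >= '" + beginTimeStr + "'"
            + " and create_time < '" + endTimeStr + "'"
            + " order by create_time desc";
    List<Test> testList = testService.selectBySql(sliceSql);
    getDownloadList.addAll(testList);
}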


/**
 * Convert a long timestamp (second-level) to a String: 1654012800 --> 2022-06-01 00:00:00
 *
 * @param value second- or millisecond-level timestamp
 * @return formatted date string
 */
public static String longToString(long value) {
    //a 10-digit value is a second-level timestamp; convert it to milliseconds
    if (String.valueOf(value).length() == 10) {
        value = value * 1000;
    }
    Date time = new Date(value);
    return formatYMDHMS.format(time);
}
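The formatter formatYMDHMS itself is not shown in the article; presumably it is something along these lines (note that a shared static SimpleDateFormat is not thread-safe, so concurrent callers would need their own instance or external synchronization):

// Assumption: the formatter used by longToString, not shown in the original article.
private static final SimpleDateFormat formatYMDHMS = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");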

Batched query + batched export example

Controller layer

if ((endTime - beginTime) / (3600 * 24) > 31) {
    throw new CustomException("At most 31 days of data can be requested at once; please export in batches");
}
String sql = "select count(1) as count from test where create_time between " + beginTime + " and " + endTime;
List<Map<String, Object>> mapList = ClickHouseUtils.execSQL(sql);
//split the export by row count; it could also be split by time range as shown above
int count = Integer.parseInt(mapList.get(0).get("count").toString());
//chunk size: 200,000 rows per batch
int num = 200000;
//number of full batches
int cycles = count / num;
//leftover rows
int remainder = count % num;
String path = getDefaultBaseDir();
String csvFile = "temp_history_data.csv";
String absolutePath = path + "/" + csvFile;
//build the header row
String[] exportDataTitle = dataService.exportDataTitle(fields);
File file = new File(path);
//create the directory if it does not exist yet
if (!file.exists()) {
    if (file.mkdirs()) {
        logger.info("History export directory created");
    } else {
        logger.error("Failed to create history export directory");
    }
}
for (int i = 0; i < cycles; i++) {
    sql = "select .. from test where create_time between " + beginTime + " and " + endTime + " order by create_time limit " + (i * num) + "," + num;
    FileVO fileVO = dataService.fileVO(sql, ids, fields);
    PoiUtils.exportCSVFile(exportDataTitle, fileVO.getDownloadList(), i, absolutePath);
}
if (remainder > 0) {
    //last partial batch: the LIMIT offset is num * cycles and the row count is the leftover
    sql = "select .. from test where create_time between " + beginTime + " and " + endTime + " order by create_time limit " + (num * cycles) + "," + remainder;
    FileVO fileVO = dataService.fileVO(sql, ids, fields);
    PoiUtils.exportCSVFile(exportDataTitle, fileVO.getDownloadList(), cycles, absolutePath);
}
//stream the CSV file back to the browser for download
PoiUtils.outCsvStreamCSV(response, absolutePath);
logger.info("History export file written to: " + absolutePath);
//delete the temporary file
PoiUtils.deleteFile(new File(absolutePath));
logger.info("History export temporary file deleted: " + absolutePath);


PoiUtils layer


    /**
     * Append one batch of rows to a CSV file on the server
     *
     * @param title        header row
     * @param downloadList rows of the current batch
     * @param i            batch index; the header is only written for the first batch
     * @param absolutePath absolute path of the CSV file
     * @throws IOException
     */
    public static void exportCSVFile(String[] title, List<List<String>> downloadList, int i, String absolutePath) throws IOException {
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(absolutePath, true));
        logger.info("Writing CSV file: " + absolutePath);
        //write the header only on the first batch
        if (i == 0) {
            PoiUtils.writeHead(title, bufferedWriter);
            //move to a new line
            bufferedWriter.newLine();
        }
        //append every row of this batch
        for (List<String> list : downloadList) {
            CSVFileUtil.writeRow(list, bufferedWriter);
            bufferedWriter.newLine();
        }
        bufferedWriter.close();
    }


    /**
     * Write the CSV header row
     *
     * @param title          header column names
     * @param bufferedWriter writer of the target CSV file
     * @throws IOException
     */
    public static void writeHead(String[] title, BufferedWriter bufferedWriter) throws IOException {
        //write the header columns separated by commas
        int i = 0;
        for (String data : title) {
            bufferedWriter.write(data);
            if (i != title.length - 1) {
                bufferedWriter.write(",");
            }
            i++;
        }
    }

    /**
     * Stream the assembled CSV file to the browser
     *
     * @param response     HTTP response to write the file to
     * @param absolutePath absolute path of the CSV file on the server
     * @throws IOException
     */
    public static void outCsvStreamCSV(HttpServletResponse response, String absolutePath) throws IOException {
        java.io.OutputStream out = response.getOutputStream();
        byte[] b = new byte[10240];
        java.io.File fileLoad = new java.io.File(absolutePath);
        response.reset();
        response.setContentType("application/csv");
        response.setHeader("content-disposition", "attachment; filename=" + URLEncoder.encode("export.csv", "UTF-8"));
        java.io.FileInputStream in = new java.io.FileInputStream(fileLoad);
        int n;
        //write a UTF-8 BOM first so Excel opens the CSV without garbling non-ASCII characters
        out.write(new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF});
        while ((n = in.read(b)) != -1) {
            //write up to 10240 bytes per iteration
            out.write(b, 0, n);
        }
        in.close();
        out.close();
    }
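    /**
     * (Added sketch, assumption.) PoiUtils.deleteFile, which the controller calls to
     * clean up the temporary file, is not shown in the original article; presumably
     * it is a thin wrapper along these lines.
     */
    public static void deleteFile(File file) {
        if (file.exists() && file.delete()) {
            logger.info("Temporary file deleted: " + file.getAbsolutePath());
        } else {
            logger.warn("Temporary file could not be deleted: " + file.getAbsolutePath());
        }
    }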

CSVFileUtil

/**
 * CSV export utility class
 */
@Slf4j
public class CSVFileUtil {
    /**
     * Read a CSV file
     *
     * @param file        CSV file (path + file name)
     * @param delimiter   field delimiter
     * @param charsetName character set of the file
     * @return parsed rows
     */
    public static List<String[]> importCsv(File file, String delimiter, String charsetName) {
        List<String[]> dataList = new ArrayList<>();
        BufferedReader br = null;
        try {
            InputStreamReader isr = new InputStreamReader(new FileInputStream(file), charsetName);
            br = new BufferedReader(isr);
            String line = "";
            while ((line = br.readLine()) != null) {
                dataList.add(line.split(delimiter));
            }
        } catch (Exception e) {
            log.error("Failed to read CSV file", e);
        } finally {
            if (br != null) {
                try {
                    br.close();
                    br = null;
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return dataList;
    }

    /**
     * Write a CSV file
     * The CSV file (path + file name) is created automatically if it does not exist
     *
     * @param exportData rows to write
     * @param outPutPath output directory
     * @param fileName   file name prefix
     * @return the created CSV file
     */
    public static File exportCsv(List<List<String>> exportData, String outPutPath, String fileName) {
        File csvFile = null;
        BufferedWriter csvFileOutputStream = null;
        try {
            File file = new File(outPutPath);
            if (!file.exists()) {
                if (file.mkdirs()) {
                    log.info("Output directory created");
                } else {
                    log.error("Failed to create output directory");
                }
            }
            //create a uniquely named file in the output directory
            csvFile = File.createTempFile(fileName, ".csv", new File(outPutPath));
            csvFileOutputStream = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(csvFile, true), StandardCharsets.UTF_8), 1024);
            for (List<String> exportDatum : exportData) {
                writeRow(exportDatum, csvFileOutputStream);
                csvFileOutputStream.newLine();
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (csvFileOutputStream != null) {
                    csvFileOutputStream.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        return csvFile;
    }

    /**
     * Write one row of data
     *
     * @param row       field values of the row
     * @param csvWriter writer of the target CSV file
     * @throws IOException
     */
    public static void writeRow(List<String> row, BufferedWriter csvWriter) throws IOException {
        int i = 0;
        for (String data : row) {
            csvWriter.write(data);
            if (i != row.size() - 1) {
                csvWriter.write(",");
            }
            i++;
        }
    }

    /**
     * Strip special characters from a value
     *
     * @param str input value
     * @return the value with special characters removed
     */
    public static String DelQuota(String str) {
        String result = str;
        String[] strQuota = {"~", "!", "@", "#", "$", "%", "^", "&", "*", "(", ")", "`", ";", "'", ",", ".", "/", ":", "/,", "<", ">", "?"};
        for (String s : strQuota) {
            if (result.contains(s)) {
                result = result.replace(s, "");
            }
        }
        return result;
    }
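    /**
     * (Added sketch, not part of the original article.) DelQuota above keeps CSV rows
     * intact by stripping separators out of the data, which loses information. An
     * alternative, assuming the consumer understands quoted CSV (Excel does), is to
     * quote each field and double any embedded quotes before writing it.
     */
    public static String quoteField(String field) {
        return "\"" + field.replace("\"", "\"\"") + "\"";
    }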

    /**
     * Quick manual test
     *
     * @param args
     */
    public static void main(String[] args) {
        exportCsv();
        //importCsv();
    }

    /**
     * CSV read test
     */
    public static void importCsv() {
        List<String[]> dataList = CSVFileUtil.importCsv(new File("F:/test_two.csv"), ",", "GB2312");
        if (!dataList.isEmpty()) {
            for (String[] cells : dataList) {
                if (cells != null && cells.length > 0) {
                    for (String cell : cells) {
                        System.out.print(cell + "  ");
                    }
                    System.out.println();
                }
            }
        }
    }

    /**
     * CSV write test
     */
    public static void exportCsv() {
        List<List<String>> listList = new ArrayList<>();

        List<String> list1 = new ArrayList<>();
        List<String> list2 = new ArrayList<>();
        List<String> list3 = new ArrayList<>();
        list1.add("编号");
        list1.add("姓名");
        list1.add("身高");
        list1.add("电话");

        list2.add("1");
        list2.add("小明");
        list2.add("180cm");
        list2.add("1111111");

        list3.add("2");
        list3.add("小红");
        list3.add("176cm");
        list3.add("1111111");

        listList.add(list1);
        listList.add(list2);
        listList.add(list3);

        CSVFileUtil.exportCsv(listList, "D://", "testFile");
    }
}

That wraps up batched querying + batched exporting.

Adjust the code to your own requirements and test it thoroughly against the source above.