Background

Business requirement: provide a data export feature. The data are sensor readings stored in MySQL. The user submits a list of sensor IDs plus a start time and an end time. Because the data volume is large, the export is handled by a scheduled task: each sensor's data is written to its own CSV file, all files are compressed into a single zip, and the path of the zip is saved to the database. When the user later requests the download, the path is read back from the database and the zip is returned. The main code follows (Spring Boot + Swagger 2).
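
The entity classes TaskExportDTO and TaskInitDTO are referenced below but not listed. A minimal sketch of what they might look like, with field names inferred from the Lombok-style accessors the code calls; the real classes almost certainly carry more fields (user ID, creation time, and so on):

// TaskExportDTO.java (inferred sketch; the real class may differ)
package com.cloudansys.monitor.solution.export.entity;

import lombok.Data;

import java.util.Date;

@Data
public class TaskExportDTO {
    private Integer init_id;    // id of the owning t_task_init row
    private String target_ids;  // comma-separated sensor IDs, e.g. "101,102,103"
    private Date sTime;         // start of the export time range (Lombok generates getSTime/setSTime)
    private Date eTime;         // end of the export time range
}

// TaskInitDTO.java (inferred sketch; same package and imports)
@Data
public class TaskInitDTO {
    private Integer id;         // primary key of t_task_init
    private Integer status;     // 0 = pending, 1 = running, 2 = done, as the scheduler sets it
    private Date eTime;         // time the task finished
    private String path;        // storage path of the generated zip
}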

Controller

package com.cloudansys.monitor.solution.export.controller;

import com.cloudansys.monitor.base.BaseController;
import com.cloudansys.monitor.common.CSVUtils;
import com.cloudansys.monitor.solution.export.entity.TaskExportDTO;
import com.cloudansys.monitor.solution.export.entity.TaskInitDTO;
import com.cloudansys.monitor.solution.export.service.ExportService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;

import javax.servlet.http.HttpServletResponse;
import java.util.List;

@Slf4j
@Api(tags = "Data export")
@RestController
@RequestMapping("/export")
public class ExportController extends BaseController {

    @Autowired
    private ExportService service;

    @ApiOperation("提交数据导出任务")
    @PostMapping("/submitExportJob")
    public Integer submitExportJob(@RequestBody TaskExportDTO exportDTO) {
        log.info("exportDTO: {}", exportDTO);
        return this.service.submitJob(exportDTO);
    }

    @ApiOperation("根据任务初始化ID进行下载")
    @ApiImplicitParam(name = "init_id", value = "任务初始化ID")
    @GetMapping("/downloadFile/{init_id}")
    public void downloadFile(@PathVariable Integer init_id, HttpServletResponse response) {
        String filePath = this.service.getPathByInitID(init_id);
        log.info("filePath: {}", filePath);
        CSVUtils.downloadZipFile(filePath, response);
    }

    @ApiOperation("获取用户数据导出任务")
    @PostMapping("/getExportJob/{user_id}")
    public List<TaskInitDTO> getExportJob(@PathVariable Integer user_id) {
        log.info("user_id: {}", user_id);
        return this.service.getExportJob(user_id);
    }

}
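
ExportService itself is not shown in this post. A minimal sketch of the interface, with method names and signatures inferred from how the controller and the scheduled job call it (the actual service and its implementation against MySQL may look different):

package com.cloudansys.monitor.solution.export.service;

import com.cloudansys.monitor.solution.export.entity.TaskExportDTO;
import com.cloudansys.monitor.solution.export.entity.TaskInitDTO;

import java.util.List;

// inferred sketch of the service used above; signatures follow the call sites
public interface ExportService {

    // persist a new export request and return its init_id
    Integer submitJob(TaskExportDTO exportDTO);

    // look up the stored zip path of a finished task
    String getPathByInitID(Integer initId);

    // list all export tasks belonging to one user
    List<TaskInitDTO> getExportJob(Integer userId);

    // update status / finish time / zip path of a task
    void updateTaskInit(TaskInitDTO initDTO);
}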

Scheduled task

package com.cloudansys.monitor.solution.export.schedule;

import com.cloudansys.monitor.common.CSVUtils;
import com.cloudansys.monitor.common.CacheHandler;
import com.cloudansys.monitor.common.ZipUtils;
import com.cloudansys.monitor.entity.FileBean;
import com.cloudansys.monitor.solution.data.entity.TargetPrimaryData;
import com.cloudansys.monitor.solution.data.service.TargetPrimaryDataService;
import com.cloudansys.monitor.solution.export.entity.TaskExportDTO;
import com.cloudansys.monitor.solution.export.entity.TaskInitDTO;
import com.cloudansys.monitor.solution.export.service.ExportService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.Date;
import java.util.List;

@Slf4j
@Component
@EnableScheduling
public class ScheduleExportJob implements ScheduleJob, CommandLineRunner {

    @Autowired
    private JdbcTemplate jdbcTemplate;

    @Autowired
    private ExportService exportService;

    @Autowired
    private TargetPrimaryDataService targetService;

    @Autowired
    private CacheHandler cacheHandler;

    @Override
    public void run(String... args) {
//        log.info("定时任务开始 . . .");
        exec();
    }

    // {second} {minute} {hour} {day of month} {month} {day of week}
    // run at second 0 of every 10th minute
    @Scheduled(cron = "0 */10 * * * ?")
    private void exec() {

        // directory to compress and the resulting zip file
        String srcFile = "data/data-targets";
        String zipFile = "data/data-targets.zip";

        String sql_1 = "SELECT id FROM t_task_init WHERE status = 0;";
        String sql_2 = "SELECT target_ids, start_time, end_time FROM t_task_export WHERE init_id = ?;";
//        log.debug("sql :{} ", sql_1);
//        log.debug("sql_2 :{} ", sql_2);

        // fetch the init_ids of the tasks that still need to run
        List<Integer> init_ids = this.jdbcTemplate.queryForList(sql_1, Integer.class);

        // nothing to do if there is no pending export task
        if (init_ids == null || init_ids.isEmpty()) {
            return;
        }

        // process the pending tasks one by one
        init_ids.forEach(init_id -> {
            List<TaskExportDTO> exportDTOs = this.jdbcTemplate.query(sql_2, new Object[]{init_id},
                    (resultSet, rowNum) -> {
                        TaskExportDTO exportDTO = new TaskExportDTO();
                        exportDTO.setInit_id(init_id);
                        // use fixed 1-based column indexes; the second lambda argument is the
                        // row number, not a column index, so incrementing it is not reliable
                        exportDTO.setTarget_ids(resultSet.getString(1));
                        // getTimestamp keeps the time-of-day part (getDate would truncate it)
                        exportDTO.setSTime(resultSet.getTimestamp(2));
                        exportDTO.setETime(resultSet.getTimestamp(3));
                        return exportDTO;
                    });

            // sql_2 returns exactly one TaskExportDTO for a given init_id
            TaskExportDTO exportDTO = exportDTOs.get(0);

            // mark the task for this init_id as running
            TaskInitDTO initDTO = new TaskInitDTO();
            initDTO.setId(init_id);
            initDTO.setStatus(1);
            this.exportService.updateTaskInit(initDTO);

            // for each sensor ID, query its data and write it to a CSV file named after the sensor code; all files go into data/data-targets
            String[] target_ids = exportDTO.getTarget_ids().split(",");
            Date sTime = exportDTO.getSTime();
            Date eTime = exportDTO.getETime();
            for (String target_id : target_ids) {
                Integer targetId = Integer.valueOf(target_id);
                List<TargetPrimaryData> targetPrimaryData = this.targetService.getByTargetId(targetId, sTime, eTime);
                if (targetPrimaryData == null || targetPrimaryData.isEmpty()) {
                    // no data for this sensor in the requested time range, skip it
                    continue;
                }

                // build the CSV header for this sensor: [param1, param2, ..., 数据时间]
                String[] param = targetPrimaryData.get(0).getParam();
                List<Object> head = new ArrayList<>();
                for (String p : param) {
                    head.add(p);
                }
                head.add("数据时间");
                List<List<Object>> dataList = new ArrayList<>();
                for (TargetPrimaryData target : targetPrimaryData) {
                    Double[] data = target.getData();
                    List<Object> list = new ArrayList<>();
                    for (Double d : data) {
                        list.add(d);
                    }
                    list.add(target.getTime());
                    dataList.add(list);
                }

                FileBean fileBean = new FileBean();
                fileBean.setFileID(targetId);

                // look up the targetCode for this targetId in the cache
                String targetCode = cacheHandler.getTargetCode(targetId);
                fileBean.setFileName(targetCode);
                fileBean.setFilePath("data/data-targets");

                // write this sensor's data to a CSV file named after targetCode
                CSVUtils.createCSVFile(head, dataList, fileBean);
//                log.info("================ CSV file created!");
            }

            // once all sensors of this task have been processed, the CSV files sit under data/data-targets;
            // compress that directory into a single zip
            ZipUtils.doCompress(srcFile, zipFile);
            log.info("================ CSV files compressed!");

            // finally store the zip path in the database and mark the task as done
            initDTO.setStatus(2);
            initDTO.setETime(new Date());
            initDTO.setPath(zipFile);
            this.exportService.updateTaskInit(initDTO);
            log.info("================csv 文件路径存储结束!");
        });
    }

}
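
FileBean is also only used, never listed. A possible minimal version, with fields inferred from the accessors called in the scheduler and in CSVUtils:

package com.cloudansys.monitor.entity;

import lombok.Data;

// inferred sketch: descriptor of one output file
@Data
public class FileBean {
    private Integer fileID;  // sensor (target) ID the file belongs to
    private String fileName; // base file name without extension, here the sensor's targetCode
    private String filePath; // output directory, e.g. "data/data-targets"
}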

CSV utility class

In my tests, writing 10 million rows with three fields to a CSV file took only about 3 seconds.

package com.cloudansys.monitor.common;

import com.cloudansys.monitor.entity.FileBean;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.io.FileUtils;

import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.List;

@SuppressWarnings("ResultOfMethodCallIgnored")
@Slf4j
public class CSVUtils {

    /**
     * @param head     CSV header row
     * @param dataList data rows to write to the CSV file
     * @param fileBean file metadata (target directory and file name)
     * @return path of the generated CSV file
     */
    public static String createCSVFile(List<Object> head, List<List<Object>> dataList, FileBean fileBean) {

        Instant start_time = Instant.now();

        File csvFile = new File(fileBean.getFilePath() + File.separator + fileBean.getFileName() + ".csv");
        File parent = csvFile.getParentFile();
        if (parent != null && !parent.exists()) {
            parent.mkdirs();
        }

        // try-with-resources closes the printer and the underlying writer even if writing fails
        try (FileWriter writer = new FileWriter(csvFile);
             CSVPrinter printer = CSVFormat.EXCEL.print(writer)) {

            // write the header row
            printer.printRecord(head);

            // write the data rows
            for (List<Object> row : dataList) {
                printer.printRecord(row);
            }
            printer.flush();
        } catch (IOException e) {
            log.error("failed to write CSV file {}", csvFile.getPath(), e);
        }

        Instant end_time = Instant.now();
        long seconds = ChronoUnit.SECONDS.between(start_time, end_time);
        log.info("CSV write took {} s", seconds);

        return csvFile.getPath();
    }

    /**
     * Send a previously generated zip file to the client as a download.
     *
     * @param filePath path of the zip file to send
     * @param response HTTP response to write the file to
     */
    public static void downloadZipFile(String filePath, HttpServletResponse response) {
        File file = new File(filePath);
        if (!file.exists()) {
            response.setStatus(HttpServletResponse.SC_NOT_FOUND);
            return;
        }
        try {
            response.reset();
            response.setContentType("application/octet-stream");
            response.setContentLengthLong(file.length());
            response.setHeader("Content-Disposition", "attachment;filename=" + file.getName());
            OutputStream os = response.getOutputStream();
            // stream the file instead of loading it fully into memory
            FileUtils.copyFile(file, os);
            os.flush();
        } catch (Exception e) {
            log.error("failed to send zip file {}", filePath, e);
        }
    }

}
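
The "10 million rows in about 3 seconds" figure was measured on my machine and will vary with disk speed, JVM and heap size. Below is a rough, self-contained sketch of how such a measurement can be reproduced against createCSVFile; the class name and paths are made up for the example, and the default row count is kept at one million because ten million boxed rows need several gigabytes of heap:

package com.cloudansys.monitor.common;

import com.cloudansys.monitor.entity.FileBean;

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class CsvWriteBenchmark {

    public static void main(String[] args) {
        int rows = args.length > 0 ? Integer.parseInt(args[0]) : 1_000_000;

        // synthetic rows with a few numeric fields and a time column, similar to what the export job writes
        List<Object> head = new ArrayList<>(Arrays.asList("param1", "param2", "param3", "数据时间"));
        List<List<Object>> dataList = new ArrayList<>(rows);
        for (int i = 0; i < rows; i++) {
            List<Object> row = new ArrayList<>(4);
            row.add(i * 0.1);
            row.add(i * 0.2);
            row.add(i * 0.3);
            row.add("2023-01-01 00:00:00");
            dataList.add(row);
        }

        FileBean fileBean = new FileBean();
        fileBean.setFileName("benchmark");
        fileBean.setFilePath("data/benchmark");

        Instant start = Instant.now();
        String path = CSVUtils.createCSVFile(head, dataList, fileBean);
        System.out.printf("wrote %d rows to %s in %d ms%n",
                rows, path, Duration.between(start, Instant.now()).toMillis());
    }
}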

ZIP compression utility

package com.cloudansys.monitor.common;

import lombok.extern.slf4j.Slf4j;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

@Slf4j
public class ZipUtils {

    private ZipUtils() {
    }

    public static void doCompress(String srcFile, String zipFile) {
        try {
            doCompress(new File(srcFile), new File(zipFile));
        } catch (IOException e) {
            log.error("doCompress: failed to compress {}", srcFile, e);
        }
    }

    /**
     * Compress a file or a whole directory into a zip file.
     *
     * @param srcFile directory or single file to compress
     * @param zipFile resulting zip file
     */
    private static void doCompress(File srcFile, File zipFile) throws IOException {
        // try-with-resources closes the zip stream even if compression fails
        try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(zipFile))) {
            doCompress(srcFile, out);
        }
    }

    static void doCompress(String fileName, ZipOutputStream out) throws IOException {
        doCompress(new File(fileName), out);
    }

    private static void doCompress(File file, ZipOutputStream out) throws IOException {
        doCompress(file, out, "");
    }

    private static void doCompress(File inFile, ZipOutputStream out, String dir) throws IOException {
        if (inFile.isDirectory()) {
            File[] files = inFile.listFiles();
            if (files != null && files.length > 0) {
                for (File file : files) {
                    String name = inFile.getName();
                    if (!"".equals(dir)) {
                        name = dir + "/" + name;
                    }
                    ZipUtils.doCompress(file, out, name);
                }
            }
        } else {
            ZipUtils.doZip(inFile, out, dir);
        }
    }

    private static void doZip(File inFile, ZipOutputStream out, String dir) throws IOException {
        String entryName;
        if (!"".equals(dir)) {
            entryName = dir + "/" + inFile.getName();
        } else {
            entryName = inFile.getName();
        }
        ZipEntry entry = new ZipEntry(entryName);
        out.putNextEntry(entry);
        byte[] buffer = new byte[1024];
        int len;
        // try-with-resources guarantees the input stream is closed even on error
        try (FileInputStream fis = new FileInputStream(inFile)) {
            while ((len = fis.read(buffer)) > 0) {
                out.write(buffer, 0, len);
            }
        }
        out.closeEntry();
    }

}
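
A small usage sketch showing how the scheduled job invokes ZipUtils and what the resulting entry names look like; the directory and file names here are only examples:

package com.cloudansys.monitor.common;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.zip.ZipFile;

public class ZipUtilsDemo {

    public static void main(String[] args) throws IOException {
        // prepare a directory with two small CSV files, mimicking data/data-targets
        File dir = new File("data/data-targets-demo");
        dir.mkdirs();
        Files.write(new File(dir, "TARGET-001.csv").toPath(), "a,b,time\n1,2,t\n".getBytes());
        Files.write(new File(dir, "TARGET-002.csv").toPath(), "a,b,time\n3,4,t\n".getBytes());

        // compress the whole directory into one zip, as the scheduler does
        ZipUtils.doCompress(dir.getPath(), "data/data-targets-demo.zip");

        // entries are prefixed with the directory name, e.g. data-targets-demo/TARGET-001.csv
        try (ZipFile zip = new ZipFile("data/data-targets-demo.zip")) {
            zip.stream().forEach(e -> System.out.println(e.getName()));
        }
    }
}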