package cn.edu.tju.demo3;

import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.*;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.descriptors.*;
import org.apache.flink.table.functions.AggregateFunction;
import org.apache.flink.table.functions.ScalarFunction;
import org.apache.flink.table.functions.TableFunction;
import org.apache.flink.types.Row;

public class Test50 {
    private static final String HOST_NAME = "xx.xx.xx.xx";
    private static final int PORT = 9999;
    private static final String DELIMITER = "\n";


    public static void main(String[] args) throws Exception {

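        // Streaming environment (parallelism 1) with a Table environment bridged on top of it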
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);



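        // Read comma-separated lines from the socket and parse each one into a DataInfo POJO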
        DataStream<String> socketDataInfo =  env.socketTextStream(HOST_NAME, PORT, DELIMITER);
        SingleOutputStreamOperator<DataInfo> dataInfoStream = socketDataInfo.map(new MapFunction<String, DataInfo>() {
            @Override
            public DataInfo map(String value) throws Exception {

                String[] stringList = value.split(",");
                DataInfo dataInfo = new DataInfo(Long.parseLong(
                        stringList[0]), stringList[1], Double.parseDouble(stringList[2]));
                return dataInfo;
            }
        });

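        // Convert the stream to a Table with named fields and register the UDAF (old string-expression API)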
        Table dataTable = tableEnv.fromDataStream(dataInfoStream, "ts, info, val");
        tableEnv.registerFunction("myAggregateFunction", new MyAggregateFunction());
        Table resultTable = dataTable.select("ts, info, val")
                .groupBy("info")
                .aggregate("myAggregateFunction(val) as avgVal")
                .select("info, avgVal");

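        // Register the table as a view so it can be queried with SQL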
        tableEnv.createTemporaryView("dataInfo", dataTable);

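        // The same grouped aggregation expressed in SQL, using the registered function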
        Table resultTableSql = tableEnv.sqlQuery(
                "select info, myAggregateFunction(val) from dataInfo group by info");


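        // A grouped aggregation emits updates, so convert to a retract stream (an append-only stream would be rejected)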
        tableEnv.toRetractStream(resultTable, Row.class).print();
        tableEnv.toRetractStream(resultTableSql, Row.class).print("sql");

        env.execute("my job");

    }

    public static class DataInfo{
        private long ts;
        private String info;
        private double val;

        public long getTs() {
            return ts;
        }

        public void setTs(long ts) {
            this.ts = ts;
        }

        public String getInfo() {
            return info;
        }

        public void setInfo(String info) {
            this.info = info;
        }

        public double getVal() {
            return val;
        }

        public void setVal(double val) {
            this.val = val;
        }

        @Override
        public String toString() {
            return "DataInfo{" +
                    "ts=" + ts +
                    ", info='" + info + '\'' +
                    ", val='" + val + '\'' +
                    '}';
        }

        public DataInfo(long ts, String info, double val) {
            this.ts = ts;
            this.info = info;
            this.val = val;
        }

        public DataInfo() {

        }
    }

    // Custom aggregate function: implement getValue, createAccumulator, and accumulate
    public static class MyAggregateFunction extends AggregateFunction<Double, Tuple2<Double, Integer>> {

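        // Returns the current result: sum (f0) divided by count (f1)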
        @Override
        public Double getValue(Tuple2<Double, Integer> accumulator) {
            return accumulator.f0/accumulator.f1;
        }

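        // Start from an empty accumulator: sum = 0.0, count = 0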
        @Override
        public Tuple2<Double, Integer> createAccumulator() {
            return new Tuple2<>(0.0, 0);
        }

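        // Called for every input row: add the value to the running sum and increment the count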
        public void accumulate(Tuple2<Double, Integer> accumulator, double d){
            accumulator.f1 += 1;
            accumulator.f0 += d;
        }


    }
}
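
As a side note, the string-based expressions and registerFunction used above are deprecated in newer Flink releases. A minimal sketch of the same registration and grouped call with the typed expression DSL, assuming Flink 1.11+ and that the Tuple2 accumulator is picked up by the default type extraction, could look like this:

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.call;

// Register the aggregate function for both Table API and SQL use
tableEnv.createTemporarySystemFunction("myAggregateFunction", MyAggregateFunction.class);

// Same grouped average, expressed with typed expressions instead of strings
Table resultTable = dataTable
        .groupBy($("info"))
        .aggregate(call("myAggregateFunction", $("val")).as("avgVal"))
        .select($("info"), $("avgVal"));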

nc -lk 9999
Input:

1689999831,ffff,34.2
1689999832,ffff,35.3

Result:
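
With these two sample records for key ffff, the retract stream should first emit an average of 34.2, then retract that row and emit the updated average (34.2 + 35.3) / 2 = 34.75 once the second record arrives; the SQL variant prints the same values prefixed with "sql".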
