Background

Until now I had only used nginx to proxy HTTP interfaces. Recently a trading project went live that communicates with a bank over a raw TCP socket, so this post summarizes how to use nginx to proxy a socket (TCP) port.

Configuration

Build nginx with the ngx_stream_core_module module

The key configure option is --with-stream. Since other modules were already compiled in, nginx was rebuilt with the following commands:

./configure --prefix=/usr/local/nginx --with-stream --with-http_stub_status_module --with-http_ssl_module --add-module=/usr/local/fastdfs-nginx-module/src

make && make install
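
After make && make install you can verify that the stream module is actually compiled in (assuming the /usr/local/nginx prefix used above); nginx -V prints the configure arguments, which should include --with-stream:

/usr/local/nginx/sbin/nginx -V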

Configure conf/nginx.conf

Add a stream block at the same level as the existing events block:

stream {
    upstream socket_server {
        # the two backend Netty instances; connections are balanced round-robin by default
        server 127.0.0.1:8001;
        server 127.0.0.1:8002;
    }
    # listen on the TCP port exposed to clients and forward to the upstream group
    server {
        listen 8888;
        proxy_pass socket_server;
    }
}
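
For long-lived socket connections like a bank link, the stream proxy timeouts are also worth a look: by default nginx closes a proxied connection after 10 minutes without traffic (proxy_timeout). The variant below is an illustrative sketch with example values, not the configuration used in the project:

stream {
    upstream socket_server {
        least_conn;                 # optional: prefer the backend with the fewest active connections
        server 127.0.0.1:8001;
        server 127.0.0.1:8002;
    }
    server {
        listen 8888;
        proxy_connect_timeout 5s;   # time allowed to establish the connection to a backend
        proxy_timeout 30m;          # idle timeout between client and backend (default 10m)
        proxy_pass socket_server;
    }
}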

Testing

  1. Deploy two Netty server programs on the cloud host, listening on ports 8001 and 8002, and start them.
  2. Start nginx.
  3. Open two terminals locally and test with telnet IP 8888 (see the example after this list); the connections are proxied to the 8001 and 8002 backends in turn, so the two processes respond alternately.
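
Because the server pipeline uses a DelimiterBasedFrameDecoder, a message only reaches the handler once the '~' delimiter arrives, so each line typed in telnet has to end with '~'. For example (47.1.5.8 is the address hard-coded in the client code below; substitute your own server address):

telnet 47.1.5.8 8888
# type a line such as:  hello~
# the backend answers with "hi, " plus the message (see ServerHandler below)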

Appendix: Code

Server

NettyServer.java

package com.ll.netty;

import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class NettyServer {
    private static final Logger log = LoggerFactory.getLogger(NettyServer.class);
    // Bind to loopback only: the port is reached through the nginx proxy on the same host.
    private final String ip = "127.0.0.1";
    private final int port = 8001; // set to 8002 for the second instance

    public void start() throws InterruptedException {
        EventLoopGroup bossGroup = new NioEventLoopGroup(1);  // accepts incoming connections
        EventLoopGroup workerGroup = new NioEventLoopGroup(); // handles I/O for accepted channels

        try {
            ServerBootstrap sb = new ServerBootstrap();
            sb.group(bossGroup, workerGroup);
            sb.channel(NioServerSocketChannel.class);
            sb.childHandler(new ServerInitializer());

            ChannelFuture f = sb.bind(ip, port).sync();
            log.info("Netty server started successfully at " + f.channel().localAddress());

            f.channel().closeFuture().sync();
        } catch (Exception e) {
            log.error("Netty server failed", e);
        } finally {
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }

    public void destroy() {
        // nothing to clean up yet; resources are released in start()'s finally block
    }
}
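
The post does not show an entry point for starting the two instances. A minimal launcher might look like the sketch below (ServerMain is my own name, not from the original project; run one instance with the port field set to 8001 and another with 8002):

package com.ll.netty;

// Hypothetical launcher, not part of the original post: starts one server instance.
public class ServerMain {
    public static void main(String[] args) throws InterruptedException {
        new NettyServer().start(); // blocks until the server channel closes
    }
}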

ServerInitializer.java

package com.ll.netty;

import java.nio.charset.Charset;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import io.netty.util.concurrent.DefaultEventExecutorGroup;
import io.netty.util.concurrent.EventExecutorGroup;

public class ServerInitializer extends ChannelInitializer<SocketChannel> {

    private static final Logger log = LoggerFactory.getLogger(ServerInitializer.class);

    // Separate executor group so the business handler does not block the I/O threads.
    static final EventExecutorGroup bussGroup = new DefaultEventExecutorGroup(16);

    @Override
    protected void initChannel(SocketChannel ch) {
        ChannelPipeline pipeline = ch.pipeline();
        pipeline.addLast(new LoggingHandler(LogLevel.INFO));
        // Split the byte stream into frames terminated by the '~' delimiter.
        pipeline.addLast(new DelimiterBasedFrameDecoder(Integer.MAX_VALUE,
                Unpooled.wrappedBuffer(new byte[] { '~' })));

        // Decode/encode each frame as a UTF-8 string.
        pipeline.addLast(new StringDecoder(Charset.forName("UTF-8")));
        pipeline.addLast(new StringEncoder(Charset.forName("UTF-8")));

        pipeline.addLast(bussGroup, "handler", new ServerHandler());
    }
}

ServerHandler.java

package com.ll.netty;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;

public class ServerHandler extends ChannelInboundHandlerAdapter {

    private static final Logger log = LoggerFactory.getLogger(ServerHandler.class);

    ………………………………

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
        // Echo a greeting; append the '~' delimiter so the Netty client's
        // DelimiterBasedFrameDecoder can frame the response.
        ctx.write("hi, " + msg.toString() + "~");
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
        ctx.flush();
    }

    ………………………………

}

Client

Client.java

import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;

public class Client {

    public static void main(String[] args) throws InterruptedException {
        Bootstrap client = new Bootstrap();

        EventLoopGroup group = new NioEventLoopGroup();
        client.group(group);

        client.channel(NioSocketChannel.class);

        client.handler(new ChannelInitializer<NioSocketChannel>() {
            @Override
            protected void initChannel(NioSocketChannel ch) throws Exception {
                ch.pipeline().addLast(new LoggingHandler(LogLevel.INFO));
                ch.pipeline().addLast(new StringEncoder());
                // Frame server responses on the same '~' delimiter used by the server.
                ch.pipeline().addLast(new DelimiterBasedFrameDecoder(
                        Integer.MAX_VALUE, Unpooled.wrappedBuffer(new byte[] { '~' })));
                ch.pipeline().addLast(new StringDecoder());
            }
        });

        try {
            // Connect to the nginx stream listener, not to a backend directly.
            ChannelFuture future = client.connect("47.1.5.8", 8888).sync();

            // Send 100 messages, each terminated with the '~' delimiter.
            for (int i = 0; i < 100; i++) {
                String msg = "l" + i + "~";
                future.channel().writeAndFlush(msg);
            }

            future.channel().closeFuture().sync();
        } finally {
            group.shutdownGracefully();
        }
    }
}
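
As written, the client decodes responses but never consumes them, so replies are only visible through the LoggingHandler. To print them explicitly, a small handler like the hypothetical one below (my own addition, not from the original post) could be appended to the client pipeline after the StringDecoder:

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

// Hypothetical helper: prints each decoded response frame.
public class ClientResponseHandler extends SimpleChannelInboundHandler<String> {
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, String msg) {
        // msg is one frame with the '~' delimiter already stripped, e.g. "hi, l0"
        System.out.println("server replied: " + msg);
    }
}

It would be registered at the end of initChannel with ch.pipeline().addLast(new ClientResponseHandler());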