Overview
The socket-based implementation from the previous section is simple, but in real production systems Netty is generally used instead.
For the advantages of Netty, see:
Why choose Netty?
http://houbb.github.io/2019/05/10/netty-definitive-gudie-04-why-netty
Code
Maven dependency
<dependency>
    <groupId>io.netty</groupId>
    <artifactId>netty-all</artifactId>
    <version>${netty.version}</version>
</dependency>
This imports the Maven package for Netty; the version used here is 4.1.17.Final.
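Since ${netty.version} is a Maven property rather than a literal version, it also has to be declared in the pom; a minimal sketch:

<properties>
    <netty.version>4.1.17.Final</netty.version>
</properties>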
Server code implementation
Netty's server bootstrap code is largely boilerplate.
package com.github.houbb.rpc.server.core;

import com.github.houbb.log.integration.core.Log;
import com.github.houbb.log.integration.core.LogFactory;
import com.github.houbb.rpc.server.constant.RpcServerConst;
import com.github.houbb.rpc.server.handler.RpcServerHandler;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;

/**
 * RPC server.
 * @author binbin.hou
 * @since 0.0.1
 */
public class RpcServer extends Thread {

    private static final Log log = LogFactory.getLog(RpcServer.class);

    /**
     * Port number.
     */
    private final int port;

    public RpcServer() {
        this.port = RpcServerConst.DEFAULT_PORT;
    }

    public RpcServer(int port) {
        this.port = port;
    }

    @Override
    public void run() {
        // Start the server.
        log.info("RPC server starting...");
        // bossGroup accepts incoming connections; workerGroup handles their I/O.
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            ServerBootstrap serverBootstrap = new ServerBootstrap();
            // The acceptor (parent) group comes first, then the worker (child) group.
            serverBootstrap.group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInitializer<Channel>() {
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline().addLast(new RpcServerHandler());
                        }
                    })
                    // SO_BACKLOG bounds the queue of connections not yet taken out by accept.
                    .option(ChannelOption.SO_BACKLOG, 128)
                    // SO_KEEPALIVE: after the connection is idle for a while, TCP sends
                    // keep-alive probes to check whether the client is still alive.
                    .childOption(ChannelOption.SO_KEEPALIVE, true);
            // Bind the port and start accepting incoming connections.
            ChannelFuture channelFuture = serverBootstrap.bind(port).syncUninterruptibly();
            log.info("RPC server started, listening on port " + port);
            channelFuture.channel().closeFuture().syncUninterruptibly();
            log.info("RPC server shut down");
        } catch (Exception e) {
            log.error("RPC server exception", e);
        } finally {
            workerGroup.shutdownGracefully();
            bossGroup.shutdownGracefully();
        }
    }

}
For simplicity, the server port is fixed. The RpcServerConst constant class looks like this:
public final class RpcServerConst {

    private RpcServerConst(){}

    /**
     * Default port.
     * @since 0.0.1
     */
    public static final int DEFAULT_PORT = 9627;

}
RpcServerHandler
The other core class is RpcServerHandler:
package com.github.houbb.rpc.server.handler;

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class RpcServerHandler extends SimpleChannelInboundHandler {

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
        // do nothing for now
    }

}
The implementation is empty for now; logging and the actual processing logic can be added later.
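For example, a minimal sketch of what a later version with logging might look like (the Log usage mirrors the server class above; since no codec is in the pipeline yet, msg would still be a raw ByteBuf):

package com.github.houbb.rpc.server.handler;

import com.github.houbb.log.integration.core.Log;
import com.github.houbb.log.integration.core.LogFactory;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class RpcServerHandler extends SimpleChannelInboundHandler {

    private static final Log log = LogFactory.getLog(RpcServerHandler.class);

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
        // Log which client sent the message and the raw payload.
        log.info("[Server] received from " + ctx.channel().remoteAddress() + ": " + msg);
    }

}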
Test
The code to start the test is very simple:
/**
 * Server startup test.
 * @param args arguments
 */
public static void main(String[] args) {
    new RpcServer().start();
}
Overview
We implemented the server side above. In this section, let's look at the client-side code.
Code
RpcClient
/*
 * Copyright (c) 2019. houbinbin Inc.
 * rpc All rights reserved.
 */
package com.github.houbb.rpc.client.core;

import com.github.houbb.log.integration.core.Log;
import com.github.houbb.log.integration.core.LogFactory;
import com.github.houbb.rpc.client.handler.RpcClientHandler;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;

/**
 * <p> RPC client. </p>
 *
 * <pre> Created: 2019/10/16 11:21 PM </pre>
 * <pre> Project: rpc </pre>
 *
 * @author houbinbin
 * @since 0.0.2
 */
public class RpcClient extends Thread {

    private static final Log log = LogFactory.getLog(RpcClient.class);

    /**
     * The server port to connect to.
     */
    private final int port;

    public RpcClient(int port) {
        this.port = port;
    }

    public RpcClient() {
        // Must match the server's default port (RpcServerConst.DEFAULT_PORT).
        this(9627);
    }

    @Override
    public void run() {
        // Start the client.
        log.info("RPC client starting...");
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            Bootstrap bootstrap = new Bootstrap();
            ChannelFuture channelFuture = bootstrap.group(workerGroup)
                    .channel(NioSocketChannel.class)
                    .option(ChannelOption.SO_KEEPALIVE, true)
                    .handler(new ChannelInitializer<Channel>(){
                        @Override
                        protected void initChannel(Channel ch) throws Exception {
                            ch.pipeline()
                                    .addLast(new LoggingHandler(LogLevel.INFO))
                                    .addLast(new RpcClientHandler());
                        }
                    })
                    .connect("localhost", port)
                    .syncUninterruptibly();
            log.info("RPC client started, connected to server port: " + port);
            channelFuture.channel().closeFuture().syncUninterruptibly();
            log.info("RPC client shut down");
        } catch (Exception e) {
            log.error("RPC client exception", e);
        } finally {
            workerGroup.shutdownGracefully();
        }
    }

}
.connect("localhost", port)
declares the address of the server the client connects to; the port here must match the server's port.
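If you would rather not block on syncUninterruptibly(), the connect result can also be observed through a listener. A self-contained sketch; the class name ConnectCheck and the no-op handler are illustrative, not part of the project:

package com.github.houbb.rpc.client.core;

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;

public class ConnectCheck {

    public static void main(String[] args) {
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        Bootstrap bootstrap = new Bootstrap();
        bootstrap.group(workerGroup)
                .channel(NioSocketChannel.class)
                // A no-op handler just to satisfy the bootstrap.
                .handler(new ChannelInboundHandlerAdapter());
        // Register a listener instead of blocking until the connect completes.
        bootstrap.connect("localhost", 9627)
                .addListener((ChannelFutureListener) future -> {
                    if (future.isSuccess()) {
                        System.out.println("Connected to server.");
                    } else {
                        System.err.println("Connect failed: " + future.cause());
                        workerGroup.shutdownGracefully();
                    }
                });
    }

}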
RpcClientHandler
The client-side handler class is also simple; we leave it empty for now.
/*
 * Copyright (c) 2019. houbinbin Inc.
 * rpc All rights reserved.
 */
package com.github.houbb.rpc.client.handler;

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

/**
 * <p> Client-side handler. </p>
 *
 * <pre> Created: 2019/10/16 11:30 PM </pre>
 * <pre> Project: rpc </pre>
 *
 * @author houbinbin
 * @since 0.0.2
 */
public class RpcClientHandler extends SimpleChannelInboundHandler {

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
        // do nothing.
    }

}
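As on the server side, a slightly later version might at least log the connection life cycle; a minimal sketch, assuming the same Log facade as the other classes:

package com.github.houbb.rpc.client.handler;

import com.github.houbb.log.integration.core.Log;
import com.github.houbb.log.integration.core.LogFactory;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;

public class RpcClientHandler extends SimpleChannelInboundHandler {

    private static final Log log = LogFactory.getLog(RpcClientHandler.class);

    @Override
    public void channelActive(ChannelHandlerContext ctx) throws Exception {
        // Fires once the TCP connection to the server is established.
        log.info("[Client] channel active: " + ctx.channel().remoteAddress());
    }

    @Override
    protected void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
        // Fires for every inbound message; still no business logic.
        log.info("[Client] channel read msg: " + msg);
    }

}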
Start the test
Server
Start the server first.
Client
Then start the client and connect to the server; the implementation is as follows:
/**
 * Client startup test.
 * @param args arguments
 */
public static void main(String[] args) {
    new RpcClient().start();
}
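If you prefer to verify both ends from a single JVM, the two threads can also be started together. A small sketch; the Thread.sleep is only a crude way to give the server time to bind before the client connects:

public static void main(String[] args) throws InterruptedException {
    new RpcServer().start();
    // Crude wait so the server is bound before the client tries to connect.
    Thread.sleep(1000);
    new RpcClient().start();
}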
Summary
To make learning easier, the source code above is open source:
https://github.com/houbb/rpc
I'm Lao Ma (老马), and I look forward to seeing you again next time.