Problem description
Why won't the EventLoopGroup shut down? After a server and a client built with Netty finish communicating, I close the Channel and call shutdownGracefully() on the EventLoopGroups, yet the main thread appears to stay blocked and the process never exits. I also tried Netty's official telnet example: after the client logic completes, the JVM still does not exit. The official documentation says nothing about this. Why does it happen?
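To be precise about what "shutdown" means here: shutdownGracefully() only initiates shutdown and returns a future. Below is a minimal sketch (the class name is just for illustration) of awaiting that future explicitly so that the event loop threads are known to have terminated; note that this guarantees nothing about other threads Netty may have started.

import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;

public class ShutdownSketch {
    public static void main(String[] args) {
        EventLoopGroup group = new NioEventLoopGroup(1);
        // shutdownGracefully() only initiates shutdown; block on the returned
        // future to know the event loop threads have actually terminated.
        group.shutdownGracefully().syncUninterruptibly();
        System.out.println("event loop terminated: " + group.isTerminated());
    }
}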
Reproduction code
I reproduce the problem with a minimal client/server; the code is below:
server
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.util.ReferenceCountUtil;

public class TestServer {

    public static final int PORT = 12345;

    public static void main(String[] args) throws InterruptedException {
        ServerBootstrap bootstrap = new ServerBootstrap();
        NioEventLoopGroup bossGroup = new NioEventLoopGroup(1);
        NioEventLoopGroup workerGroup = new NioEventLoopGroup(1);
        bootstrap.group(bossGroup, workerGroup)
                .channel(NioServerSocketChannel.class)
                .childHandler(new Initializer());
        ChannelFuture future = bootstrap.bind(PORT).sync();
        // Block until the server channel is closed, then shut down both groups.
        future.channel().closeFuture().sync();
        bossGroup.shutdownGracefully();
        workerGroup.shutdownGracefully();
    }

    static class Initializer extends ChannelInitializer<SocketChannel> {
        private static final StringDecoder DECODER = new StringDecoder();
        private static final StringEncoder ENCODER = new StringEncoder();

        @Override
        protected void initChannel(SocketChannel channel) throws Exception {
            ChannelPipeline pipeline = channel.pipeline();
            pipeline.addLast(new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
            pipeline.addLast(DECODER);
            pipeline.addLast(ENCODER);
            pipeline.addLast(new Handler());
        }
    }

    static class Handler extends SimpleChannelInboundHandler<String> {
        @Override
        protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception {
            System.out.println("server:" + msg);
            // Echo the line back and close the connection.
            ctx.writeAndFlush(msg + "\r\n");
            ReferenceCountUtil.release(msg);
            ctx.close();
        }
    }
}
client
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import io.netty.util.ReferenceCountUtil;

public class TestClient {

    public static void main(String[] args) throws InterruptedException {
        Bootstrap bootstrap = new Bootstrap();
        EventLoopGroup group = new NioEventLoopGroup(1);
        ChannelFuture future = bootstrap.group(group)
                .handler(new Initializer())
                .option(ChannelOption.SO_KEEPALIVE, true)
                .channel(NioSocketChannel.class)
                .connect("127.0.0.1", TestServer.PORT);
        future.sync().channel().writeAndFlush("abcdef\r\n");
        // Wait until the handler closes the channel, then shut the group down.
        future.channel().closeFuture().sync();
        group.shutdownGracefully();
        System.out.println("closed: " + !future.channel().isOpen() + "/shutdown:" + group.isShutdown());
    }

    static class Initializer extends ChannelInitializer<SocketChannel> {
        private static final StringDecoder DECODER = new StringDecoder();
        private static final StringEncoder ENCODER = new StringEncoder();

        @Override
        protected void initChannel(SocketChannel channel) throws Exception {
            ChannelPipeline pipeline = channel.pipeline();
            pipeline.addLast(new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
            pipeline.addLast(DECODER);
            pipeline.addLast(ENCODER);
            pipeline.addLast(new Handler());
        }
    }

    static class Handler extends SimpleChannelInboundHandler<String> {
        @Override
        protected void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception {
            System.out.println("client:" + msg);
            ctx.writeAndFlush(msg);
            ReferenceCountUtil.release(msg);
            ctx.close();
        }
    }
}
Thread dump
2018-02-06 11:17:09
Full thread dump Java HotSpot(TM) 64-Bit Server VM (25.121-b13 mixed mode):
"Attach Listener" #15 daemon prio=9 os_prio=31 tid=0x00007fd4cb8e1000 nid=0x1307 waiting on condition [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"DestroyJavaVM" #13 prio=5 os_prio=31 tid=0x00007fd4ca801000 nid=0xe03 waiting on condition [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"ObjectCleanerThread" #11 prio=1 os_prio=31 tid=0x00007fd4cb13d000 nid=0x4d03 in Object.wait() [0x000070000a133000]
java.lang.Thread.State: TIMED_WAITING (on object monitor)
at java.lang.Object.wait(Native Method)
- waiting on <0x00000007963ba400> (a java.lang.ref.ReferenceQueue$Lock)
at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:143)
- locked <0x00000007963ba400> (a java.lang.ref.ReferenceQueue$Lock)
at io.netty.util.internal.ObjectCleaner$1.run(ObjectCleaner.java:52)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:745)
Locked ownable synchronizers:
- None
"Monitor Ctrl-Break" #9 daemon prio=5 os_prio=31 tid=0x00007fd4ca82d000 nid=0x4b03 runnable [0x000070000a030000]
java.lang.Thread.State: RUNNABLE
at java.net.PlainSocketImpl.socketAccept(Native Method)
at java.net.AbstractPlainSocketImpl.accept(AbstractPlainSocketImpl.java:409)
at java.net.ServerSocket.implAccept(ServerSocket.java:545)
at java.net.ServerSocket.accept(ServerSocket.java:513)
at com.intellij.rt.execution.application.AppMain$1.run(AppMain.java:79)
at java.lang.Thread.run(Thread.java:745)
Locked ownable synchronizers:
- None
"Service Thread" #8 daemon prio=9 os_prio=31 tid=0x00007fd4cb00e800 nid=0x4703 runnable [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"C1 CompilerThread2" #7 daemon prio=9 os_prio=31 tid=0x00007fd4cb00c000 nid=0x4503 waiting on condition [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"C2 CompilerThread1" #6 daemon prio=9 os_prio=31 tid=0x00007fd4cb003000 nid=0x4303 waiting on condition [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"C2 CompilerThread0" #5 daemon prio=9 os_prio=31 tid=0x00007fd4ca843800 nid=0x4103 waiting on condition [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"Signal Dispatcher" #4 daemon prio=9 os_prio=31 tid=0x00007fd4ca822800 nid=0x3f07 runnable [0x0000000000000000]
java.lang.Thread.State: RUNNABLE
Locked ownable synchronizers:
- None
"Finalizer" #3 daemon prio=8 os_prio=31 tid=0x00007fd4cb04d800 nid=0x3203 in Object.wait() [0x000070000991b000]
java.lang.Thread.State: WAITING (on object monitor)
at java.lang.Object.wait(Native Method)
- waiting on <0x0000000795588ec8> (a java.lang.ref.ReferenceQueue$Lock)
at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:143)
- locked <0x0000000795588ec8> (a java.lang.ref.ReferenceQueue$Lock)
at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:164)
at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:209)
Locked ownable synchronizers:
- None
"Reference Handler" #2 daemon prio=10 os_prio=31 tid=0x00007fd4ca017000 nid=0x3003 in Object.wait() [0x0000700009818000]
java.lang.Thread.State: WAITING (on object monitor)
at java.lang.Object.wait(Native Method)
- waiting on <0x0000000795586b68> (a java.lang.ref.Reference$Lock)
at java.lang.Object.wait(Object.java:502)
at java.lang.ref.Reference.tryHandlePending(Reference.java:191)
- locked <0x0000000795586b68> (a java.lang.ref.Reference$Lock)
at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:153)
Locked ownable synchronizers:
- None
"VM Thread" os_prio=31 tid=0x00007fd4ca80f800 nid=0x2e03 runnable
"GC task thread#0 (ParallelGC)" os_prio=31 tid=0x00007fd4ca812800 nid=0x2607 runnable
"GC task thread#1 (ParallelGC)" os_prio=31 tid=0x00007fd4ca809800 nid=0x2803 runnable
"GC task thread#2 (ParallelGC)" os_prio=31 tid=0x00007fd4ca80a800 nid=0x2a03 runnable
"GC task thread#3 (ParallelGC)" os_prio=31 tid=0x00007fd4ca80b000 nid=0x2c03 runnable
"VM Periodic Task Thread" os_prio=31 tid=0x00007fd4ca82a000 nid=0x4903 waiting on condition
JNI global references: 349
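Another way to see which threads keep the JVM alive, without taking a full thread dump, is to list the live non-daemon threads after the groups have been shut down; a small helper sketch (the class and method names are illustrative, not part of the reproduction code):

public final class ThreadDebug {
    private ThreadDebug() {}

    // The JVM exits only when no non-daemon threads remain, so print the survivors.
    public static void printNonDaemonThreads() {
        for (Thread t : Thread.getAllStackTraces().keySet()) {
            if (!t.isDaemon()) {
                System.out.println("non-daemon thread still alive: " + t.getName());
            }
        }
    }
}

Called at the end of TestClient.main, it should list the same survivors as the dump above, i.e. the ObjectCleanerThread (besides main itself).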
Issue
This is a bug in older Netty versions: the ObjectCleaner thread is not created as a daemon thread, so the JVM cannot exit even after main() returns (the "ObjectCleanerThread" entry in the dump above is the culprit). Upgrading to Netty 4.1.21 resolves it.
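After upgrading, the Netty version actually on the classpath can be confirmed at runtime; a minimal sketch using io.netty.util.Version.identify() from netty-common (the class name PrintNettyVersion is illustrative):

import io.netty.util.Version;

import java.util.Map;

public class PrintNettyVersion {
    public static void main(String[] args) {
        // Version.identify() reads the version metadata embedded in the Netty jars.
        for (Map.Entry<String, Version> e : Version.identify().entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue().artifactVersion());
        }
    }
}

With 4.1.21 or later on the classpath, the ObjectCleaner thread is created as a daemon thread, so it no longer prevents the JVM from exiting once main returns.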