麻烦各位大神帮我指出问题或可优化的地方
处理的流程:
- 调用播放方法
- 后台回调播放通知,打开播放界面,并在Surface准备好后直接去渲染
- 在停止播放前,后台会一直回调解码后的帧数据
- 前端就需要处理帧数据进行渲染播放
我当前的处理方式:
Call.java
与c层通讯的类,调用接口和回调数据
public class Call {
/**
* 根据资源id存放,jni回调的解码数据
*/
public static HashMap<Integer, LinkedBlockingQueue<MediaBean>> decodeMap = new HashMap<>();
/**
* 对象池最大容量
*/
public static final int CAPACITY = 50;
/**
* 解码数据的对象池
*/
public static Pools.SynchronizedPool<MediaBean> synchronizedPool;
/**
* 存放后台回调解码后的数据,固定大小,超过的不处理
*/
private ByteBuffer m_dbuf = ByteBuffer.allocateDirect(1024 * 500);
private ByteBuffer m_dexbuf = ByteBuffer.allocateDirect(600);
/**
* 使用哪种方式处理解码数据
*/
private static boolean useByteBuffer = true;
static {
System.loadLibrary("avcodec-57");
System.loadLibrary("avdevice-57");
System.loadLibrary("avfilter-6");
System.loadLibrary("avformat-57");
System.loadLibrary("avutil-55");
System.loadLibrary("postproc-54");
System.loadLibrary("swresample-2");
System.loadLibrary("swscale-4");
System.loadLibrary("SDL2");
System.loadLibrary("main");
System.loadLibrary("NetClient");
System.loadLibrary("Codec");
System.loadLibrary("ExecProc");
System.loadLibrary("Device-OpenSles");
System.loadLibrary("meetcoreAnd");
System.loadLibrary("PBmeetcoreAnd");
System.loadLibrary("meetAnd");
System.loadLibrary("native-lib");
System.loadLibrary("z");
}
private Call() {
//创建好12个通道的解码数据存放队列
decodeMap.put(0, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(1, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(2, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(3, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(4, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(5, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(6, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(7, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(8, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(9, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(10, new LinkedBlockingQueue<>(CAPACITY));
decodeMap.put(11, new LinkedBlockingQueue<>(CAPACITY));
}
private static final class MagHolder {
static final Call mag = new Call();
}
public static Call getInstance() {
return MagHolder.mag;
}
/**
* 初始化接口
*
* @param data 参数
* @return 成功返回0 失败返回-1
*/
public native int Init_walletSys(byte[] data);
public void setDirectBuf() {
if (useByteBuffer) {
SetDirectBuf(m_dbuf, m_dexbuf);
}
}
/**
* 初始化后立马调用;传递给c层,帧数据写入到里面,避免多次创建byte数组
*
* @param dbuf 存放帧数据
* @param dexbuf 存放配置数据(csd-0 csd-1)
*/
public native void SetDirectBuf(ByteBuffer dbuf, ByteBuffer dexbuf);
/**
* 功能api
*
* @param type 类型
* @param method 方法
* @param data 数据
* @return 查询类方法才有数据返回,失败返回null数组
*/
public native byte[] call_method(int type, int method, byte[] data);
/**
* 初始化桌面、摄像头采集
*
* @param type 传0
* @param channelindex 流通道索引值 =2 屏幕,=3 摄像头
* @return 成功返回0 失败返回-1
*/
public native int InitAndCapture(int type, int channelindex);
/**
* 发送帧数据
*
* @param type 流类型 =2 屏幕,=3 摄像头
* @param iskeyframe =1关键帧,=0其它帧
* @param pts {@link MediaCodec.BufferInfo#presentationTimeUs}
* @param data 帧数据
* @return 成功返回0 失败返回-1
*/
public native int call(int type, int iskeyframe, long pts, byte[] data);
public native byte[] NV21ToI420(byte[] data, int w, int h);
public native byte[] NV21ToNV12(byte[] data, int w, int h);
public native byte[] YV12ToNV12(byte[] data, int w, int h);
/**
* 回调解码后的yuv格式视频数据
*
* @param res 播放资源id
*/
public int callback_yuvdisplay(int res, int w, int h, byte[] y, byte[] u, byte[] v) {
LogUtils.v("videodata", "callback_yuvdisplay --> 后台YUV数据 res=" + res + ",y:" + y.length + ",u:" + u.length + ",v:" + v.length);
EventBus.getDefault().post(new EventMessage.Builder().type(EventType.BUS_YUV_DISPLAY).objects(res, w, h, y, u, v).build());
return 0;
}
/**
* 初始化成功后如果不调用 {@link #SetDirectBuf(ByteBuffer dbuf, ByteBuffer dexbuf)},则解码数据通过这个方法回调
*
* @param res 播放资源ID
* @param codecid 解码器类型 (h264=27,h265=173,mpeg4=12,vp8=139,vp9=167)
* @param w 视频的宽
* @param h 视频的高
* @param packet 视频帧
* @param pts {@link MediaCodec.BufferInfo#presentationTimeUs}
* @param codecdata 配置数据
*/
public int callback_videodecode(int isKeyframe, int res, int codecid, int w, int h, byte[] packet, long pts, byte[] codecdata) {
if (packet != null) {
if (packet.length > 1024 * 500) {
LogUtils.i("收到一个大数据 datalen:" + packet.length);
return 0;
}
if (codecdata.length > 600) {
LogUtils.i("收到一个大数据 codecdatalen:" + codecdata.length);
return 0;
}
LinkedBlockingQueue<MediaBean> decodeQueue = decodeMap.get(res);
if (synchronizedPool == null) {
synchronizedPool = new Pools.SynchronizedPool<>(CAPACITY);
}
MediaBean mediaBean = synchronizedPool.acquire();
if (mediaBean == null) {
LogUtils.i("--新建对象--" + packet.length);
mediaBean = new MediaBean();
}
mediaBean.setIsKeyFrame(isKeyframe);
mediaBean.setRes(res);
mediaBean.setCodecid(codecid);
mediaBean.setW(w);
mediaBean.setH(h);
mediaBean.setBytes(packet);
mediaBean.setPts(pts);
mediaBean.setCodecdata(codecdata);
if (decodeQueue != null) {
//超过了阈值,添加会失败
if (!decodeQueue.offer(mediaBean)) {
//添加失败就把最旧的数据删除后再添加
if (decodeQueue.poll() != null) {
boolean offer = decodeQueue.offer(mediaBean);
LogUtils.i("添加失败就把最旧的数据删除后再添加,offer=" + offer);
}
}
}
}
return 0;
}
/**
* 初始化成功后如果调用了 {@link #SetDirectBuf(ByteBuffer dbuf, ByteBuffer dexbuf)},则解码数据通过这个方法回调;优化之前在c层一直会创建数组的问题
*
* @param isKeyframe =1关键帧
* @param res 播放资源ID
* @param codecid 解码器类型 (h264=27,h265=173,mpeg4=12,vp8=139,vp9=167)
* @param w 视频的宽
* @param h 视频的高
* @param datalen 帧数据长度
* @param pts {@link MediaCodec.BufferInfo#presentationTimeUs}
* @param codecdatalen 配置数据长度
*/
public int callback_directvideodecode(int isKeyframe, int res, int codecid, int w, int h, int datalen, long pts, int codecdatalen) {
m_dbuf.position(0);
m_dbuf.limit(datalen);
m_dexbuf.position(0);
m_dexbuf.limit(codecdatalen);
if (datalen > 1024 * 500) {
LogUtils.i("收到一个大数据 datalen:" + datalen);
return 0;
}
if (codecdatalen > 600) {
LogUtils.i("收到一个大数据 codecdatalen:" + codecdatalen);
return 0;
}
LinkedBlockingQueue<MediaBean> decodeQueue = decodeMap.get(res);
if (synchronizedPool == null) {
synchronizedPool = new Pools.SynchronizedPool<>(CAPACITY);
}
MediaBean mediaBean = synchronizedPool.acquire();
if (mediaBean == null) {
LogUtils.i("create meidabean datalen=" + datalen + ",decodeQueue size:" + decodeQueue.size());
mediaBean = new MediaBean();
}
mediaBean.setIsKeyFrame(isKeyframe);
mediaBean.setRes(res);
mediaBean.setCodecid(codecid);
mediaBean.setW(w);
mediaBean.setH(h);
mediaBean.setPts(pts);
mediaBean.setPacketBuffer(m_dbuf);
mediaBean.setCodecbf(m_dexbuf);
if (decodeQueue != null) {
//超过了阈值,添加会失败
if (!decodeQueue.offer(mediaBean)) {
//添加失败就把最旧的数据删除后再添加
if (decodeQueue.poll() != null) {
boolean offer = decodeQueue.offer(mediaBean);
LogUtils.i("添加失败就把最旧的数据删除后再添加,offer=" + offer);
}
}
}
return 0;
}
/**
* 功能接口回调接口
*
* @param type 功能类型
* @param method 功能类型的方法
* @param data 方法需要的参数
* @param datalen data有数据时 datalen就有长度
* @return 返回0即可
*/
public int callback_method(int type, int method, byte[] data, int datalen) {
EventBus.getDefault().post(new EventMessage.Builder().type(type).method(method).objects(data, datalen).build());
return 0;
}
public static class MediaBean {
@Deprecated
int iskeyframe;
int res;
int codecid;
int w;
int h;
long pts;
byte[] bytes;
int packetSize;
byte[] codecdata;
int codecdataSize;
public MediaBean() {
if (useByteBuffer) {
bytes = new byte[1024 * 500];
codecdata = new byte[600];
}
}
public boolean isKeyFrame() {
return (iskeyframe & 4) == 4;
}
public int getIsKeyFrame() {
return iskeyframe;
}
public void setIsKeyFrame(int isKeyFrame) {
this.iskeyframe = isKeyFrame;
}
public int getRes() {
return res;
}
public void setRes(int res) {
this.res = res;
}
public int getCodecid() {
return codecid;
}
public void setCodecid(int codecid) {
this.codecid = codecid;
}
public int getW() {
return w;
}
public void setW(int w) {
this.w = w;
}
public int getH() {
return h;
}
public void setH(int h) {
this.h = h;
}
public long getPts() {
return pts;
}
public void setPts(long pts) {
this.pts = pts;
}
public byte[] getBytes() {
return bytes;
}
public void setBytes(byte[] bytes) {
if (!useByteBuffer) {
this.bytes = bytes;
} else {
System.arraycopy(bytes, 0, this.bytes, 0, bytes.length);
}
this.packetSize = bytes.length;
}
public int getPacketSize() {
return packetSize;
}
public void setPacketSize(int packetSize) {
this.packetSize = packetSize;
}
public byte[] getCodecdata() {
return codecdata;
}
public void setCodecdata(byte[] codecdata) {
if (!useByteBuffer) {
this.codecdata = codecdata;
} else {
System.arraycopy(codecdata, 0, this.codecdata, 0, codecdata.length);
}
this.codecdataSize = codecdata.length;
}
public void setPacketBuffer(ByteBuffer packetBuffer) {
this.packetSize = packetBuffer.remaining();
packetBuffer.get(bytes, 0, this.packetSize);
}
public void setCodecbf(ByteBuffer codecbf) {
this.codecdataSize = codecbf.remaining();
codecbf.get(codecdata, 0, this.codecdataSize);
}
}
}
自定义播放view
抽取出简易版本,GLRenderer使用的是:https://github.com/wanliyang1990/wlplayer/blob/master/wlplayer/libwlplayer/src/main/java/com/ywl5320/opengles/WlGlRender.java
public class PlayerView extends GLSurfaceView {
private int resourceId;
private PlayerView.DecodeThread decodeThread;
private Surface mSurface;
private GLRenderer glRender;
private final AtomicBoolean mIsRunning = new AtomicBoolean(true);
private PlayerView.DecodeStatusListener mCallback;
public PlayerView(Context context) {
this(context, null);
}
public PlayerView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
initRender();
}
private void initRender() {
LogUtils.i("---initRender---" + this);
glRender = new GLRenderer(getContext());
setRenderer(glRender);
/**
* 设置渲染模式:
* 1.手动刷新模式 RENDERMODE_WHEN_DIRTY:需要手动调用 glSurfaceView.requestRender() 才会进行更新
* 2.自动刷新模式 RENDERMODE_CONTINUOUSLY:自动不停的渲染
*/
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
glRender.setWlOnRenderRefreshListener(new WlOnRenderRefreshListener() {
@Override
public void onRefresh() {
requestRender();
if (mSurface == null) {
LogUtils.i("mSurface is null");
}
}
});
}
public void setCodecType(@RendererType int codecType) {
if (glRender == null) {
initRender();
}
glRender.setRenderType(codecType);
requestRender();
}
public void setFrameData(int w, int h, byte[] y, byte[] u, byte[] v) {
setCodecType(GLRenderer.RENDER_TYPE_YUV);
glRender.setFrameData(w, h, y, u, v);
requestRender();
}
public void setOnGlSurfaceViewOnCreateListener(WlOnGlSurfaceViewOnCreateListener onGlSurfaceViewOnCreateListener) {
if (glRender != null) {
glRender.setWlOnGlSurfaceViewOnCreateListener(onGlSurfaceViewOnCreateListener);
}
}
public void setResId(int resourceId) {
this.resourceId = resourceId;
}
public int getResId() {
return resourceId;
}
public void setSurface(Surface surface) {
this.mSurface = surface;
LogUtils.i("set Surface " + mSurface);
}
public Surface getSurface() {
return mSurface;
}
public void cutVideoImg() {
if (glRender != null) {
glRender.cutVideoImg();
requestRender();
}
}
public void destroy() {
LogUtils.i("destroy glRender=" + (glRender != null));
stopDecode();
if (glRender != null) {
setOnGlSurfaceViewOnCreateListener(null);
glRender.setWlOnRenderRefreshListener(null);
destroyDrawingCache();
requestRender();
glRender.destroy();
glRender = null;
}
}
public void startDecode() {
if (decodeThread == null) {
mIsRunning.set(true);
setCodecType(GLRenderer.RENDER_TYPE_DECODE);
decodeThread = new PlayerView.DecodeThread("pure-" + resourceId);
LogUtils.i("---startDecode---" + decodeThread);
decodeThread.start();
}
}
public void stopDecode() {
if (decodeThread != null) {
LogUtils.i("---stopDecode---" + decodeThread);
mIsRunning.set(false);
decodeThread.interrupt();
decodeThread = null;
} else {
mCallback.onDestroy();
}
}
private MediaCodec mediaCodec;
private final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
private final LinkedBlockingQueue<MediaBean> cacheQueue = new LinkedBlockingQueue<>(10);
private String saveMimeType = "";
private int initW;
private int initH;
/**
* <ol>
* <li>当通过 MediaCodec.createByCodecName(...) or MediaCodec.createDecoderByType(...) or MediaCodec.createEncoderByType(...)三种方法中的任一种创建一个MediaCodec对象实例后,Codec将会处于 Uninitialized 状态;</li>
* <li>当你调用 MediaCodec.configure(...)方法对Codec进行配置后,Codec将进入 Configured 状态;</li>
* <li>之后可以调用 MediaCodec.start() 方法启动Codec,Codec会转入 Executing 状态,start后Codec立即进入 Flushed 子状态,此时的Codec拥有所有的input and output buffers,Client无法操作这些buffers;</li>
* <li>一旦第一个input buffer 出队列,也即Client通过调用 MediaCodec.dequeueInputBuffer(...)请求得到了一个有效的input buffer index, Codec立即进入到了 Running 子状态,在这个状态下Codec会进行实际的数据处理(解码、编码)工作,度过它生命周期的主要阶段;</li>
* <li>当输入端入队列一个带有 end-of-stream 标记的input buffer时(queueInputBuffer(EOS)),Codec将转入 End of Stream 子状态。在此状态下,Codec不再接受新的input buffer数据,但仍会处理之前入队列而未处理完的input buffer并产生output buffer,直到end-of-stream 标记到达输出端,数据处理的过程也随即终止;</li>
* <li>在 Executing状态下可以调用 MediaCodec.flush()方法使Codec进入 Flushed 子状态;</li>
* <li>在 Executing状态下可以调用 MediaCodec.stop()方法使Codec进入 Uninitialized 子状态,可以对Codec进行重新配置;</li>
* <li>极少数情况下Codec会遇到错误进入 Error 状态,可以调用 MediaCodec.reset() 方法使其再次可用;</li>
* <li>当MediaCodec数据处理任务完成时或不再需要MediaCodec时,可使用 MediaCodec.release()方法释放其资源。</li>
* </ol>
*/
class DecodeThread extends Thread {
public DecodeThread(@NonNull String name) {
super(name);
}
@Override
public void run() {
try {
LinkedBlockingQueue<MediaBean> decodeQueue = decodeMap.get(resourceId);
byte[] packet;
int packetSize;
int width, height, codecid;
long pts;
String mimeType;
MediaBean bean;
int cacheQueueSize, decodeQueueSize;
while (mIsRunning.get()) {
cacheQueueSize = cacheQueue.size();
decodeQueueSize = decodeQueue.size();
if (mSurface == null) {
if (mCallback != null) {
updateStatus(0);
}
if (decodeQueueSize > 0) {
//回收对象
synchronizedPool.release(decodeQueue.poll());
}
//跳过循环
continue;
} else {
updateStatus(1);
}
if (decodeQueueSize > 0 || cacheQueueSize > 0) {
//先取缓存的数据
bean = cacheQueueSize > 0 ? cacheQueue.peek() : decodeQueue.poll();
packet = bean.getBytes();
packetSize = bean.getPacketSize();
width = bean.getW();
height = bean.getH();
pts = bean.getPts();
codecid = bean.getCodecid();
mimeType = Constant.getMimeType(codecid);
//已经初始化过了,但是参数有变化的时候需要重新初始化
if (!saveMimeType.equals(mimeType) || initW != width || initH != height || mediaCodec == null) {
//进入初始化MediaCodec
if (mediaCodec != null) {
//调用stop方法使其进入 uninitialzed 状态,这样才可以重新配置MediaCodec
mediaCodec.stop();
}
saveMimeType = mimeType;
//初始化MediaCodec
byte[] codecdata1 = new byte[bean.getCodecdataSize()];
System.arraycopy(codecdata1, 0, bean.getCodecdata(), 0, codecdata1.length);
initCodec(width, height, codecdata1);
}
if (mediaCodec == null) {//初始化失败
if (cacheQueueSize > 0) {//使用的是缓存中的数据
//把它删掉
MediaBean poll = cacheQueue.poll();
//回收对象,已经满了再调用offer添加数据会返回false
synchronizedPool.release(poll);
} else {//使用的是新数据
//放入缓存队列
if (!cacheQueue.offer(bean)) {
//已经满了,把旧的删除掉
MediaBean poll = cacheQueue.poll();
//回收对象,已经满了再调用offer添加数据会返回false
synchronizedPool.release(poll);
//然后将这个新的放入
cacheQueue.offer(bean);
}
}
continue;
}
if (mediaCodecDecode(packet, pts, packetSize)) {
if (cacheQueueSize > 0) {
//使用缓存数据,编码成功就丢掉
cacheQueue.poll();
}
//回收对象,已经满了再调用offer添加数据会返回false
synchronizedPool.release(bean);
} else {//编码失败
if (cacheQueueSize == 0) {//使用的是新数据
LogUtils.i("after encoding fails, put it into the cache queue.");
//放入缓存队列
if (!cacheQueue.offer(bean)) {
//已经满了,把旧的删除掉
MediaBean poll = cacheQueue.poll();
//回收对象,已经满了再调用offer添加数据会返回false
synchronizedPool.release(poll);
//然后将这个新的放入
cacheQueue.offer(bean);
}
}
}
} else {
if (mediaCodec != null) {
//外部没有数据了,还需要渲染已经放入解码器中的数据
mediaCodecDecode(null, 0, 0);
}
}
}
cacheQueue.clear();//清空缓存队列
releaseMediaCodec();
synchronizedPool = null;
LogUtils.i("thread id=" + Thread.currentThread().getId() + ",end decoding decodeQueue=" + decodeQueue.size());
decodeQueue.clear();//清空接收队列,避免下次存放时留下脏数据
mCallback.onDestroy();
} catch (Exception e) {
LogUtils.e(e);
}
}
/**
* 初始化解码器
*
* @param w 宽
* @param h 高
* @param codecdata pps/sps 编码配置数据
*/
private void initCodec(int w, int h, byte[] codecdata) {
try {
updateStatus(2);
//创建了一个解码器,此时编解码器处于未初始化状态(Uninitialized)
mediaCodec = MediaCodec.createDecoderByType(saveMimeType);
LogUtils.i("initCodec : ---start--- mediaCodec=" + mediaCodec + ",mSurface=" + mSurface);
//宽高要判断是否是解码器所支持的范围
MediaCodecInfo.CodecCapabilities capabilitiesForType = mediaCodec.getCodecInfo().getCapabilitiesForType(saveMimeType);
MediaCodecInfo.VideoCapabilities videoCapabilities = capabilitiesForType.getVideoCapabilities();
Range<Integer> supportedWidths = videoCapabilities.getSupportedWidths();
Range<Integer> supportedHeights = videoCapabilities.getSupportedHeights();
initW = w;
initH = h;
if (w < h) {
LogUtils.i("宽度比高度小:w=" + w + ",h=" + h);
int temp = w;
w = h;
h = temp;
}
w = supportedWidths.clamp(w);
h = supportedHeights.clamp(h);
LogUtils.d("initCodec 可支持的宽高:" + supportedWidths + "、" + supportedHeights + ",initW=" + initW + ",initH=" + initH);
MediaFormat mediaFormat = MediaFormat.createVideoFormat(saveMimeType, w, h);
mediaFormat.setInteger(MediaFormat.KEY_WIDTH, w);
mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, h);
if (codecdata != null && codecdata.length > 0) {
LogUtils.i("initCodec 设置 csd= " + Arrays.toString(codecdata));
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(codecdata));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(codecdata));
}
boolean formatSupported = capabilitiesForType.isFormatSupported(mediaFormat);
LogUtils.i("initCodec : 是否支持 --> " + formatSupported + ",mediaFormat=" + mediaFormat);
//2.对编解码器进行配置,这将使编解码器转为配置状态(Configured)
mediaCodec.configure(mediaFormat, mSurface, null, 0);
LogUtils.i("initCodec : ---configure--- ");
//3.调用start()方法使其转入执行状态(Executing)
mediaCodec.start();
//在 Executing状态下可以调用 MediaCodec.flush()方法使Codec进入 Flushed 子状态
//在 Executing状态下可以调用 MediaCodec.stop()方法使Codec进入 Uninitialized 子状态,可以对Codec进行重新配置;
LogUtils.i("initCodec : ---Executing Flushed --- ");
//要保持屏幕纵横比:此方法必须在configure和start之后执行才有效
//VIDEO_SCALING_MODE_SCALE_TO_FIT //默认铺满
//VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING //保持比例,多余会被裁剪
// mediaCodec.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
// LogUtils.i("initCodec : ---setVideoScalingMode--- ");
updateStatus(3);
} catch (Exception e) {
LogUtils.e(e);
}
}
/**
* 解码数据
*
* @param packet 帧数据包
* @param pts 此缓冲区的呈现时间戳(以微秒为单位)。这通常是应该呈现(渲染)此缓冲区的媒体时间。使用输出表面时,这将作为帧的时间戳传播(转换为纳秒后)
* @param packetSize 帧数据大小
* @return 成功将数据放入解码器则返回true
*/
private boolean mediaCodecDecode(byte[] packet, long pts, int packetSize) {
int inputBufferIndex = -1;
//判断解码器是否初始化完成
if (mediaCodec == null) {
LogUtils.e("mediaCodec not ready");
return false;
}
try {
if (packet != null) {
//队列中有视频帧,检查解码队列中是否有空闲可用的buffer,有则取视频帧送进去解码
inputBufferIndex = mediaCodec.dequeueInputBuffer(0);
if (inputBufferIndex >= 0) {
//Running子状态
//有空闲可用的解码buffer
ByteBuffer byteBuffer = mediaCodec.getInputBuffer(inputBufferIndex);
byteBuffer.clear();
byteBuffer.limit(packetSize);
byteBuffer.position(0);
//将视频队列中的头取出送到解码队列中
byteBuffer.put(packet, 0, packetSize);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, packetSize, pts, 0);
updateStatus(4);
}
}
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(info, 0);
if (outputBufferIndex >= 0) {
//返回输出缓冲区,如果索引不是dequeued输出缓冲区,或者编解码器配置了输出面,则返回null
ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outputBufferIndex);
if (outputBuffer != null) {
outputBuffer.position(info.offset);
outputBuffer.limit(info.offset + info.size);
}
//如果配置编码器时指定了有效的surface,传true将此输出缓冲区显示在surface
mediaCodec.releaseOutputBuffer(outputBufferIndex, true);
updateStatus(5);
}
} catch (IllegalStateException e) {
LogUtils.e(e);
mediaCodec = null;
}
return inputBufferIndex >= 0;
}
/**
* 释放资源
*/
private void releaseMediaCodec() {
if (mediaCodec != null) {
try {
//调用stop()方法使编解码器返回到未初始化状态(Uninitialized),此时这个编解码器可以再次重新配置
mediaCodec.stop();
//使用完编解码器后,你必须调用release()方法释放其资源
mediaCodec.release();
} catch (Exception e) {
LogUtils.e(e);
}
mediaCodec = null;
}
}
}
public void setDecodeStatusListener(PlayerView.DecodeStatusListener listener) {
mCallback = listener;
}
private int saveCode;
private void updateStatus(int code) {
if (mCallback != null) {
if (saveCode < code) {
saveCode = code;
mCallback.updateStatus(code);
}
}
}
public interface DecodeStatusListener {
void updateStatus(int code);
void onMediaCodecStatus(String status);
void onError(Exception e);
void onDestroy();
}
}
渲染类GLRenderer
public class GLRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private final String TAG = "GLRenderer-->";
private Context context;
private FloatBuffer vertexBuffer;
private final float[] vertexData = {
1f, 1f, 0f,
-1f, 1f, 0f,
1f, -1f, 0f,
-1f, -1f, 0f
};
private FloatBuffer textureBuffer;
private final float[] textureVertexData = {
1f, 0f,
0f, 0f,
1f, 1f,
0f, 1f
};
/**
* mediacodec
*/
private int programId_mediacodec;
private int aPositionHandle_mediacodec;
private int textureid_mediacodec;
private int uTextureSamplerHandle_mediacodec;
private int aTextureCoordHandle_mediacodec;
private SurfaceTexture surfaceTexture;
private Surface surface;
/**
* yuv
*/
private int programId_yuv;
private int aPositionHandle_yuv;
private int aTextureCoordHandle_yuv;
private int sampler_y;
private int sampler_u;
private int sampler_v;
private int[] textureid_yuv;
int w;
int h;
Buffer y;
Buffer u;
Buffer v;
/**
* release
*/
private int programId_stop;
private int aPositionHandle_stop;
private int aTextureCoordHandle_stop;
public static final int RENDER_TYPE_STOP = 0;
public static final int RENDER_TYPE_DECODE = 1;
public static final int RENDER_TYPE_YUV = 2;
int renderType = -1;
private boolean cutimg = false;
private int sWidth = 0;
private int sHeight = 0;
private WlOnGlSurfaceViewOnCreateListener wlOnGlSurfaceViewOncreateListener;
private WlOnRenderRefreshListener wlOnRenderRefreshListener;
public GLRenderer(Context context) {
WeakReference<Context> mContext = new WeakReference<>(context);
this.context = mContext.get();
// this.context = context;
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)//分配新的直接字节缓冲区
.order(ByteOrder.nativeOrder())//修改此缓冲区的字节顺序
.asFloatBuffer()//创建此字节缓冲区的视图,作为 float 缓冲区
.put(vertexData);//将4个包含给定float值的字节按照当前的字节顺序写入到此缓冲区的当前位置,然后然后将该位置增加 4。
/*
* 设置此缓冲区的位置。如果标记已定义且大于新的位置,则丢弃该标记
* 新位置值;必须为 非负 且 不大于当前限制
*/
vertexBuffer.position(0);
textureBuffer = ByteBuffer.allocateDirect(textureVertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureVertexData);
textureBuffer.position(0);
}
public void setFrameData(int w, int h, byte[] by, byte[] bu, byte[] bv) {
this.w = w;
this.h = h;
this.y = ByteBuffer.wrap(by);
this.u = ByteBuffer.wrap(bu);
this.v = ByteBuffer.wrap(bv);
}
public void setRenderType(@RendererType int renderType) {
this.renderType = renderType;
if (renderType == RENDER_TYPE_STOP) {
LogUtils.i(TAG, "---清空屏幕---");
glClear();
}
}
public int getRenderType() {
return renderType;
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
LogUtils.i(TAG, "onSurfaceCreated : --> ");
initShader();
}
private void initShader() {
initMediacodecShader();
initYuvShader();
initStop();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
LogUtils.i(TAG, "onSurfaceChanged : --> " + "onSurfaceChanged, width:" + width + ",height :" + height);
sWidth = width;
sHeight = height;
//设置展示窗口
GLES20.glViewport(0, 0, width, height);
}
@Override
public void onDrawFrame(GL10 gl) {
if (renderType == RENDER_TYPE_DECODE) {
renderMediacodec();
} else if (renderType == RENDER_TYPE_YUV) {
renderYuv();
} else {
renderStop();
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
if (cutimg) {
cutimg = false;
Bitmap bitmap = cutBitmap(sWidth, sHeight);
if (wlOnGlSurfaceViewOncreateListener != null) {
wlOnGlSurfaceViewOncreateListener.onCutVideoImg(bitmap);
}
}
}
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
if (wlOnRenderRefreshListener != null) {
wlOnRenderRefreshListener.onRefresh();
}
}
public void setWlOnGlSurfaceViewOnCreateListener(WlOnGlSurfaceViewOnCreateListener wlOnGlSurfaceViewOnCreateListener) {
this.wlOnGlSurfaceViewOncreateListener = wlOnGlSurfaceViewOnCreateListener;
}
public void setWlOnRenderRefreshListener(WlOnRenderRefreshListener wlOnRenderRefreshListener) {
this.wlOnRenderRefreshListener = wlOnRenderRefreshListener;
}
/**
* 初始化硬件解码shader
*/
private void initMediacodecShader() {
//将glsl文件中的内容装换成字符串源码
String vertexShader = WlShaderUtils.readRawTextFile(context, R.raw.vertex_base);
String fragmentShader = WlShaderUtils.readRawTextFile(context, R.raw.fragment_mediacodec);
//创建空的OpenGL ES程序,并把着色器添加进去
programId_mediacodec = WlShaderUtils.createProgram(vertexShader, fragmentShader);
aPositionHandle_mediacodec = GLES20.glGetAttribLocation(programId_mediacodec, "av_Position");
aTextureCoordHandle_mediacodec = GLES20.glGetAttribLocation(programId_mediacodec, "af_Position");
uTextureSamplerHandle_mediacodec = GLES20.glGetUniformLocation(programId_mediacodec, "sTexture");
int[] textures = new int[1];
/**
* @n 给新创建的Texture返回n个唯一ID标识,这里只需要填1就行了
* @textures OpenGL会将Texture的ID存放到这个数组里,当然,textures.length >= n + offset
* @offset 参数textures的offset,textures.length >= n + offset
*/
GLES20.glGenTextures(1, textures, 0);
//获取唯一标识
textureid_mediacodec = textures[0];
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureid_mediacodec);
WlShaderUtils.checkGlError("glBindTexture mTextureID");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);
surfaceTexture = new SurfaceTexture(textureid_mediacodec);
surfaceTexture.setOnFrameAvailableListener(this);
surface = new Surface(surfaceTexture);
if (wlOnGlSurfaceViewOncreateListener != null) {
wlOnGlSurfaceViewOncreateListener.onGlSurfaceViewOnCreated(surface);
}
}
/**
* 使用硬件解码shader
*/
private void renderMediacodec() {
try {
// LogUtils.v(TAG, "onDrawFrame renderMediacodec -->");
GLES20.glUseProgram(programId_mediacodec);
surfaceTexture.updateTexImage();
vertexBuffer.position(0);
GLES20.glEnableVertexAttribArray(aPositionHandle_mediacodec);
GLES20.glVertexAttribPointer(aPositionHandle_mediacodec, 3, GLES20.GL_FLOAT, false,
12, vertexBuffer);
textureBuffer.position(0);
GLES20.glEnableVertexAttribArray(aTextureCoordHandle_mediacodec);
GLES20.glVertexAttribPointer(aTextureCoordHandle_mediacodec, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureid_mediacodec);
GLES20.glUniform1i(uTextureSamplerHandle_mediacodec, 0);
} catch (Exception e) {
LogUtils.e(e);
}
}
private void initYuvShader() {
String vertexShader = WlShaderUtils.readRawTextFile(context, R.raw.vertex_base);
String fragmentShader = WlShaderUtils.readRawTextFile(context, R.raw.fragment_yuv);
programId_yuv = WlShaderUtils.createProgram(vertexShader, fragmentShader);
aPositionHandle_yuv = GLES20.glGetAttribLocation(programId_yuv, "av_Position");
aTextureCoordHandle_yuv = GLES20.glGetAttribLocation(programId_yuv, "af_Position");
sampler_y = GLES20.glGetUniformLocation(programId_yuv, "sampler_y");
sampler_u = GLES20.glGetUniformLocation(programId_yuv, "sampler_u");
sampler_v = GLES20.glGetUniformLocation(programId_yuv, "sampler_v");
textureid_yuv = new int[3];
GLES20.glGenTextures(3, textureid_yuv, 0);
for (int i = 0; i < 3; i++) {
// 绑定纹理空间
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureid_yuv[i]);
//设置属性 当显示的纹理比加载的纹理大时 使用纹理坐标中最接近的若干个颜色 通过加权算法获得绘制颜色
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
// 比加载的小
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
// 如果纹理坐标超出范围 0,0-1,1 坐标会被截断在范围内
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
}
}
private void renderYuv() {
if (w > 0 && h > 0 && y != null && u != null && v != null) {
// LogUtils.v(TAG, "onDrawFrame renderYuv -->");
GLES20.glUseProgram(programId_yuv);
GLES20.glEnableVertexAttribArray(aPositionHandle_yuv);
GLES20.glVertexAttribPointer(aPositionHandle_yuv, 3, GLES20.GL_FLOAT, false,
12, vertexBuffer);
textureBuffer.position(0);
GLES20.glEnableVertexAttribArray(aTextureCoordHandle_yuv);
GLES20.glVertexAttribPointer(aTextureCoordHandle_yuv, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
//使 GL_TEXTURE0 单元 活跃 opengl最多支持16个纹理
//纹理单元是显卡中所有的可用于在shader中进行纹理采样的显存 数量与显卡类型相关,至少16个
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//绑定纹理空间 下面的操作就会作用在这个空间中
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureid_yuv[0]);
//创建一个2d纹理 使用亮度颜色模型并且纹理数据也是亮度颜色模型
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w, h, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);
//绑定采样器与纹理单元
GLES20.glUniform1i(sampler_y, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureid_yuv[1]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w / 2, h / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
u);
GLES20.glUniform1i(sampler_u, 1);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureid_yuv[2]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, w / 2, h / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE,
v);
GLES20.glUniform1i(sampler_v, 2);
y.clear();
u.clear();
v.clear();
y = null;
u = null;
v = null;
}
}
private void initStop() {
String vertexShader = WlShaderUtils.readRawTextFile(context, R.raw.vertex_base);
String fragmentShader = WlShaderUtils.readRawTextFile(context, R.raw.fragment_no);
programId_stop = WlShaderUtils.createProgram(vertexShader, fragmentShader);
aPositionHandle_stop = GLES20.glGetAttribLocation(programId_stop, "av_Position");
aTextureCoordHandle_stop = GLES20.glGetAttribLocation(programId_stop, "af_Position");
}
private void renderStop() {
GLES20.glUseProgram(programId_stop);
vertexBuffer.position(0);
GLES20.glEnableVertexAttribArray(aPositionHandle_stop);
GLES20.glVertexAttribPointer(aPositionHandle_stop, 3, GLES20.GL_FLOAT, false,
12, vertexBuffer);
textureBuffer.position(0);
GLES20.glEnableVertexAttribArray(aTextureCoordHandle_stop);
GLES20.glVertexAttribPointer(aTextureCoordHandle_stop, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
}
private Bitmap cutBitmap(int w, int h) {
int bitmapBuffer[] = new int[w * h];
int bitmapSource[] = new int[w * h];
IntBuffer intBuffer = IntBuffer.wrap(bitmapBuffer);
intBuffer.position(0);
try {
GLES20.glReadPixels(0, 0, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE,
intBuffer);
int offset1, offset2;
for (int i = 0; i < h; i++) {
offset1 = i * w;
offset2 = (h - i - 1) * w;
for (int j = 0; j < w; j++) {
int texturePixel = bitmapBuffer[offset1 + j];
int blue = (texturePixel >> 16) & 0xff;
int red = (texturePixel << 16) & 0x00ff0000;
int pixel = (texturePixel & 0xff00ff00) | red | blue;
bitmapSource[offset2 + j] = pixel;
}
}
} catch (GLException e) {
return null;
}
Bitmap bitmap = Bitmap.createBitmap(bitmapSource, w, h, Bitmap.Config.ARGB_8888);
intBuffer.clear();
return bitmap;
}
public void cutVideoImg() {
cutimg = true;
}
public void destroy() {
LogUtils.i("destroy 清除缓冲区");
setRenderType(RENDER_TYPE_STOP);
if (vertexBuffer != null) {
LogUtils.i("vertexBuffer clear");
vertexBuffer.clear();
}
if (textureBuffer != null) {
LogUtils.i("textureBuffer clear");
textureBuffer.clear();
}
if (surfaceTexture != null) {
LogUtils.i("surfaceTexture release");
surfaceTexture.setOnFrameAvailableListener(null);
surfaceTexture.release();
}
wlOnGlSurfaceViewOncreateListener = null;
wlOnRenderRefreshListener = null;
}
private void glClear() {
LogUtils.i("---glclear---");
GLES20.glClearDepthf(1);
GLES20.glClearStencil(1);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_STENCIL_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
GLES20.glClearColor(1f, 0f, 0f, 1f);
}
}