Android 集成 FFmpeg,将摄像头数据编码为 MPEG-TS 格式(mpegvideo 编码)时出错
第一步 初始化
Java_com_deerlive_jni_ffmpeg_FFmpegHandle_initVideo(JNIEnv *env, jobject instance,
jstring url_) {
const char *out_path = env->GetStringUTFChars(url_, 0);
logd(out_path);
//计算yuv数据的长度
yuv_width = width;
yuv_height = height;
y_length = width * height;
uv_length = width * height / 4;
av_register_all();
//推流就需要初始化网络协议
avformat_network_init();
//初始化AVFormatContext
avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_path);
if(!ofmt_ctx) {
loge("Could not create output context\n");
return -1;
}
//寻找编码器,这里用的就是x264的那个编码器了
pCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
if(!pCodec) {
loge("Can not find encoder!\n");
return -1;
}
//初始化编码器的context
pCodecCtx = avcodec_alloc_context3(pCodec);
pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P; //指定编码格式
pCodecCtx->width = width;
pCodecCtx->height = height;
pCodecCtx->time_base.num = 1;
pCodecCtx->time_base.den = fps;
pCodecCtx->bit_rate = 800000;
pCodecCtx->gop_size = 300;
if(ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
}
pCodecCtx->qmin = 10;
pCodecCtx->qmax = 51;
pCodecCtx->max_b_frames = 0;
AVDictionary *dicParams = NULL;
av_dict_set(&dicParams, "preset", "ultrafast", 0);
av_dict_set(&dicParams, "tune", "zerolatency", 0);
//打开编码器
if(avcodec_open2(pCodecCtx, pCodec, &dicParams) < 0) {
loge("Failed to open encoder!\n");
return -1;
}
//新建输出流
video_st = avformat_new_stream(ofmt_ctx, pCodec);
if(!video_st) {
loge("Failed allocation output stream\n");
return -1;
}
video_st->time_base.num = 1;
video_st->time_base.den = fps;
//复制一份编码器的配置给输出流
avcodec_parameters_from_context(video_st->codecpar, pCodecCtx);
//打开输出流
int ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_WRITE);
if(ret < 0) {
loge("Could not open output URL %s");
return -1;
}
// ret = avformat_write_header(ofmt_ctx, NULL);
// if(ret < 0) {
// loge("Error occurred when open output URL\n");
// return -1;
// }
pFrameYUV = av_frame_alloc();
uint8_t *out_buffer = (uint8_t *) av_malloc(av_image_get_buffer_size(pCodecCtx->pix_fmt, width, height, 1));
av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, pCodecCtx->pix_fmt, width, height, 1);
return 0;
}
第二步 获取摄像头原始数据 开始编码 (回调形式)
// Per-frame camera callback: converts one NV21 frame to YUV420P, encodes it
// and muxes the resulting packet.  Returns 0 on success (including the
// "encoder needs more input" case), -1 on send failure, -2 on receive failure.
extern "C"
JNIEXPORT jint JNICALL
Java_com_deerlive_jni_ffmpeg_FFmpegHandle_onFrameCallback(JNIEnv *env, jobject instance,
                                                          jbyteArray buffer_) {
    // NOTE(review): startTime is reset on EVERY frame; if it is meant to be the
    // stream start time it should be set only once (e.g. when count == 0) — confirm.
    startTime = av_gettime();

    jbyte *in = env->GetByteArrayElements(buffer_, NULL);
    int ret = 0;

    // Camera delivers NV21: a full Y plane, then interleaved VU byte pairs.
    // Convert to planar YUV420P: copy Y, de-interleave V -> data[2], U -> data[1].
    memcpy(pFrameYUV->data[0], in, y_length);                 // Y
    for (int i = 0; i < uv_length; i++) {
        pFrameYUV->data[2][i] = in[y_length + i * 2];         // V
        pFrameYUV->data[1][i] = in[y_length + i * 2 + 1];     // U
    }
    pFrameYUV->format = pCodecCtx->pix_fmt;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;
    pFrameYUV->pts = count;

    // Fresh packet; the encoder allocates the payload.
    av_init_packet(&enc_pkt);
    enc_pkt.data = NULL;
    enc_pkt.size = 0;

    ret = avcodec_send_frame(pCodecCtx, pFrameYUV);
    if (ret < 0) {
        logi("Error sending a frame for encoding\n");
        // FIX: do not fall through to receive_packet after a failed send,
        // and do not leak the pinned Java array (was missing on error paths).
        env->ReleaseByteArrayElements(buffer_, in, 0);
        return -1;
    }

    ret = avcodec_receive_packet(pCodecCtx, &enc_pkt);
    if (ret == AVERROR(EAGAIN)) {
        // FIX: EAGAIN is NOT an error — the encoder simply needs more input
        // before it can emit a packet.  The frame was consumed, so advance pts.
        count++;
        env->ReleaseByteArrayElements(buffer_, in, 0);
        return 0;
    }
    if (ret < 0 || enc_pkt.size <= 0) {
        loge("avcodec_receive_packet error");
        env->ReleaseByteArrayElements(buffer_, in, 0);   // FIX: was leaked here
        return -2;
    }

    enc_pkt.stream_index = video_st->index;
    AVRational time_base = ofmt_ctx->streams[0]->time_base;
    // FIX: guard against an unset framerate (0/1), which made av_q2d() return 0
    // and the duration computation below produce inf/garbage timestamps.
    AVRational frame_rate = pCodecCtx->framerate;
    if (frame_rate.num == 0 || frame_rate.den == 0) {
        frame_rate.num = fps;
        frame_rate.den = 1;
    }
    AVRational time_base_q = {1, AV_TIME_BASE};
    // Duration of one frame in AV_TIME_BASE units, then rescaled to the stream.
    int64_t calc_duration = (int64_t) ((double) AV_TIME_BASE / av_q2d(frame_rate));
    enc_pkt.pts = av_rescale_q(count * calc_duration, time_base_q, time_base);
    enc_pkt.dts = enc_pkt.pts;   // safe: max_b_frames == 0, so no reordering
    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
    enc_pkt.pos = -1;

    // Takes ownership of the packet payload and unrefs it.
    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
    if (ret != 0) {
        loge("av_interleaved_write_frame failed");
    }

    count++;
    env->ReleaseByteArrayElements(buffer_, in, 0);
    return 0;
}
报错信息 :
avcodec_send_frame send error
avcodec_receive_packet error
另外想问一下:在学习 FFmpeg 之前,需要先掌握哪些技术?