在线观看不卡亚洲电影_亚洲妓女99综合网_91青青青亚洲娱乐在线观看_日韩无码高清综合久久

鍍金池/ 問答/Android  網絡安全  HTML/ android ffmpeg 編碼攝像頭數據 mpegts格式 mpegv

android ffmpeg 編碼攝像頭數據 mpegts格式 mpegvideo編碼 出錯

android 集成ffmpeg 編碼攝像頭數據 mpegts格式 mpegvideo編碼 出錯
第一步 初始化

Java_com_deerlive_jni_ffmpeg_FFmpegHandle_initVideo(JNIEnv *env, jobject instance,
                                                          jstring url_) {
    const char *out_path = env->GetStringUTFChars(url_, 0);
    logd(out_path);

    //計(jì)算yuv數(shù)據(jù)的長度
    yuv_width = width;
    yuv_height = height;
    y_length = width * height;
    uv_length = width * height / 4;

    av_register_all();

    //推流就需要初始化網(wǎng)絡(luò)協(xié)議
    avformat_network_init();

    //初始化AVFormatContext
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_path);
    if(!ofmt_ctx) {
        loge("Could not create output context\n");
        return -1;
    }

    //尋找編碼器,這里用的就是x264的那個(gè)編碼器了
    pCodec = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
    if(!pCodec) {
        loge("Can not find encoder!\n");
        return -1;
    }

    //初始化編碼器的context
    pCodecCtx = avcodec_alloc_context3(pCodec);
    pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;  //指定編碼格式
    pCodecCtx->width = width;
    pCodecCtx->height = height;
    pCodecCtx->time_base.num = 1;
    pCodecCtx->time_base.den = fps;
    pCodecCtx->bit_rate = 800000;
    pCodecCtx->gop_size = 300;

    if(ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
        pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }

    pCodecCtx->qmin = 10;
    pCodecCtx->qmax = 51;

    pCodecCtx->max_b_frames = 0;

    AVDictionary *dicParams = NULL;
    av_dict_set(&dicParams, "preset", "ultrafast", 0);
    av_dict_set(&dicParams, "tune", "zerolatency", 0);

    //打開編碼器
    if(avcodec_open2(pCodecCtx, pCodec, &dicParams) < 0) {
        loge("Failed to open encoder!\n");
        return -1;
    }

    //新建輸出流
    video_st = avformat_new_stream(ofmt_ctx, pCodec);
    if(!video_st) {
        loge("Failed allocation output stream\n");
        return -1;
    }
    video_st->time_base.num = 1;
    video_st->time_base.den = fps;
    //復(fù)制一份編碼器的配置給輸出流
    avcodec_parameters_from_context(video_st->codecpar, pCodecCtx);

    //打開輸出流
    int ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_WRITE);
    if(ret < 0) {
        loge("Could not open output URL %s");
        return -1;
    }

   // ret = avformat_write_header(ofmt_ctx, NULL);
  // if(ret < 0) {
  //     loge("Error occurred when open output URL\n");
  //     return -1;
  // }

    pFrameYUV = av_frame_alloc();
    uint8_t *out_buffer = (uint8_t *) av_malloc(av_image_get_buffer_size(pCodecCtx->pix_fmt, width, height, 1));
    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, out_buffer, pCodecCtx->pix_fmt, width, height, 1);

    return 0;

}

第二步 獲取攝像頭原始數據 開始編碼 (回調形式)

extern "C"
JNIEXPORT jint JNICALL
Java_com_deerlive_jni_ffmpeg_FFmpegHandle_onFrameCallback(JNIEnv *env, jobject instance,
                                                                jbyteArray buffer_) {
    // Per-frame callback: convert one NV21 camera frame to YUV420P, encode it,
    // and mux the resulting packet. Returns 0 on success, negative on failure.
    startTime = av_gettime();
    jbyte *in = env->GetByteArrayElements(buffer_, NULL);
    // BUGFIX: result was not checked; NULL here means the JVM could not
    // pin/copy the array (exception pending) and the memcpy below would crash.
    if (in == NULL) {
        return -1;
    }

    int ret = 0;

    // Android camera preview delivers NV21: a full Y plane (width*height
    // bytes) followed by interleaved V/U byte pairs. The encoder expects
    // planar YUV420P, so copy Y verbatim and de-interleave VU.
    memcpy(pFrameYUV->data[0], in, y_length); // Y

    for (int i = 0; i < uv_length; i++) {
        // V goes to the third plane, U to the second.
        *(pFrameYUV->data[2] + i) = *(in + y_length + i * 2);
        *(pFrameYUV->data[1] + i) = *(in + y_length + i * 2 + 1);
    }

    pFrameYUV->format = pCodecCtx->pix_fmt;
    pFrameYUV->width = yuv_width;
    pFrameYUV->height = yuv_height;
    // pts in encoder time_base units (1/fps): one tick per frame.
    pFrameYUV->pts = count;

    // Prepare the packet the encoder will fill.
    enc_pkt.data = NULL;
    enc_pkt.size = 0;
    av_init_packet(&enc_pkt);

    /* send the frame to the encoder */
    ret = avcodec_send_frame(pCodecCtx, pFrameYUV);
    if (ret < 0) {
        logi("Error sending a frame for encoding\n");
    }

    ret = avcodec_receive_packet(pCodecCtx, &enc_pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
        // Not a hard error: the encoder simply has no packet ready yet
        // (common for the first few frames of a new stream).
        logi("Encoder has no packet available yet\n");
    } else if (ret < 0) {
        logi("Error during encoding\n");
    }

    if (ret != 0 || enc_pkt.size <= 0) {
        loge("avcodec_receive_packet error");
        // BUGFIX: the original returned here without releasing the pinned
        // Java array, leaking it on every EAGAIN frame.
        env->ReleaseByteArrayElements(buffer_, in, 0);
        return -2;
    }
    enc_pkt.stream_index = video_st->index;

    // Rescale pts/duration from microsecond units (AV_TIME_BASE) into the
    // muxer stream's time base.
    AVRational time_base = ofmt_ctx->streams[0]->time_base;
    AVRational r_frame_rate1 = pCodecCtx->framerate;
    // BUGFIX: if framerate was never set it is {0,1}, and 1/av_q2d() divided
    // by zero, yielding inf/garbage timestamps. Fall back to the inverse of
    // the encoder time_base (1/fps).
    if (r_frame_rate1.num <= 0 || r_frame_rate1.den <= 0) {
        r_frame_rate1.num = pCodecCtx->time_base.den;
        r_frame_rate1.den = pCodecCtx->time_base.num;
    }
    AVRational time_base_q = {1, AV_TIME_BASE};
    int64_t calc_duration = (int64_t)((double)(AV_TIME_BASE) * (1 / av_q2d(r_frame_rate1)));
    enc_pkt.pts = av_rescale_q(count * calc_duration, time_base_q, time_base);
    enc_pkt.dts = enc_pkt.pts;
    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);

    enc_pkt.pos = -1;

    // av_interleaved_write_frame takes ownership of the packet reference,
    // so no av_packet_unref is needed on the success path.
    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
    if (ret != 0) {
        loge("av_interleaved_write_frame failed");
    }
    count++;
    env->ReleaseByteArrayElements(buffer_, in, 0);
    return 0;

}

報錯信息 :
avcodec_send_frame send error

avcodec_receive_packet error

回答
編輯回答
櫻花霓

我想問一下,學會ffmpeg 需要先學會什么技術

2018年8月24日 04:22
編輯回答
我甘愿

首先你的編碼器錯了,x264對應AV_CODEC_ID_H264。

第二,ffmpeg本身有AV_PIX_FMT_NV21。

第三,第一幀應該設成 keyframe,往后每隔GOP這么多幀再給一個keyframe。

最后,你不妨從最簡單的JPEG (AV_CODEC_ID_MJPEG)編碼開始,學習一下ffmpeg的基礎,然后再做視頻,視頻其實就是處理很多的圖片連在一起。

2017年7月22日 09:16