
Creating an mp4 file with FFmpeg and C++

I am trying to create an mp4 video file with FFmpeg and C++, but the resulting file is broken (Windows Media Player reports "Can not play ... 0xc00d36c4"). If I create a .h264 file instead, I can play it with 'ffplay' and convert it to mp4 on the command line.

My code:

int main() { 
    char *filename = "tmp.mp4"; 
    AVOutputFormat *fmt; 
    AVFormatContext *fctx; 
    AVCodecContext *cctx; 
    AVStream *st; 

    av_register_all(); 
    avcodec_register_all(); 

    //auto detect the output format from the name 
    fmt = av_guess_format(NULL, filename, NULL); 
    if (!fmt) { 
     cout << "Error av_guess_format()" << endl; system("pause"); exit(1); 
    } 

    if (avformat_alloc_output_context2(&fctx, fmt, NULL, filename) < 0) { 
     cout << "Error avformat_alloc_output_context2()" << endl; system("pause"); exit(1); 
    } 


    //stream creation + parameters 
    st = avformat_new_stream(fctx, 0); 
    if (!st) { 
     cout << "Error avformat_new_stream()" << endl; system("pause"); exit(1); 
    } 

    st->codecpar->codec_id = fmt->video_codec; 
    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO; 
    st->codecpar->width = 352; 
    st->codecpar->height = 288; 
    st->time_base.num = 1; 
    st->time_base.den = 25; 

    AVCodec *pCodec = avcodec_find_encoder(st->codecpar->codec_id); 
    if (!pCodec) { 
     cout << "Error avcodec_find_encoder()" << endl; system("pause"); exit(1); 
    } 

    cctx = avcodec_alloc_context3(pCodec); 
    if (!cctx) { 
     cout << "Error avcodec_alloc_context3()" << endl; system("pause"); exit(1); 
    } 

    avcodec_parameters_to_context(cctx, st->codecpar); 
    cctx->bit_rate = 400000; 
    cctx->width = 352; 
    cctx->height = 288; 
    cctx->time_base.num = 1; 
    cctx->time_base.den = 25; 
    cctx->gop_size = 12; 
    cctx->pix_fmt = AV_PIX_FMT_YUV420P; 
    if (st->codecpar->codec_id == AV_CODEC_ID_H264) { 
     av_opt_set(cctx->priv_data, "preset", "ultrafast", 0); 
    } 
    if (fctx->oformat->flags & AVFMT_GLOBALHEADER) { 
     cctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 
    } 
    avcodec_parameters_from_context(st->codecpar, cctx); 

    av_dump_format(fctx, 0, filename, 1); 

    //OPEN FILE + WRITE HEADER 
    if (avcodec_open2(cctx, pCodec, NULL) < 0) { 
     cout << "Error avcodec_open2()" << endl; system("pause"); exit(1); 
    } 
    if (!(fmt->flags & AVFMT_NOFILE)) { 
     if (avio_open(&fctx->pb, filename, AVIO_FLAG_WRITE) < 0) { 
      cout << "Error avio_open()" << endl; system("pause"); exit(1); 
     } 
    } 
    if (avformat_write_header(fctx, NULL) < 0) { 
     cout << "Error avformat_write_header()" << endl; system("pause"); exit(1); 
    } 


    //CREATE DUMMY VIDEO 
    AVFrame *frame = av_frame_alloc(); 
    frame->format = cctx->pix_fmt; 
    frame->width = cctx->width; 
    frame->height = cctx->height; 
    av_image_alloc(frame->data, frame->linesize, cctx->width, cctx->height, cctx->pix_fmt, 32); 

    AVPacket pkt; 
    double video_pts = 0; 
    for (int i = 0; i < 50; i++) { 
     video_pts = (double)cctx->time_base.num/cctx->time_base.den * 90 * i; 

     for (int y = 0; y < cctx->height; y++) { 
      for (int x = 0; x < cctx->width; x++) { 
       frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3; 
       if (y < cctx->height/2 && x < cctx->width/2) { 
        /* Cb and Cr */ 
        frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2; 
        frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5; 
       } 
      } 
     } 

     av_init_packet(&pkt); 
     pkt.flags |= AV_PKT_FLAG_KEY; 
     pkt.pts = frame->pts = video_pts; 
     pkt.data = NULL; 
     pkt.size = 0; 
     pkt.stream_index = st->index; 

     if (avcodec_send_frame(cctx, frame) < 0) { 
      cout << "Error avcodec_send_frame()" << endl; system("pause"); exit(1); 
     } 
     if (avcodec_receive_packet(cctx, &pkt) == 0) { 
      //cout << "Write frame " << to_string((int) pkt.pts) << endl; 
      av_interleaved_write_frame(fctx, &pkt); 
      av_packet_unref(&pkt); 
     } 
    } 

    //DELAYED FRAMES 
    for (;;) { 
     avcodec_send_frame(cctx, NULL); 
     if (avcodec_receive_packet(cctx, &pkt) == 0) { 
      //cout << "-Write frame " << to_string((int)pkt.pts) << endl; 
      av_interleaved_write_frame(fctx, &pkt); 
      av_packet_unref(&pkt); 
     } 
     else { 
      break; 
     } 
    } 

    //FINISH 
    av_write_trailer(fctx); 
    if (!(fmt->flags & AVFMT_NOFILE)) { 
     if (avio_close(fctx->pb) < 0) { 
      cout << "Error avio_close()" << endl; system("pause"); exit(1); 
     } 
    } 
    av_frame_free(&frame); 
    avcodec_free_context(&cctx); 
    avformat_free_context(fctx); 

    system("pause"); 
    return 0; 
} 

Output of the program:

Output #0, mp4, to 'tmp.mp4': 
    Stream #0:0: Video: h264, yuv420p, 352x288, q=2-31, 400 kb/s, 25 tbn 
[libx264 @ 0000021c4a995ba0] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2 
[libx264 @ 0000021c4a995ba0] profile Constrained Baseline, level 2.0 
[libx264 @ 0000021c4a995ba0] 264 - core 152 r2851 ba24899 - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=12 keyint_min=1 scenecut=0 intra_refresh=0 rc=abr mbtree=0 bitrate=400 ratetol=1.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=0 
[libx264 @ 0000021c4a995ba0] frame I:5  Avg QP: 7.03 size: 9318 
[libx264 @ 0000021c4a995ba0] frame P:45 Avg QP: 4.53 size: 4258 
[libx264 @ 0000021c4a995ba0] mb I I16..4: 100.0% 0.0% 0.0% 
[libx264 @ 0000021c4a995ba0] mb P I16..4: 0.0% 0.0% 0.0% P16..4: 100.0% 0.0% 0.0% 0.0% 0.0% skip: 0.0% 
[libx264 @ 0000021c4a995ba0] final ratefactor: 9.11 
[libx264 @ 0000021c4a995ba0] coded y,uvDC,uvAC intra: 18.9% 21.8% 14.5% inter: 7.8% 100.0% 15.5% 
[libx264 @ 0000021c4a995ba0] i16 v,h,dc,p: 4% 5% 5% 86% 
[libx264 @ 0000021c4a995ba0] i8c dc,h,v,p: 2% 9% 6% 82% 
[libx264 @ 0000021c4a995ba0] kb/s:264.68 

If I try to play the mp4 file with 'ffplay', it prints:

[mov,mp4,m4a,3gp,3g2,mj2 @ 00000000026bf900] Could not find codec parameters for stream 0 (Video: h264 (avc1/0x31637661), none, 352x288, 138953 kb/s): unspecified pixel format 
[h264 @ 00000000006c6ae0] non-existing PPS 0 referenced 
[h264 @ 00000000006c6ae0] decode_slice_header error 
[h264 @ 00000000006c6ae0] no frame! 

I have spent many hours on this without finding the problem. What could be the cause?

Thanks.


How did you finally fix the code? – ar2015


@ar2015 Take a look at the answers, I posted my solution. It seems I could not write directly into the MP4 container, so I create the h264 first and then convert it to mp4. –

Answers


Working solution:

#include "VideoCapture.h" 

#define VIDEO_TMP_FILE "tmp.h264" 
#define FINAL_FILE_NAME "record.mp4" 


using namespace std; 

void VideoCapture::Init(int width, int height, int fpsrate, int bitrate) { 

    fps = fpsrate; 

    int err; 

    if (!(oformat = av_guess_format(NULL, VIDEO_TMP_FILE, NULL))) { 
     Debug("Failed to define output format", 0); 
     return; 
    } 

    if ((err = avformat_alloc_output_context2(&ofctx, oformat, NULL, VIDEO_TMP_FILE) < 0)) { 
     Debug("Failed to allocate output context", err); 
     Free(); 
     return; 
    } 

    if (!(codec = avcodec_find_encoder(oformat->video_codec))) { 
     Debug("Failed to find encoder", 0); 
     Free(); 
     return; 
    } 

    if (!(videoStream = avformat_new_stream(ofctx, codec))) { 
     Debug("Failed to create new stream", 0); 
     Free(); 
     return; 
    } 

    if (!(cctx = avcodec_alloc_context3(codec))) { 
     Debug("Failed to allocate codec context", 0); 
     Free(); 
     return; 
    } 

    videoStream->codecpar->codec_id = oformat->video_codec; 
    videoStream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO; 
    videoStream->codecpar->width = width; 
    videoStream->codecpar->height = height; 
    videoStream->codecpar->format = AV_PIX_FMT_YUV420P; 
    videoStream->codecpar->bit_rate = bitrate * 1000; 
    videoStream->time_base = { 1, fps }; 

    avcodec_parameters_to_context(cctx, videoStream->codecpar); 
    cctx->time_base = { 1, fps }; 
    cctx->max_b_frames = 2; 
    cctx->gop_size = 12; 
    if (videoStream->codecpar->codec_id == AV_CODEC_ID_H264) { 
     av_opt_set(cctx, "preset", "ultrafast", 0); 
    } 
    if (ofctx->oformat->flags & AVFMT_GLOBALHEADER) { 
     cctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 
    } 
    avcodec_parameters_from_context(videoStream->codecpar, cctx); 

    if ((err = avcodec_open2(cctx, codec, NULL)) < 0) { 
     Debug("Failed to open codec", err); 
     Free(); 
     return; 
    } 

    if (!(oformat->flags & AVFMT_NOFILE)) { 
     if ((err = avio_open(&ofctx->pb, VIDEO_TMP_FILE, AVIO_FLAG_WRITE)) < 0) { 
      Debug("Failed to open file", err); 
      Free(); 
      return; 
     } 
    } 

    if ((err = avformat_write_header(ofctx, NULL)) < 0) { 
     Debug("Failed to write header", err); 
     Free(); 
     return; 
    } 

    av_dump_format(ofctx, 0, VIDEO_TMP_FILE, 1); 
} 

void VideoCapture::AddFrame(uint8_t *data) { 
    int err; 
    if (!videoFrame) { 

     videoFrame = av_frame_alloc(); 
     videoFrame->format = AV_PIX_FMT_YUV420P; 
     videoFrame->width = cctx->width; 
     videoFrame->height = cctx->height; 

     if ((err = av_frame_get_buffer(videoFrame, 32)) < 0) { 
      Debug("Failed to allocate picture", err); 
      return; 
     } 
    } 

    if (!swsCtx) { 
     swsCtx = sws_getContext(cctx->width, cctx->height, AV_PIX_FMT_RGB24, cctx->width, cctx->height, AV_PIX_FMT_YUV420P, SWS_BICUBIC, 0, 0, 0); 
    } 

    int inLinesize[1] = { 3 * cctx->width }; 

    // From RGB to YUV 
    sws_scale(swsCtx, (const uint8_t * const *)&data, inLinesize, 0, cctx->height, videoFrame->data, videoFrame->linesize); 

    videoFrame->pts = frameCounter++; 

    if ((err = avcodec_send_frame(cctx, videoFrame)) < 0) { 
     Debug("Failed to send frame", err); 
     return; 
    } 

    AVPacket pkt; 
    av_init_packet(&pkt); 
    pkt.data = NULL; 
    pkt.size = 0; 

    if (avcodec_receive_packet(cctx, &pkt) == 0) { 
     pkt.flags |= AV_PKT_FLAG_KEY; 
     av_interleaved_write_frame(ofctx, &pkt); 
     av_packet_unref(&pkt); 
    } 
} 

void VideoCapture::Finish() { 
    //DELAYED FRAMES 
    AVPacket pkt; 
    av_init_packet(&pkt); 
    pkt.data = NULL; 
    pkt.size = 0; 

    for (;;) { 
     avcodec_send_frame(cctx, NULL); 
     if (avcodec_receive_packet(cctx, &pkt) == 0) { 
      av_interleaved_write_frame(ofctx, &pkt); 
      av_packet_unref(&pkt); 
     } 
     else { 
      break; 
     } 
    } 

    av_write_trailer(ofctx); 
    if (!(oformat->flags & AVFMT_NOFILE)) { 
     int err = avio_close(ofctx->pb); 
     if (err < 0) { 
      Debug("Failed to close file", err); 
     } 
    } 

    Free(); 

    Remux(); 
} 

void VideoCapture::Free() { 
    if (videoFrame) { 
     av_frame_free(&videoFrame); 
    } 
    if (cctx) { 
     avcodec_free_context(&cctx); 
    } 
    if (ofctx) { 
     avformat_free_context(ofctx); 
    } 
    if (swsCtx) { 
     sws_freeContext(swsCtx); 
    } 
} 

void VideoCapture::Remux() { 
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL; 
    int err; 

    if ((err = avformat_open_input(&ifmt_ctx, VIDEO_TMP_FILE, 0, 0)) < 0) { 
     Debug("Failed to open input file for remuxing", err); 
     goto end; 
    } 
    if ((err = avformat_find_stream_info(ifmt_ctx, 0)) < 0) { 
     Debug("Failed to retrieve input stream information", err); 
     goto end; 
    } 
    if ((err = avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, FINAL_FILE_NAME))) { 
     Debug("Failed to allocate output context", err); 
     goto end; 
    } 

    AVStream *inVideoStream = ifmt_ctx->streams[0]; 
    AVStream *outVideoStream = avformat_new_stream(ofmt_ctx, NULL); 
    if (!outVideoStream) { 
     Debug("Failed to allocate output video stream", 0); 
     goto end; 
    } 
    outVideoStream->time_base = { 1, fps }; 
    avcodec_parameters_copy(outVideoStream->codecpar, inVideoStream->codecpar); 
    outVideoStream->codecpar->codec_tag = 0; 

    if (!(ofmt_ctx->oformat->flags & AVFMT_NOFILE)) { 
     if ((err = avio_open(&ofmt_ctx->pb, FINAL_FILE_NAME, AVIO_FLAG_WRITE)) < 0) { 
      Debug("Failed to open output file", err); 
      goto end; 
     } 
    } 

    if ((err = avformat_write_header(ofmt_ctx, 0)) < 0) { 
     Debug("Failed to write header to output file", err); 
     goto end; 
    } 

    AVPacket videoPkt; 
    int ts = 0; 
    while (true) { 
     if ((err = av_read_frame(ifmt_ctx, &videoPkt)) < 0) { 
      break; 
     } 
     videoPkt.stream_index = outVideoStream->index; 
     videoPkt.pts = ts; 
     videoPkt.dts = ts; 
     videoPkt.duration = av_rescale_q(videoPkt.duration, inVideoStream->time_base, outVideoStream->time_base); 
     ts += videoPkt.duration; 
     videoPkt.pos = -1; 

     if ((err = av_interleaved_write_frame(ofmt_ctx, &videoPkt)) < 0) { 
      Debug("Failed to mux packet", err); 
      av_packet_unref(&videoPkt); 
      break; 
     } 
     av_packet_unref(&videoPkt); 
    } 

    av_write_trailer(ofmt_ctx); 

end: 
    if (ifmt_ctx) { 
     avformat_close_input(&ifmt_ctx); 
    } 
    if (ofmt_ctx && !(ofmt_ctx->oformat->flags & AVFMT_NOFILE)) { 
     avio_closep(&ofmt_ctx->pb); 
    } 
    if (ofmt_ctx) { 
     avformat_free_context(ofmt_ctx); 
    } 
} 

Header file:

#define VIDEOCAPTURE_API __declspec(dllexport) 

#include <iostream> 
#include <cstdio> 
#include <cstdlib> 
#include <fstream> 
#include <cstring> 
#include <math.h> 
#include <string.h> 
#include <algorithm> 
#include <string> 

extern "C" 
{ 
#include <libavcodec/avcodec.h> 
#include <libavcodec/avfft.h> 

#include <libavdevice/avdevice.h> 

#include <libavfilter/avfilter.h> 
#include <libavfilter/avfiltergraph.h> 
#include <libavfilter/buffersink.h> 
#include <libavfilter/buffersrc.h> 

#include <libavformat/avformat.h> 
#include <libavformat/avio.h> 

    // libav resample 

#include <libavutil/opt.h> 
#include <libavutil/common.h> 
#include <libavutil/channel_layout.h> 
#include <libavutil/imgutils.h> 
#include <libavutil/mathematics.h> 
#include <libavutil/samplefmt.h> 
#include <libavutil/time.h> 
#include <libavutil/opt.h> 
#include <libavutil/pixdesc.h> 
#include <libavutil/file.h> 


    // hwaccel 
#include "libavcodec/vdpau.h" 
#include "libavutil/hwcontext.h" 
#include "libavutil/hwcontext_vdpau.h" 

    // lib swresample 

#include <libswscale/swscale.h> 

    std::ofstream logFile; 

    void Log(std::string str) { 
     logFile.open("Logs.txt", std::ofstream::app); 
     logFile.write(str.c_str(), str.size()); 
     logFile.close(); 
    } 

    typedef void(*FuncPtr)(const char *); 
    FuncPtr ExtDebug; 
    char errbuf[32]; 

    void Debug(std::string str, int err) { 
     Log(str + " " + std::to_string(err)); 
     if (err < 0) { 
      av_strerror(err, errbuf, sizeof(errbuf)); 
      str += errbuf; 
     } 
     Log(str); 
     ExtDebug(str.c_str()); 
    } 

    void avlog_cb(void *, int level, const char * fmt, va_list vargs) { 
     static char message[8192]; 
     vsnprintf_s(message, sizeof(message), fmt, vargs); 
     Log(message); 
    } 

    class VideoCapture { 
    public: 

     VideoCapture() { 
      oformat = NULL; 
      ofctx = NULL; 
      videoStream = NULL; 
      videoFrame = NULL; 
      swsCtx = NULL; 
      frameCounter = 0; 

      // Initialize libavcodec 
      av_register_all(); 
      av_log_set_callback(avlog_cb); 
     } 

     ~VideoCapture() { 
      Free(); 
     } 

     void Init(int width, int height, int fpsrate, int bitrate); 

     void AddFrame(uint8_t *data); 

     void Finish(); 

    private: 

     AVOutputFormat *oformat; 
     AVFormatContext *ofctx; 

     AVStream *videoStream; 
     AVFrame *videoFrame; 

     AVCodec *codec; 
     AVCodecContext *cctx; 

     SwsContext *swsCtx; 

     int frameCounter; 

     int fps; 

     void Free(); 

     void Remux(); 
    }; 

    VIDEOCAPTURE_API VideoCapture* Init(int width, int height, int fps, int bitrate) { 
     VideoCapture *vc = new VideoCapture(); 
     vc->Init(width, height, fps, bitrate); 
     return vc; 
    }; 

    VIDEOCAPTURE_API void AddFrame(uint8_t *data, VideoCapture *vc) { 
     vc->AddFrame(data); 
    } 

    VIDEOCAPTURE_API void Finish(VideoCapture *vc) { 
     vc->Finish(); 
    } 

    VIDEOCAPTURE_API void SetDebug(FuncPtr fp) { 
     ExtDebug = fp; 
    }; 
} 
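
For completeness, here is a minimal sketch of how a host application could drive the exported functions above. Everything in it is a hypothetical example rather than part of the original answer (the 640x480 resolution, the gray RGB24 buffer, the DebugPrint callback), and the dllexport/dllimport distinction is ignored for brevity:

#include "VideoCapture.h"   // the header above 
#include <vector> 

// Hypothetical host-side debug sink matching the FuncPtr signature. 
void DebugPrint(const char *msg) { 
    std::cout << msg << std::endl; 
} 

int main() { 
    SetDebug(DebugPrint); 

    // 640x480 at 30 fps and 4000 kbit/s; example values only. 
    VideoCapture *vc = Init(640, 480, 30, 4000); 

    // 90 frames (3 seconds) of a solid mid-gray packed RGB24 image. 
    std::vector<uint8_t> rgb(640 * 480 * 3, 128); 
    for (int i = 0; i < 90; i++) { 
        AddFrame(rgb.data(), vc); 
    } 

    // Flushes the encoder, writes the trailer and remuxes tmp.h264 into record.mp4. 
    Finish(vc); 
    // The exported API has no matching destroy call, so the object is not deleted here. 
    return 0; 
} 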

Could you make the header file available as well? – ar2015


@ar2015 I have added the header file –


Many thanks.... – ar2015


You need to fill in the extradata and extradata_size fields of the codec context.
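
For what it's worth, the usual way those fields end up populated for an mp4/H.264 stream: with AV_CODEC_FLAG_GLOBAL_HEADER set, libx264 only writes the SPS/PPS into cctx->extradata inside avcodec_open2(), so copying the parameters into the stream before the codec is opened (as the code in the question does) leaves st->codecpar->extradata empty and the muxer cannot describe the stream. A minimal sketch of the ordering, reusing the names from the question and omitting error handling:

    // Open the encoder first so it can generate its global header (SPS/PPS). 
    avcodec_open2(cctx, pCodec, NULL); 

    // Only now copy the codec parameters into the stream the mp4 muxer will 
    // describe; this also duplicates extradata and extradata_size. 
    avcodec_parameters_from_context(st->codecpar, cctx); 

    avformat_write_header(fctx, NULL); 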


The documentation says "encoding: Set/allocated/freed by libavcodec." https://ffmpeg.org/doxygen/3.1/structAVCodecContext.html#abe964316aaaa61967b012efdcced79c4 What am I supposed to put there? –


I tried extracting the extradata from the generated .h264 and setting it in the mp4 container. Now I can play the video, but it reports 7 kfps instead of 50 kbps, and a bitrate of 60 mb/s instead of 400 kb/s; as a result the video length is 3 ms (originally 4 seconds). –
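
Those symptoms (a few milliseconds of duration and an enormous reported bitrate) are what players typically show when the packet timestamps are not expressed in the muxer's time base. A minimal sketch of the conventional handling, reusing the names from the question and assuming the pts is simply the frame index in the {1, 25} encoder time base:

    frame->pts = i;   // i-th frame, counted in encoder time-base units (1/25 s) 

    if (avcodec_send_frame(cctx, frame) == 0 && 
        avcodec_receive_packet(cctx, &pkt) == 0) { 
        // Convert pts/dts/duration from the encoder time base (1/25) to the 
        // time base the muxer actually assigned to the stream in write_header. 
        av_packet_rescale_ts(&pkt, cctx->time_base, st->time_base); 
        pkt.stream_index = st->index; 
        av_interleaved_write_frame(fctx, &pkt); 
        av_packet_unref(&pkt); 
    } 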