Newest 'ffmpeg' Questions - Stack Overflow

http://stackoverflow.com/questions/tagged/ffmpeg

Articles published on the site

  • Decoding an mp3 file using FFmpeg but sound is glitchy

28 April 2017, by satyres

    After successfully compiling the latest version of the FFmpeg library and generating the .a libraries on Ubuntu, I've been struggling for more than a week to play a simple MP3 file on Android, without success. The sound plays on my S4, but it's glitchy and stuttering. I've followed this tutorial given by the FFmpeg team on GitHub and tried to use it on Android, but no luck. Here is the native code:

    void Java_com_example_home_hellondk_MainActivity_audio_1decode_1example(JNIEnv *env, jobject obj, jstring file, jbyteArray array) {
        jboolean isfilenameCopy;
        const char *filename = (*env)->GetStringUTFChars(env, file, &isfilenameCopy);
        jclass cls = (*env)->GetObjectClass(env, obj);
        jmethodID play = (*env)->GetMethodID(env, cls, "playSound", "([BI)V");

        AVCodec *codec;
        AVCodecContext *c = NULL;
        int len;
        FILE *f, *outfile;
        uint8_t inbuf[AUDIO_INBUF_SIZE + AV_INPUT_BUFFER_PADDING_SIZE];
        AVPacket avpkt;
        AVFrame *decoded_frame = NULL;
        AVFormatContext *container = NULL;

        av_init_packet(&avpkt);

        printf("Decode audio file %s \n", filename);
        LOGE("Decode audio file %s\n", filename);

        /* find the MPEG audio decoder */
        /* codec = avcodec_find_decoder(AV_CODEC_ID_MP3);
           if (!codec) {
               fprintf(stderr, "Codec not found\n");
               LOGE("Codec not found\n");
               exit(1);
           } */

        int lError;
        if ((lError = avformat_open_input(&container, filename, NULL, NULL)) != 0) {
            LOGE("Error open source file: %d", lError);
            exit(1);
        }
        if ((lError = avformat_find_stream_info(container, NULL)) < 0) {
            LOGE("Error find stream information: %d", lError);
            exit(1);
        }
        LOGE("Stage 1.5");
        LOGE("audio format: %s", container->iformat->name);
        LOGE("audio bitrate: %llu", container->bit_rate);

        int stream_id = -1;
        // Find the first audio stream. This may not be necessary if you can
        // guarantee that the container contains only the desired audio stream.
        LOGE("nb_streams: %d", container->nb_streams);
        int i;
        for (i = 0; i < container->nb_streams; i++) {
            if (container->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
                stream_id = i;
                LOGE("stream_id: %d", stream_id);
                break;
            }
        }

        AVCodecContext *codec_context = container->streams[stream_id]->codec;
        codec = avcodec_find_decoder(codec_context->codec_id);
        LOGE("stream_id: %d", stream_id);
        LOGE("codec %s", codec->name);
        if (!codec) {
            fprintf(stderr, "codec not found\n");
            exit(1);
        }

        c = avcodec_alloc_context3(codec);
        if (!c) {
            fprintf(stderr, "Could not allocate audio codec context\n");
            LOGE("Could not allocate audio codec context\n");
            exit(1);
        }

        /* open it */
        if (avcodec_open2(c, codec, NULL) < 0) {
            fprintf(stderr, "Could not open codec\n");
            LOGE("Could not open codec\n");
            exit(1);
        }

        f = fopen(filename, "rb");
        if (!f) {
            fprintf(stderr, "Could not open %s\n", filename);
            LOGE("Could not open %s\n", filename);
            exit(1);
        }

        avpkt.data = inbuf;
        avpkt.size = fread(inbuf, 1, AUDIO_INBUF_SIZE, f);
        LOGE("Stage 5");

        /* decode until eof */
        while (1) {
            if ((len = av_read_frame(container, &avpkt)) < 0)
                break;

            if (avpkt.stream_index == stream_id) {
                if (!decoded_frame) {
                    if (!(decoded_frame = av_frame_alloc())) {
                        fprintf(stderr, "Could not allocate audio frame\n");
                        LOGE("Could not allocate audio frame\n");
                        exit(1);
                    }
                }

                int got_frame = 0;
                len = avcodec_decode_audio4(c, decoded_frame, &got_frame, &avpkt);
                LOGE("len=%d", len);
                if (len < 0) {
                    LOGE("Error decoding audio\n");
                    continue;
                }

                if (got_frame) {
                    LOGE("begin frame decode\n");
                    int data_size = av_samples_get_buffer_size(NULL, c->channels, decoded_frame->nb_samples, c->sample_fmt, 1);
                    if (data_size > 0) {
                        LOGE("after frame decode %d\n", data_size);

                        jbyte *bytes = (*env)->GetByteArrayElements(env, array, NULL);
                        memcpy(bytes, decoded_frame->data[0], data_size);
                        (*env)->ReleaseByteArrayElements(env, array, bytes, 0);
                        (*env)->CallVoidMethod(env, obj, play, array, data_size);
                    } else {
                        fprintf(stderr, "Failed to calculate data size\n");
                        exit(1);
                    }
                }

                avpkt.size -= len;
                avpkt.data += len;
                avpkt.pts = AV_NOPTS_VALUE;
                if (avpkt.size < AUDIO_REFILL_THRESH) {
                    memmove(inbuf, avpkt.data, avpkt.size);
                    avpkt.data = inbuf;
                    len = fread(avpkt.data + avpkt.size, 1, AUDIO_INBUF_SIZE - avpkt.size, f);
                    if (len > 0)
                        avpkt.size += len;
                }
            }
        }

        fclose(f);

        avcodec_free_context(&c);
        av_frame_free(&decoded_frame);
    }
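
    Could the problem be the sample format? As far as I know, FFmpeg's native MP3 decoder outputs planar float samples (AV_SAMPLE_FMT_FLTP), while my AudioTrack is configured for interleaved 16-bit PCM, so copying decoded_frame->data[0] directly would only pass one plane of float data. Below is a rough, untested sketch of the conversion I have in mind using libswresample; c and decoded_frame are the variables from the code above, out_buf is a name I made up, and the output rate would also have to match whatever rate the AudioTrack is created with:

        #include <libavutil/channel_layout.h>
        #include <libavutil/samplefmt.h>
        #include <libswresample/swresample.h>

        /* Set up once, after avcodec_open2(), when the decoder parameters are known.
         * Note: if c->channel_layout is 0, fall back to
         * av_get_default_channel_layout(c->channels). */
        SwrContext *swr = swr_alloc_set_opts(NULL,
                AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16, c->sample_rate,  /* output: interleaved S16 stereo */
                c->channel_layout,   c->sample_fmt,     c->sample_rate,  /* input: whatever the decoder produced */
                0, NULL);
        swr_init(swr);

        /* Per decoded frame: convert the planar samples into an interleaved S16 buffer. */
        uint8_t *out_buf = NULL;   /* hypothetical scratch buffer */
        int out_linesize;
        av_samples_alloc(&out_buf, &out_linesize, 2, decoded_frame->nb_samples, AV_SAMPLE_FMT_S16, 0);
        int out_samples = swr_convert(swr, &out_buf, decoded_frame->nb_samples,
                                      (const uint8_t **)decoded_frame->extended_data,
                                      decoded_frame->nb_samples);
        int out_bytes = av_samples_get_buffer_size(NULL, 2, out_samples, AV_SAMPLE_FMT_S16, 1);
        /* copy out_bytes from out_buf into the Java byte[] instead of decoded_frame->data[0] */
        av_freep(&out_buf);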
    

    The Java code:

    package com.example.home.hellondk;
    
    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;
    import android.media.MediaPlayer;
    import android.support.v7.app.AppCompatActivity;
    import android.os.Bundle;
    import android.util.Log;
    
    import java.io.File;
    import java.io.FileNotFoundException;
    import java.io.FileOutputStream;
    import java.io.IOException;
    
    public class MainActivity extends AppCompatActivity {
        static {
            System.loadLibrary("MyLibraryPlayer");
        }
        public native void createEngine();
    
        public native void audio_decode_example(String outfilename, byte[] array);
    
    
    
        private AudioTrack track;
        private FileOutputStream os;
    
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.activity_main);
            createEngine();
    
     /*        MediaPlayer mp = new MediaPlayer();
            mp.start();*/
    
            int bufSize = AudioTrack.getMinBufferSize(32000,
                    AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT);
    
            track = new AudioTrack(AudioManager.STREAM_MUSIC,
                    32000,
                    AudioFormat.CHANNEL_CONFIGURATION_STEREO,
                    AudioFormat.ENCODING_PCM_16BIT,
                    bufSize,
                    AudioTrack.MODE_STREAM);
    
            byte[] bytes = new byte[bufSize];

            audio_decode_example("/storage/emulated/0/test.mp3", bytes);
        }
    
        void playSound(byte[] buf, int size) {
            //android.util.Log.v("ROHAUPT", "RAH Playing");
            if (track.getPlayState() != AudioTrack.PLAYSTATE_PLAYING)
                track.play();
            track.write(buf, 0, size);
    
    
        }
    }
    

    Thank you so much for your help. Kind regards

  • Create H.264 stream from single PNG

    28 April 2017, by Jim Rhodes

    I have an application that uses RTSP to get a video stream from an IP camera. The application extracts the H.264 data from the RTP packets and sends it to a remote process that decodes the stream (using ffmpeg) and displays it. If the video stream is interrupted for any reason, I would like to insert a static image into the stream being sent to the remote process.

    I have a PNG that is the same resolution as the camera. I thought I could use ffmpeg to encode the PNG to H.264 to get an I-frame that I could insert into the stream, but I have not had any success. Using ffmpeg I created an MP4 with H.264 encoding from the PNG image and then extracted what I believed was the H.264 data from the MP4, but when I try to decode it, ffmpeg returns an error.
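
    (A guess at why the extraction failed: MP4 stores length-prefixed NAL units rather than the Annex-B start codes a raw H.264 decoder expects. A command along these lines, which writes a raw Annex-B elementary stream directly instead of going through MP4, might be closer to what is needed; image.png and frame.264 are placeholder names:)

        ffmpeg -i image.png -frames:v 1 -pix_fmt yuv420p -c:v libx264 -f h264 frame.264

    The SPS/PPS in that output would presumably also have to match, or be sent alongside, the camera's parameter sets for the downstream decoder to accept the spliced frame.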

    Anyone have any ideas on how I could accomplish this?

  • ffmpeg compose multiple files [on hold]

    28 April 2017, by Seba

    We are looking for an ffmpeg developer to compose multiple Matroska files into a single MPEG-4 file. Below are the requirements for this composer (a rough command-line sketch follows the list):

    • Technology: FFmpeg Fast Video Processing. AWS to iOS and Android Streaming.

    • Input: Webm Video and Audio Individual files.

    • Output: MPEG4 Video and Audio Single file.

    • Resizable: Mobile Phone Display Size.

    • Background Vertical Video. 0 (Transparent) to 1 Video.

    • Front: Circular Videos. 0 to 9 Videos.

    • Layout: image
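
    A rough command-line sketch of the kind of composition described above (placeholder file names, sizes and positions; the circular shape would additionally need an alpha mask, e.g. via the alphamerge filter, and audio is assumed to come from the background file):

        ffmpeg -i background.webm -i front1.webm \
               -filter_complex "[1:v]scale=240:240[f1];[0:v][f1]overlay=x=40:y=60[out]" \
               -map "[out]" -map 0:a -c:v libx264 -pix_fmt yuv420p -c:a aac output.mp4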

    Any help would be appreciated

  • Encode videos to send with WhatsApp

    28 April 2017, by piegames

    I have a series of PNG images of a video I rendered and now I want to send it via WhatsApp. I used to encode my videos with the command from this answer, but apparently the videos can only be played from WhatsApp Web; on the phone I still get an error message. How do I convert them so that they can also be played on the phone?

    Command used:

    %PATH_TO_FFMPEG%\ffmpeg.exe -i paint\out%04d.png -c:v libx264 -c:a aac ./videos/out7.mp4
    

    Log:

    ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers
      built with gcc 6.3.0 (GCC)
      configuration: --enable-gpl --enable-version3 --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-nvenc --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-zlib
      libavutil      55. 34.101 / 55. 34.101
      libavcodec     57. 64.101 / 57. 64.101
      libavformat    57. 56.101 / 57. 56.101
      libavdevice    57.  1.100 / 57.  1.100
      libavfilter     6. 65.100 /  6. 65.100
      libswscale      4.  2.100 /  4.  2.100
      libswresample   2.  3.100 /  2.  3.100
      libpostproc    54.  1.100 / 54.  1.100
    Input #0, image2, from 'paint2\out%04d.png':
      Duration: 00:00:45.00, start: 0.000000, bitrate: N/A
        Stream #0:0: Video: png, rgba(pc), 384x240 [SAR 2835:2835 DAR 8:5], 25 fps, 25 tbr, 25 tbn, 25 tbc
    No pixel format specified, yuv444p for H.264 encoding chosen.
    Use -pix_fmt yuv420p for compatibility with outdated media players.
    [libx264 @ 000000000061a660] using SAR=1/1
    [libx264 @ 000000000061a660] using cpu capabilities: MMX2 SSE2Fast LZCNT
    [libx264 @ 000000000061a660] profile High 4:4:4 Predictive, level 1.3, 4:4:4 8-bit
    [libx264 @ 000000000061a660] 264 - core 148 r2762 90a61ec - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=1 ref=3 deblock=1:0:0 analyse=0x3:0x113 me=hex subme=7 psy=1 psy_rd=1.00:0.00 mixed_ref=1 me_range=16 chroma_me=1 trellis=1 8x8dct=1 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=4 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=3 b_pyramid=2 b_adapt=1 b_bias=0 direct=1 weightb=1 open_gop=0 weightp=2 keyint=250 keyint_min=25 scenecut=40 intra_refresh=0 rc_lookahead=40 rc=crf mbtree=1 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=1:1.00
    Output #0, mp4, to './videos/out6.mp4':
      Metadata:
        encoder         : Lavf57.56.101
        Stream #0:0: Video: h264 (libx264) ([33][0][0][0] / 0x0021), yuv444p, 384x240 [SAR 1:1 DAR 8:5], q=-1--1, 25 fps, 12800 tbn, 25 tbc
        Metadata:
          encoder         : Lavc57.64.101 libx264
        Side data:
          cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
    Stream mapping:
      Stream #0:0 -> #0:0 (png (native) -> h264 (libx264))
    Press [q] to stop, [?] for help
    frame=   79 fps=0.0 q=28.0 size=      19kB time=00:00:01.04 bitrate= 152.5kbits/frame=  171 fps=168 q=28.0 size=      21kB time=00:00:04.72 bitrate=  36.4kbits/frame=  255 fps=168 q=28.0 size=      24kB time=00:00:08.08 bitrate=  24.1kbits/frame=  309 fps=153 q=28.0 size=      48kB time=00:00:10.24 bitrate=  38.6kbits/frame=  383 fps=151 q=28.0 size=      51kB time=00:00:13.20 bitrate=  31.7kbits/frame=  452 fps=148 q=28.0 size=      54kB time=00:00:15.96 bitrate=  27.7kbits/frame=  534 fps=151 q=28.0 size=      59kB time=00:00:19.24 bitrate=  25.1kbits/frame=  622 fps=154 q=28.0 size=      85kB time=00:00:22.76 bitrate=  30.7kbits/frame=  706 fps=155 q=28.0 size=      90kB time=00:00:26.12 bitrate=  28.1kbits/frame=  795 fps=157 q=28.0 size=      94kB time=00:00:29.68 bitrate=  25.9kbits/frame=  893 fps=161 q=28.0 size=     121kB time=00:00:33.60 bitrate=  29.5kbits/frame=  985 fps=162 q=28.0 size=     126kB time=00:00:37.28 bitrate=  27.6kbits/frame= 1084 fps=165 q=28.0 size=     153kB time=00:00:41.24 bitrate=  30.3kbits/frame= 1125 fps=163 q=-1.0 Lsize=     168kB time=00:00:44.88 bitrate=  30.6kbits/s speed=6.52x
    video:154kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 8.859178%
    [libx264 @ 000000000061a660] frame I:5     Avg QP:14.73  size: 22873
    [libx264 @ 000000000061a660] frame P:384   Avg QP:14.24  size:    65
    [libx264 @ 000000000061a660] frame B:736   Avg QP:16.11  size:    24
    [libx264 @ 000000000061a660] consecutive B-frames:  7.3% 13.7%  8.3% 70.8%
    [libx264 @ 000000000061a660] mb I  I16..4: 35.6% 47.1% 17.3%
    [libx264 @ 000000000061a660] mb P  I16..4:  0.0%  0.0%  0.0%  P16..4:  1.2%  0.1%  0.0%  0.0%  0.0%    skip:98.6%
    [libx264 @ 000000000061a660] mb B  I16..4:  0.0%  0.0%  0.0%  B16..8:  0.4%  0.0%  0.0%  direct: 0.0%  skip:99.6%  L0:53.1% L1:46.0% BI: 0.9%
    [libx264 @ 000000000061a660] 8x8 transform intra:47.2% inter:15.9%
    [libx264 @ 000000000061a660] coded y,u,v intra: 80.7% 29.5% 0.9% inter: 0.1% 0.0% 0.0%
    [libx264 @ 000000000061a660] i16 v,h,dc,p: 24% 15% 55%  6%
    [libx264 @ 000000000061a660] i8 v,h,dc,ddl,ddr,vr,hd,vl,hu:  5%  7% 62% 14%  6%  1%  2%  1%  2%
    [libx264 @ 000000000061a660] i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 10%  9% 29% 11%  8%  5% 14%  5%  9%
    [libx264 @ 000000000061a660] Weighted P-Frames: Y:0.0% UV:0.0%
    [libx264 @ 000000000061a660] ref P L0: 27.3%  0.7% 36.5% 35.6%
    [libx264 @ 000000000061a660] ref B L0: 59.9% 29.7% 10.4%
    [libx264 @ 000000000061a660] ref B L1: 96.7%  3.3%
    [libx264 @ 000000000061a660] kb/s:27.91
    

    Error on my phone: "Dieses Video kann leider nicht wiedergegeben werden" ("Unfortunately, this video cannot be played").
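
    The log above shows that, with no pixel format specified, the encoder picked yuv444p (profile "High 4:4:4 Predictive") and itself prints the hint "Use -pix_fmt yuv420p for compatibility with outdated media players"; phone players generally handle only 4:2:0. A variant of the command along those lines (a sketch, with a conservative profile and level added as an extra guess, not something verified against WhatsApp):

        %PATH_TO_FFMPEG%\ffmpeg.exe -i paint\out%04d.png -c:v libx264 -pix_fmt yuv420p -profile:v baseline -level 3.0 -c:a aac ./videos/out7.mp4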

  • Send frame ffmpeg c++ jsmpeg

    28 April 2017, by user345974

    I'm using this library for live streaming: https://github.com/phoboslab/jsmpeg.

    I read a video file from the URI http://server/file.mp4, I have a node.js websocket server, and I have a website.

    I read the mp4 with ffmpeg, send the video to the server, and read it with the jsmpeg library. I use this command line:

    ffmpeg -i "http://server/file.mp4" -f mpeg1Video -b:a 800k -r 30 http://server:8082/pw/858/480
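
    (For reference, jsmpeg decodes MPEG-1 video carried in an MPEG-TS container, so the command shape usually documented for it is roughly the following; the exact options here are from memory, with the same placeholder URLs as above:)

        ffmpeg -i "http://server/file.mp4" -f mpegts -codec:v mpeg1video -b:v 800k -r 30 http://server:8082/pw/858/480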

    I would like to replace it with the ffmpeg C++ library, version 3.2.4. I'm using the remuxing.c example:

    int RecordFromhttp() {

    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename, *out_filename;
    int ret, i;
    int stream_index = 0;
    int stream_mapping_size = 0;
    int size;
    std::vector<int> stream_mapping;
    
    in_filename = "http://server/file.mp4";
    out_filename = "http://server:8082/pw/858/480/";
    
    av_register_all();
    
    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, NULL, NULL)) < 0) {
        return 2;
    }
    
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        return 3;
    }
    
    if (ifmt_ctx->streams[0]->codecpar->codec_id == AV_CODEC_ID_H264) {
        return -19;
    }
    
    
    AVOutputFormat *mpegts_format = av_guess_format("mpegts", NULL, NULL);
    avformat_alloc_output_context2(&ofmt_ctx, mpegts_format, "mpeg1video", out_filename);
    
    if (!ofmt_ctx) {
        return 4;
    }
    
    stream_mapping_size = ifmt_ctx->nb_streams;
    
    ofmt = ofmt_ctx->oformat;
    
    for (i = 0; i < ifmt_ctx->nb_streams; i++) {
        AVStream *out_stream;
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVCodecParameters *in_codecpar = in_stream->codecpar;
    
        if (in_codecpar->codec_type != AVMEDIA_TYPE_AUDIO &&
            in_codecpar->codec_type != AVMEDIA_TYPE_VIDEO &&
            in_codecpar->codec_type != AVMEDIA_TYPE_SUBTITLE) {
            stream_mapping.push_back(-1);
            continue;
        }
        stream_mapping.push_back(stream_index++);
    
        AVCodec c1;
    
        out_stream = avformat_new_stream(ofmt_ctx, NULL);
    
        if (!out_stream) {
            return 6;
        }
        ret = avcodec_parameters_copy(out_stream->codecpar, in_codecpar);
        if (ret < 0) {
            return 7;
        }
        out_stream->codecpar->codec_tag = 0;
    }
    
    av_dump_format(ofmt_ctx, 0, out_filename, 1);
    
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            return 8;
        }
    }
    
    ret = avformat_write_header(ofmt_ctx, NULL);
    
    if (ret < 0) {
        return 9;
    }
    
    while (1) {
    
        AVStream *in_stream, *out_stream;
        ret = av_read_frame(ifmt_ctx, &pkt);
    
        if (ret < 0)
            return ret;
    
        in_stream = ifmt_ctx->streams[pkt.stream_index];
        if (pkt.stream_index >= stream_mapping_size ||
            stream_mapping[pkt.stream_index] < 0) {
            av_packet_unref(&pkt);
            return -16;
        }
    
        pkt.stream_index = stream_mapping[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;
    
        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
    
        if (ret < 0) {
            return -45;
        }
    
        av_packet_unref(&pkt);
    }
    
    av_write_trailer(ofmt_ctx);
    
    
    avformat_close_input(&ifmt_ctx);
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_closep(&ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    av_freep(&stream_mapping);
    if (ret < 0 && ret != AVERROR_EOF) {
        return 11;
    }
    
    return 0;
    

    }

    The command line works fine; the C++ version does not. The C++ program reads the stream frame by frame and sends it to the socket, jsmpeg receives it, but the video is not shown.

    What's the problem?
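
    One observation of my own, not taken from the question: remuxing.c only copies compressed packets between containers, so the H.264 stream from the mp4 stays H.264, whereas the command line above re-encodes to MPEG-1 video, which is the codec jsmpeg can actually decode. A minimal sketch of opening an MPEG-1 encoder with the FFmpeg 3.2-era C API (width, height, frame rate and bitrate below are placeholders), on top of which a full decode/re-encode loop would still be needed:

        #include <libavcodec/avcodec.h>
        #include <libavutil/rational.h>

        /* Sketch: allocate and open an MPEG-1 video encoder. */
        static AVCodecContext *open_mpeg1_encoder(void) {
            AVCodec *enc = avcodec_find_encoder(AV_CODEC_ID_MPEG1VIDEO);
            if (!enc)
                return NULL;
            AVCodecContext *enc_ctx = avcodec_alloc_context3(enc);
            enc_ctx->width     = 858;                  /* placeholder output size */
            enc_ctx->height    = 480;
            enc_ctx->pix_fmt   = AV_PIX_FMT_YUV420P;   /* MPEG-1 video is 4:2:0 */
            enc_ctx->time_base = av_make_q(1, 30);     /* 30 fps, matching -r 30 */
            enc_ctx->bit_rate  = 800000;               /* roughly matching the CLI bitrate */
            if (avcodec_open2(enc_ctx, enc, NULL) < 0) {
                avcodec_free_context(&enc_ctx);
                return NULL;
            }
            /* Frames decoded from the H.264 input would then be fed through
             * avcodec_send_frame() / avcodec_receive_packet(), and the resulting
             * packets written to the mpegts output with av_interleaved_write_frame(). */
            return enc_ctx;
        }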