Advanced search

Media (91)

Other articles (107)

  • HTML5 audio and video support

    13 April 2011, by

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
    For older browsers the Flowplayer flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

  • HTML5 audio and video support

    10 April 2011

    MediaSPIP uses the HTML5 video and audio tags to play multimedia documents, taking advantage of the latest W3C innovations supported by modern browsers.
    For older browsers, the Flowplayer Flash player is used as a fallback.
    The HTML5 player used was created specifically for MediaSPIP: its appearance can be fully customised to match a chosen theme.
    These technologies make it possible to deliver video and sound both to conventional computers (...)

  • From upload to the final video [standalone version]

    31 January 2010, by

    The path of an audio or video document through SPIPMotion is divided into three distinct stages.
    Upload and retrieval of information about the source video
    First of all, a SPIP article must be created and the "source" video document attached to it.
    When this document is attached to the article, two actions are carried out in addition to the normal behaviour: retrieval of the technical information of the file's audio and video streams (see the sketch below); generation of a thumbnail: extraction of a (...)
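    The probing step described in this excerpt maps directly onto the FFmpeg libraries that the posts below revolve around. Purely as an illustration (this is not SPIPMotion's actual code, and the default file name is a placeholder), reading back the technical information of a file's audio and video streams with libavformat could look like this:

    // Illustrative sketch only: probe an uploaded file with libavformat and
    // report the technical properties of its audio and video streams.
    extern "C" {
    #include <libavformat/avformat.h>
    #include <libavcodec/avcodec.h>
    }
    #include <cstdio>

    int main(int argc, char *argv[]) {
        const char *path = argc > 1 ? argv[1] : "source-video.mp4";  // placeholder name

        AVFormatContext *fmt = nullptr;
        if (avformat_open_input(&fmt, path, nullptr, nullptr) < 0) return 1;  // open the container
        if (avformat_find_stream_info(fmt, nullptr) < 0) return 1;            // read the stream headers

        for (unsigned i = 0; i < fmt->nb_streams; i++) {
            AVCodecParameters *par = fmt->streams[i]->codecpar;
            if (par->codec_type == AVMEDIA_TYPE_VIDEO)
                printf("stream %u: video %s, %dx%d\n",
                       i, avcodec_get_name(par->codec_id), par->width, par->height);
            else if (par->codec_type == AVMEDIA_TYPE_AUDIO)
                printf("stream %u: audio %s, %d Hz\n",
                       i, avcodec_get_name(par->codec_id), par->sample_rate);
        }
        avformat_close_input(&fmt);
        return 0;
    }

    Built against libavformat and libavcodec (for example g++ probe.cpp -lavformat -lavcodec -lavutil), this prints one line per stream with the codec name and the basic video or audio parameters, which is the kind of information the excerpt says is retrieved when the document is attached.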

On other sites (9249)

  • Revision 31020: max = 255, it was blocking table creation on some databases

    20 August 2009, by vincent@… — Log

    max = 255, it was blocking table creation on some databases

  • Need help with noise in my video's audio after decoding with ffmpeg in C++ (tutorial from http://dranger.com/ffmpeg/)

    13 February 2019, by Abe Jahwin

    I have just read and implemented a tutorial about ffmpeg (http://dranger.com/ffmpeg).

    Then, after implementing tutorials 03 and 04 about playing sound, the program ran fine, but the sound output is bad (there is a lot of noise on top of the real audio). I do not know where the problem is.

    I tried changing the audio format, but the result is the same.

      #include <iostream>
      #include <vector>


    // FFmpeg / SDL
    extern "C" {
    #include <SDL.h>
    #include <SDL_thread.h>
    #include <stdio.h>
    #include <assert.h>
    #undef main
    #include <string.h>
    #include <stdlib.h>
    #include <math.h>
    #include <libavformat/avformat.h>
    #include <libavcodec/avcodec.h>
    #include <libavutil/avutil.h>
    #include <libavutil/pixdesc.h>
    #include <libswscale/swscale.h>
    }


    // compatibility with newer API
    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
    #define av_frame_alloc avcodec_alloc_frame
    #define av_frame_free avcodec_free_frame
    #endif

    #define SDL_AUDIO_BUFFER_SIZE 1024
    #define MAX_AUDIO_FRAME_SIZE 192000

    #define MAX_AUDIOQ_SIZE (5 * 16 * 1024)
    #define MAX_VIDEOQ_SIZE (5 * 256 * 1024)

    #define FF_REFRESH_EVENT (SDL_USEREVENT)
    #define FF_QUIT_EVENT (SDL_USEREVENT + 1)

    #define VIDEO_PICTURE_QUEUE_SIZE 1



    typedef struct PacketQueue {
       AVPacketList *first_pkt, *last_pkt;
       int nb_packets;
       int size;
       SDL_mutex *mutex;
       SDL_cond *cond;
    } PacketQueue;

    typedef struct VideoPicture {
       SDL_Overlay *bmp;
       int width, height; /* source height & width */
       int allocated;
    } VideoPicture;

    typedef struct VideoState {

       AVFormatContext *pFormatCtx = nullptr;
       int             videoStream, audioStream;
       AVStream        *audio_st;
       AVCodecContext  *audio_ctx;
       PacketQueue     audioq;
       uint8_t         audio_buf[(MAX_AUDIO_FRAME_SIZE * 3) / 2];
       unsigned int    audio_buf_size;
       unsigned int    audio_buf_index;
       AVFrame         audio_frame;
       AVPacket        audio_pkt;
       uint8_t         *audio_pkt_data;
       int             audio_pkt_size;
       AVStream        *video_st;
       AVCodecContext  *video_ctx = nullptr;
       PacketQueue     videoq;
       struct SwsContext *sws_ctx;

       VideoPicture    pictq[VIDEO_PICTURE_QUEUE_SIZE];
       int             pictq_size, pictq_rindex, pictq_windex;
       SDL_mutex       *pictq_mutex;
       SDL_cond        *pictq_cond;

       SDL_Thread      *parse_tid;
       SDL_Thread      *video_tid;

       char            filename[1024];
       int             quit;
    } VideoState;

    SDL_Surface     *screen;
    SDL_mutex       *screen_mutex;

    VideoState *global_video_state;


    void packet_queue_init(PacketQueue *q) {
       memset(q, 0, sizeof(PacketQueue));
       q->mutex = SDL_CreateMutex();
       q->cond = SDL_CreateCond();
    }


    int packet_queue_put(PacketQueue *q, AVPacket *pkt) {

       AVPacketList *pkt1;
       if (av_dup_packet(pkt) < 0) {
           return -1;
       }
       pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
       if (!pkt1)
           return -1;
       pkt1->pkt = *pkt;
       pkt1->next = NULL;

       SDL_LockMutex(q->mutex);

       if (!q->last_pkt)
           q->first_pkt = pkt1;
       else
           q->last_pkt->next = pkt1;
       q->last_pkt = pkt1;
       q->nb_packets++;
       q->size += pkt1->pkt.size;
       SDL_CondSignal(q->cond);

       SDL_UnlockMutex(q->mutex);
       return 0;
    }


    static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
    {
       AVPacketList *pkt1;
       int ret;

       SDL_LockMutex(q->mutex);

       for (;;) {

           if (global_video_state->quit) {
               ret = -1;
               break;
           }

           pkt1 = q->first_pkt;
           if (pkt1) {
               q->first_pkt = pkt1->next;
               if (!q->first_pkt)
                   q->last_pkt = NULL;
               q->nb_packets--;
               q->size -= pkt1->pkt.size;
               *pkt = pkt1->pkt;
               av_free(pkt1);
               ret = 1;
               break;
           }
           else if (!block) {
               ret = 0;
               break;
           }
           else {
               SDL_CondWait(q->cond, q->mutex);
           }
       }
       SDL_UnlockMutex(q->mutex);
       return ret;
    }
    int audio_decode_frame(VideoState *is, uint8_t *audio_buf, int buf_size) {

       int len1, data_size = 0;
       AVPacket *pkt = &is->audio_pkt;

       for (;;) {
           while (is->audio_pkt_size > 0) {
               int got_frame = 0;
               len1 = avcodec_decode_audio4(is->audio_ctx, &is->audio_frame, &got_frame, pkt);
               if (len1 < 0) {
                   /* if error, skip frame */
                   is->audio_pkt_size = 0;
                   break;
               }
               data_size = 0;
               if (got_frame) {
                   data_size = av_samples_get_buffer_size(NULL,
                       is->audio_ctx->channels,
                       is->audio_frame.nb_samples,
                       is->audio_ctx->sample_fmt,
                       1);
                   assert(data_size <= buf_size);
                   memcpy(audio_buf, is->audio_frame.data[0], data_size);
               }
               is->audio_pkt_data += len1;
               is->audio_pkt_size -= len1;
               if (data_size <= 0) {
                   /* No data yet, get more frames */
                   continue;
               }
               /* We have data, return it and come back for more later */
               return data_size;
           }
           if (pkt->data)
               av_free_packet(pkt);

           if (is->quit) {
               return -1;
           }
           /* next packet */
           if (packet_queue_get(&is->audioq, pkt, 1) < 0) {
               return -1;
           }
           is->audio_pkt_data = pkt->data;
           is->audio_pkt_size = pkt->size;
       }
    }



    void audio_callback(void *userdata, Uint8 *stream, int len) {

       VideoState *is = (VideoState *)userdata;
       int len1, audio_size;

       while (len > 0) {
           if (is->audio_buf_index >= is->audio_buf_size) {
               /* We have already sent all our data; get more */
               audio_size = audio_decode_frame(is, is->audio_buf, sizeof(is->audio_buf));
               if (audio_size < 0) {
                   /* If error, output silence */
                   is->audio_buf_size = 1024;
                   memset(is->audio_buf, 0, is->audio_buf_size);
               }
               else {
                   is->audio_buf_size = audio_size;
               }
               is->audio_buf_index = 0;
           }
           len1 = is->audio_buf_size - is->audio_buf_index;
           if (len1 > len)
               len1 = len;
           memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
           len -= len1;
           stream += len1;
           is->audio_buf_index += len1;
       }
    }


    int decode_interrupt_cb(void) {
       return (global_video_state && global_video_state->quit);
    }

    void alloc_picture(void *userdata) {

       VideoState *is = (VideoState *)userdata;
       VideoPicture *vp;

       vp = &is->pictq[is->pictq_windex];
       if (vp->bmp) {
           // we already have one make another, bigger/smaller
           SDL_FreeYUVOverlay(vp->bmp);
       }
       // Allocate a place to put our YUV image on that screen
       SDL_LockMutex(screen_mutex);
       vp->bmp = SDL_CreateYUVOverlay(is->video_st->codec->width,
           is->video_st->codec->height,
           SDL_YV12_OVERLAY,
           screen);
       SDL_UnlockMutex(screen_mutex);
       vp->width = is->video_st->codec->width;
       vp->height = is->video_st->codec->height;
       vp->allocated = 1;
    }

    int queue_picture(VideoState *is, AVFrame *pFrame) {

       VideoPicture *vp;
       int dst_pix_fmt;
       AVPicture pict;

       /* wait until we have space for a new pic */
       SDL_LockMutex(is->pictq_mutex);
       while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
           !is->quit) {
           SDL_CondWait(is->pictq_cond, is->pictq_mutex);
       }
       SDL_UnlockMutex(is->pictq_mutex);

       if (is->quit)
           return -1;

       // windex is set to 0 initially
       vp = &is->pictq[is->pictq_windex];

       /* allocate or resize the buffer! */
       if (!vp->bmp ||
           vp->width != is->video_st->codec->width ||
           vp->height != is->video_st->codec->height) {
           SDL_Event event;

           vp->allocated = 0;
           alloc_picture(is);
           if (is->quit) {
               return -1;
           }
       }

       if (vp->bmp) {

           SDL_LockYUVOverlay(vp->bmp);

           dst_pix_fmt = AV_PIX_FMT_YUV420P;
           /* point pict at the queue */

           pict.data[0] = vp->bmp->pixels[0];
           pict.data[1] = vp->bmp->pixels[2];
           pict.data[2] = vp->bmp->pixels[1];

           pict.linesize[0] = vp->bmp->pitches[0];
           pict.linesize[1] = vp->bmp->pitches[2];
           pict.linesize[2] = vp->bmp->pitches[1];

           // Convert the image into YUV format that SDL uses
           sws_scale(is->sws_ctx, (uint8_t const * const *)pFrame->data,
               pFrame->linesize, 0, is->video_st->codec->height,
               pict.data, pict.linesize);

           SDL_UnlockYUVOverlay(vp->bmp);
           /* now we inform our display thread that we have a pic ready */
           if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE) {
               is->pictq_windex = 0;
           }
           SDL_LockMutex(is->pictq_mutex);
           is->pictq_size++;
           SDL_UnlockMutex(is->pictq_mutex);
       }
       return 0;

    }

    int video_thread(void *arg) {
       VideoState *is = (VideoState *)arg;
       AVPacket pkt1, packet;
       int frameFinished;
       AVFrame *pFrame = av_frame_alloc();

       for (;;) {
           if (packet_queue_get(&is->videoq, &packet, 1) < 0) {
               // means we quit getting packets
               break;
           }

           // Decode video frame
           avcodec_decode_video2(is->video_ctx, pFrame, &frameFinished, &packet);

           // Did we get a video frame?
           if (frameFinished) {
               if (queue_picture(is, pFrame) &lt; 0) {
                   break;
               }
           }
           av_free_packet(&packet);
       }
       av_free(pFrame);
       return 0;
    }

    int stream_component_open(VideoState *is, int stream_index) {

       AVFormatContext *pFormatCtx = is->pFormatCtx;
       AVCodecContext *codecCtx = nullptr;
       AVCodec *codec = nullptr;
       SDL_AudioSpec wanted_spec, spec;
       AVCodecParameters *CodecParameters = NULL;

       if (stream_index < 0 || stream_index >= pFormatCtx->nb_streams) {
           return -1;
       }

       CodecParameters = pFormatCtx->streams[stream_index]->codecpar;
       codecCtx = pFormatCtx->streams[stream_index]->codec;
       codec = avcodec_find_decoder(codecCtx->codec_id);
       if (!codec) {
           fprintf(stderr, "Unsupported codec!\n");
           return -1;
       }

       codecCtx = avcodec_alloc_context3(codec);

       if (avcodec_parameters_to_context(codecCtx, CodecParameters) < 0)
       {
           fprintf(stderr, "Couldn't copy codec context");
           return -1;
       }


       if (codecCtx->codec_type == AVMEDIA_TYPE_AUDIO) {
           // Set audio settings from codec info
           wanted_spec.freq = codecCtx->sample_rate;
           wanted_spec.format = AUDIO_S16SYS;
           wanted_spec.channels = codecCtx->channels;
           wanted_spec.silence = 0;
           wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
           wanted_spec.callback = audio_callback;
           wanted_spec.userdata = is;

           if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
               fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
               return -1;
           }
       }


       if (avcodec_open2(codecCtx, codec, NULL) < 0) {
           fprintf(stderr, "Unsupported codec!\n");
           return -1;
       }

       switch (codecCtx->codec_type) {
       case AVMEDIA_TYPE_AUDIO:
           is->audioStream = stream_index;
           is->audio_st = pFormatCtx->streams[stream_index];
           is->audio_ctx = codecCtx;
           is->audio_buf_size = 0;
           is->audio_buf_index = 0;
           memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
           packet_queue_init(&is->audioq);
           SDL_PauseAudio(0);
           break;
       case AVMEDIA_TYPE_VIDEO:
           is->videoStream = stream_index;
           is->video_st = pFormatCtx->streams[stream_index];
           is->video_ctx = codecCtx;

           packet_queue_init(&is->videoq);

           is->video_tid = SDL_CreateThread(video_thread, is);
           is->sws_ctx = sws_getContext(is->video_st->codec->width, is->video_st->codec->height,
               is->video_st->codec->pix_fmt, is->video_st->codec->width,
               is->video_st->codec->height, AV_PIX_FMT_YUV420P,
               SWS_BILINEAR, NULL, NULL, NULL
           );
           break;
       default:
           break;
       }
       return 0;
    }



    static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque) {
       SDL_Event event;
       event.type = FF_REFRESH_EVENT;
       event.user.data1 = opaque;
       SDL_PushEvent(&event);
       return 0; /* 0 means stop timer */
    }

    /* schedule a video refresh in 'delay' ms */
    static void schedule_refresh(VideoState *is, int delay) {
       SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
    }


    void video_display(VideoState *is) {

       SDL_Rect rect;
       VideoPicture *vp;
       float aspect_ratio;
       int w, h, x, y;
       int i;

       vp = &is->pictq[is->pictq_rindex];
       if (vp->bmp) {
           if (is->video_st->codec->sample_aspect_ratio.num == 0) {
               aspect_ratio = 0;
           }
           else {
               aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio) *
                   is->video_st->codec->width / is->video_st->codec->height;
           }
           if (aspect_ratio <= 0.0) {
               aspect_ratio = (float)is->video_st->codec->width /
                   (float)is->video_st->codec->height;
           }
           h = screen->h;
           w = ((int)rint(h * aspect_ratio)) & -3;
           if (w > screen->w) {
               w = screen->w;
               h = ((int)rint(w / aspect_ratio)) & -3;
           }
           x = (screen->w - w) / 2;
           y = (screen->h - h) / 2;

           rect.x = x;
           rect.y = y;
           rect.w = w;
           rect.h = h;
           SDL_LockMutex(screen_mutex);
           SDL_DisplayYUVOverlay(vp->bmp, &rect);
           SDL_UnlockMutex(screen_mutex);
       }
    }

    void video_refresh_timer(void *userdata) {

       VideoState *is = (VideoState *)userdata;
       VideoPicture *vp;

       if (is->video_st) {
           if (is->pictq_size == 0) {
               schedule_refresh(is, 1);
           }
           else {
               vp = &is->pictq[is->pictq_rindex];
               /* Timing code goes here */

               schedule_refresh(is, 80);

               /* show the picture! */
               video_display(is);

               /* update queue for next picture! */
               if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE) {
                   is->pictq_rindex = 0;
               }
               SDL_LockMutex(is->pictq_mutex);
               is->pictq_size--;
               SDL_CondSignal(is->pictq_cond);
               SDL_UnlockMutex(is->pictq_mutex);
           }
       }
       else {
           schedule_refresh(is, 100);
       }
    }


    int decode_thread(void *arg) {

       VideoState *is = (VideoState *)arg;
       AVFormatContext* pFormatCtx = avformat_alloc_context();
       AVPacket pkt1, *packet = &pkt1;

       int video_index = -1;
       int audio_index = -1;
       int i;
       int ret;

       is->videoStream = -1;
       is->audioStream = -1;

       global_video_state = is;

       ret = avformat_open_input(&pFormatCtx, is->filename, nullptr, nullptr);

       // Open video file
       if (ret != 0)
           printf("Failed to open the file");
           //return -1; // Couldn't open file

       is->pFormatCtx = pFormatCtx;

       // Retrieve stream information
       if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
           return -1; // Couldn't find stream information

                      // Dump information about file onto standard error
       av_dump_format(pFormatCtx, 0, is->filename, 0);

       // Find the first video stream

       for (i = 0; i < pFormatCtx->nb_streams; i++) {
           if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO &&
               video_index < 0) {
               video_index = i;
           }
           if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO &&
               audio_index < 0) {
               audio_index = i;
           }
       }

       if (video_index >= 0) {
           stream_component_open(is, video_index);
       }

       if (audio_index >= 0) {
           stream_component_open(is, audio_index);
       }



       if (is->videoStream < 0) {
           fprintf(stderr, "%s: could not open Video codecs\n", is->filename);
           system("pause");
           goto fail;
       }

       if (is->audioStream < 0) {
           fprintf(stderr, "%s: could not open Audio codecs\n", is->filename);
           system("pause");
           goto fail;
       }

       // main decode loop

       for (;;) {
           if (is->quit) {
               break;
           }
           // seek stuff goes here
           if (is->audioq.size > MAX_AUDIOQ_SIZE ||
               is->videoq.size > MAX_VIDEOQ_SIZE) {
               SDL_Delay(10);
               continue;
           }
           if (av_read_frame(is->pFormatCtx, packet) < 0) {
               if (is->pFormatCtx->pb->error == 0) {
                   SDL_Delay(100); /* no error; wait for user input */
                   continue;
               }
               else {
                   break;
               }
           }
           // Is this a packet from the video stream?
           if (packet->stream_index == is->videoStream) {
               packet_queue_put(&is->videoq, packet);
           }
           else if (packet->stream_index == is->audioStream) {
               packet_queue_put(&is->audioq, packet);
           }
           else {
               av_free_packet(packet);
           }
       }
       /* all done - wait for it */
       while (!is->quit) {
           SDL_Delay(100);
       }

    fail:
       if (1) {
           SDL_Event event;
           event.type = FF_QUIT_EVENT;
           event.user.data1 = is;
           SDL_PushEvent(&event);
       }
       return 0;
    }


    int main() {

       SDL_Event       event;

       VideoState      *is;

       is = (VideoState *)av_mallocz(sizeof(VideoState));

       // Register all formats and codecs
       av_register_all();

       if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
           fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
           exit(1);
       }

       // Make a screen to put our video
    #ifndef __DARWIN__
       screen = SDL_SetVideoMode(640, 480, 0, 0);
    #else
       screen = SDL_SetVideoMode(640, 480, 24, 0);
    #endif
       if (!screen) {
           fprintf(stderr, "SDL: could not set video mode - exiting\n");
           exit(1);
       }

       screen_mutex = SDL_CreateMutex();

       strncpy(is->filename, "C:/vidoefile.mp4", sizeof(is->filename));

       is->pictq_mutex = SDL_CreateMutex();
       is->pictq_cond = SDL_CreateCond();

       schedule_refresh(is, 40);

       is->parse_tid = SDL_CreateThread(decode_thread, is);
       if (!is->parse_tid) {
           av_free(is);
           return -1;
       }

       for (;;) {

           SDL_WaitEvent(&event);
           switch (event.type) {
           case FF_QUIT_EVENT:
           case SDL_QUIT:
               is->quit = 1;
               SDL_Quit();
               return 0;
               break;
           case FF_REFRESH_EVENT:
               video_refresh_timer(event.user.data1);
               break;
           default:
               break;
           }
       }

       printf("Done playing video\n");
       system("pause");
       return 0;

    }

    I just need the sound to be played normally, without noise.
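
    For what it is worth, one common cause of exactly this symptom in code based on that tutorial is a sample-format mismatch: SDL is opened with AUDIO_S16SYS, but recent FFmpeg audio decoders typically return planar float samples (for example AV_SAMPLE_FMT_FLTP for AAC or MP3), and audio_decode_frame() above copies only frame data[0] byte for byte. A minimal sketch of the usual fix, converting each decoded frame to interleaved S16 with libswresample, is shown below; it deliberately sticks to the same pre-5.1 channel API as the question's code, and the helper names are illustrative, not part of the original program.

    // Hedged sketch (helper names are illustrative): convert decoded audio to
    // interleaved signed 16-bit samples, matching the AUDIO_S16SYS format that
    // SDL_OpenAudio() was given, instead of memcpy()ing raw frame data.
    extern "C" {
    #include <libavcodec/avcodec.h>
    #include <libswresample/swresample.h>
    #include <libavutil/channel_layout.h>
    }

    static SwrContext *swr_ctx = nullptr;

    // Call once after avcodec_open2() on the audio codec context.
    int init_audio_resampler(AVCodecContext *ctx) {
        swr_ctx = swr_alloc_set_opts(nullptr,
            av_get_default_channel_layout(ctx->channels), AV_SAMPLE_FMT_S16, ctx->sample_rate,
            av_get_default_channel_layout(ctx->channels), ctx->sample_fmt,   ctx->sample_rate,
            0, nullptr);
        return swr_ctx ? swr_init(swr_ctx) : -1;
    }

    // Use in place of the memcpy() in audio_decode_frame(); returns bytes written.
    int frame_to_s16(AVCodecContext *ctx, AVFrame *frame, uint8_t *audio_buf) {
        uint8_t *out[] = { audio_buf };
        int samples = swr_convert(swr_ctx, out, frame->nb_samples,
                                  (const uint8_t **)frame->data, frame->nb_samples);
        if (samples < 0) return -1;
        return samples * ctx->channels * 2;   // S16 = 2 bytes per sample per channel
    }

    With that in place, data_size in audio_decode_frame() becomes the value returned by frame_to_s16(is->audio_ctx, &is->audio_frame, audio_buf) rather than the result of av_samples_get_buffer_size(), so the SDL callback only ever copies valid interleaved S16 data.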

  • FFmpeg stdout emits more frames than it should

    29 December 2022, by distante

    I am running this command to get two images per second from an RTSP stream:

    const ffmpeg = spawn(this.ffmpeg, [
      // Set the frame rate of the input video to 2 frames per second
      '-r',
      '2',
      // Specify that the RTSP stream should use TCP as the transport protocol
      '-rtsp_transport',
      'tcp',
      // Set the input source to the RTSP stream specified by the `rtspUrl` variable
      '-i',
      rtspUrl,
      // Set the video filter to only output two frames per second (final video)
      '-vf',
      'fps=2',
      // Set the output format to "image2pipe," which specifies that the output should be written to stdout as a series of images in a pipe
      '-f',
      'image2pipe',
      // Overwrite any existing output file without prompting for confirmation
      '-y',
      // Set the output destination to stdout
      '-',
    ]);

    I am subscribing to stdout and saving each emitted chunk to a file (just for testing; I need to process the data later):

    let i = 0;
    from(ffmpeg.stdout)
      .pipe(
        concatMap((data) => {
          i++
          return writeFile(`file_${i}`, data).then(() => i);
        }),
      )
      .subscribe((i) => {
        this.imageLogger.log(`file_${i} saved`);
      });

    The thing is, I am getting a lot more than 2 images per second:

    i       | [Nest] 431  - 12/28/2022, 1:16:09 PM     LOG [Image Logger] stderr: ffmpeg version 5.0.1-static https://johnvansickle.com/ffmpeg/  Copyright (c) 2000-2022 the FFmpeg developers
    api       |   built with gcc 8 (Debian 8.3.0-6)
    api       |   configuration: --enable-gpl --enable-version3 --enable-static --disable-debug --disable-ffplay --disable-indev=sndio --disable-outdev=sndio --cc=gcc --enable-fontconfig --enable-frei0r --enable-gnutls --enable-gmp --enable-libgme --enable-gray --enable-libaom --enable-libfribidi --enable-libass --enable-libvmaf --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librubberband --enable-libsoxr --enable-libspeex --enable-libsrt --enable-libvorbis --enable-libopus --enable-libtheora --enable-libvidstab --enable-libvo-amrwbenc --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libdav1d --enable-libxvid --enable-libzvbi --enable-libzimg
    api       |   libavutil      57. 17.100 / 57. 17.100
    api       |   libavcodec     59. 18.100 / 59. 18.100
    api       |   libavformat    59. 16.100 / 59. 16.100
    api       |   libavdevice    59.  4.100 / 59.  4.100
    api       |   libavfilter     8. 24.100 /  8. 24.100
    api       |   libswscale      6.  4.100 /  6.  4.100
    api       |   libswresample   4.  3.100 /  4.  3.100
    api       |   libpostproc    56.  3.100 / 56.  3.100
    api       |   configuration: --enable-gpl --enable-version3 --enable-static --disable-debug --disable-ffplay --disable-indev=sndio --disable-outdev=sndio --cc=gcc --enable-fontconfig --enable-frei0r --enable-gnutls --enable-gmp --enable-libgme --enable-gray --enable-libaom --enable-libfribidi --enable-libass --enable-libvmaf --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librubberband --enable-libsoxr --enable-libspeex --enable-libsrt --enable-libvorbis --enable-libopus --enable-libtheora --enable-libvidstab --enable-libvo-amrwbenc --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libdav1d --enable-libxvid --enable-libzvbi --enable-libzimg
    api       |   libavutil      57. 17.100 / 57. 17.100
    api       |   libavcodec     59. 18.100 / 59. 18.100
    api       |   libavformat    59. 16.100 / 59. 16.100
    api       |   libavdevice    59.  4.100 / 59.  4.100
    api       |   libavfilter     8. 24.100 /  8. 24.100
    api       |   libswscale      6.  4.100 /  6.  4.100
    api       |   libswresample   4.  3.100 /  4.  3.100
    api       |   libpostproc    56.  3.100 / 56.  3.100
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr: Guessed Channel Layout for Input Stream #0.1 : mono
    api       | Input #0, rtsp, from 'rtsp://user:password@192.68.45.54:554/stream2':
    api       |   Metadata:
    api       |     title           : Session streamed by "TP-LINK RTSP Server"
    api       |     comment         : stream2
    api       |   Duration: N/A, start: 0.000000, bitrate: N/A
    api       |   Stream #0:0: Video: h264 (High), yuvj420p(pc, bt709, progressive), 640x360, 15 fps, 28.58 tbr, 90k tbn
    api       |   Stream #0:1: Audio: pcm_alaw, 8000 Hz, mono, s16, 64 kb/s
    api       |
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr: Stream mapping:
    api       |   Stream #0:0 -> #0:0 (h264 (native) -> mjpeg (native))
    api       | Press [q] to stop, [?] for help
    api       |
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr: Output #0, image2pipe, to 'pipe:':
    api       |   Metadata:
    api       |     title           : Session streamed by "TP-LINK RTSP Server"
    api       |     comment         : stream2
    api       |     encoder         : Lavf59.16.100
    api       |   Stream #0:0: Video: mjpeg, yuvj420p(pc, bt709, progressive), 640x360, q=2-31, 200 kb/s, 2 fps, 2 tbn
    api       |
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr:     Metadata:
    api       |       encoder         : Lavc59.18.100 mjpeg
    api       |     Side data:
    api       |       cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: N/A
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr: frame=    1 fps=0.0 q=5.0 size=      23kB time=00:00:00.50 bitrate= 383.9kbits/s speed=59.4x
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_1.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_2.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_3.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_4.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_5.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_6.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_7.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_8.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_9.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_10.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_11.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_12.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_13.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_14.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_15.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_16.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_17.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_18.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_19.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_20.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_21.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_22.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_23.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_24.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_25.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_26.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] stderr: frame=   27 fps=0.0 q=16.0 size=     559kB time=00:00:13.50 bitrate= 339.3kbits/s speed=25.3x
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_27.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_28.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_29.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_30.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_31.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:12 PM     LOG [Image Logger] file_32.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_33.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] stderr: frame=   34 fps= 33 q=16.6 size=     632kB time=00:00:17.00 bitrate= 304.6kbits/s speed=16.3x
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_34.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_35.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_36.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_37.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_38.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_39.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_40.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_41.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] stderr: frame=   41 fps= 26 q=16.5 size=     705kB time=00:00:20.50 bitrate= 281.7kbits/s speed=13.2x
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_42.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_43.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_44.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_45.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:13 PM     LOG [Image Logger] file_46.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_47.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_48.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] stderr: frame=   48 fps= 23 q=16.4 size=     779kB time=00:00:24.00 bitrate= 265.9kbits/s speed=11.6x
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_49.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_50.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_51.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_52.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_53.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_54.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_55.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] stderr: frame=   56 fps= 22 q=16.1 size=     865kB time=00:00:28.00 bitrate= 253.0kbits/s speed=10.9x
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_56.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_57.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_58.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_59.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:14 PM     LOG [Image Logger] file_60.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_61.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_62.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_63.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] stderr: frame=   63 fps= 20 q=15.5 size=     939kB time=00:00:31.50 bitrate= 244.3kbits/s speed=10.2x
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_64.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_65.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_66.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_67.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_68.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_69.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_70.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] stderr: frame=   70 fps= 19 q=15.4 size=    1017kB time=00:00:35.00 bitrate= 238.0kbits/s speed= 9.7x
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_71.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_72.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_73.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_74.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:15 PM     LOG [Image Logger] file_75.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_76.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_77.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] stderr: frame=   78 fps= 19 q=15.1 size=    1106kB time=00:00:39.00 bitrate= 232.4kbits/s speed=9.48x
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_78.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_79.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_80.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_81.jpg saved
    api       | [Nest] 431  - 12/28/2022, 1:16:16 PM     LOG [Image Logger] file_82.jpg saved

    Am I missing something? Why do I not get only 2 stdout events per second?


    Edit:

    ffmpeg version 5.0.1-static
    node: 18.12.1

    Edit 2:


    When I run this command in the terminal:

    ffmpeg -r 2 -rtsp_transport tcp -i "rtsp://user:password@192.68.45.54:554/stream2" -vf fps=2 -timecode 00:00:00:00 test.mp4

    The generated video has a frame rate of 2, but it looks like it is in slow motion. Since the source video also shows a timer, I can see that it takes something like 12 "frames" to go from one second to the next.


    This is the output:

    fmpeg version 5.1.2 Copyright (c) 2000-2022 the FFmpeg developers
      built with Apple clang version 14.0.0 (clang-1400.0.29.202)
      configuration: --prefix=/usr/local/Cellar/ffmpeg/5.1.2_1 --enable-shared --enable-pthreads --enable-version3 --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libbluray --enable-libdav1d --enable-libmp3lame --enable-libopus --enable-librav1e --enable-librist --enable-librubberband --enable-libsnappy --enable-libsrt --enable-libtesseract --enable-libtheora --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libspeex --enable-libsoxr --enable-libzmq --enable-libzimg --disable-libjack --disable-indev=jack --enable-videotoolbox
      libavutil      57. 28.100 / 57. 28.100
      libavcodec     59. 37.100 / 59. 37.100
      libavformat    59. 27.100 / 59. 27.100
      libavdevice    59.  7.100 / 59.  7.100
      libavfilter     8. 44.100 /  8. 44.100
      libswscale      6.  7.100 /  6.  7.100
      libswresample   4.  7.100 /  4.  7.100
      libpostproc    56.  6.100 / 56.  6.100
    Input #0, rtsp, from 'rtsp://user:password@192.68.45.54:554/stream2':
      Metadata:
        title           : Session streamed by "TP-LINK RTSP Server"
        comment         : stream2
      Duration: N/A, start: 0.000000, bitrate: N/A
      Stream #0:0: Video: h264 (High), yuvj420p(pc, bt709, progressive), 640x360, 15 fps, 28.58 tbr, 90k tbn
      Stream #0:1: Audio: pcm_alaw, 8000 Hz, mono, s16, 64 kb/s
    File 'test.mp4' already exists. Overwrite? [y/N] y
    Stream mapping:
      Stream #0:0 -> #0:0 (h264 (native) -> h264 (libx264))
      Stream #0:1 -> #0:1 (pcm_alaw (native) -> aac (native))
    Press [q] to stop, [?] for help
    [aac @ 0x7fa79fb061c0] Too many bits 8832.000000 > 6144 per frame requested, clamping to max
    [libx264 @ 0x7fa79fb04e00] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
    [libx264 @ 0x7fa79fb04e00] profile High, level 2.2, 4:2:0, 8-bit
    [libx264 @ 0x7fa79fb04e00] 264 - core 164 r3095 baee400 - H.264/MPEG-4 AVC codec - Copyleft 2003-2022 - http://www.videolan.org/x264.html - options: cabac=1 ref=3 deblock=1:0:0 analyse=0x3:0x113 me=hex subme=7 psy=1 psy_rd=1.00:0.00 mixed_ref=1 me_range=16 chroma_me=1 trellis=1 8x8dct=1 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=-2 threads=11 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=3 b_pyramid=2 b_adapt=1 b_bias=0 direct=1 weightb=1 open_gop=0 weightp=2 keyint=250 keyint_min=2 scenecut=40 intra_refresh=0 rc_lookahead=40 rc=crf mbtree=1 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=1:1.00
    [mp4 @ 0x7fa78f104780] Using non-standard frame rate 2/1
        Last message repeated 1 times
    Output #0, mp4, to 'test.mp4':
      Metadata:
        title           : Session streamed by "TP-LINK RTSP Server"
        comment         : stream2
        timecode        : 00:00:00:00
        encoder         : Lavf59.27.100
      Stream #0:0: Video: h264 (avc1 / 0x31637661), yuvj420p(pc, bt709, progressive), 640x360, q=2-31, 2 fps, 16384 tbn
        Metadata:
          encoder         : Lavc59.37.100 libx264
        Side data:
          cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: N/A
      Stream #0:1: Audio: aac (LC) (mp4a / 0x6134706D), 8000 Hz, mono, fltp, 48 kb/s
        Metadata:
          encoder         : Lavc59.37.100 aac
    frame=  186 fps= 28 q=20.0 size=     256kB time=00:01:04.00 bitrate=  32.8kbits/s speed=9.58x

    [q] command received. Exiting.

    frame=  190 fps= 27 q=-1.0 Lsize=     749kB time=00:01:33.50 bitrate=  65.6kbits/s speed=13.2x
    video:708kB audio:35kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.836704%
    [libx264 @ 0x7fa79fb04e00] frame I:1     Avg QP:10.13  size: 45352
    [libx264 @ 0x7fa79fb04e00] frame P:74    Avg QP:12.27  size:  7161
    [libx264 @ 0x7fa79fb04e00] frame B:115   Avg QP:22.28  size:  1294
    [libx264 @ 0x7fa79fb04e00] consecutive B-frames: 14.7%  9.5% 12.6% 63.2%
    [libx264 @ 0x7fa79fb04e00] mb I  I16..4: 15.0% 14.0% 71.0%
    [libx264 @ 0x7fa79fb04e00] mb P  I16..4:  0.6%  1.5%  2.4%  P16..4: 20.4%  4.9%  5.9%  0.0%  0.0%    skip:64.3%
    [libx264 @ 0x7fa79fb04e00] mb B  I16..4:  0.0%  0.1%  0.3%  B16..8: 15.9%  2.4%  1.5%  direct: 0.9%  skip:78.9%  L0:45.5% L1:48.6% BI: 5.9%
    [libx264 @ 0x7fa79fb04e00] 8x8 transform intra:27.4% inter:64.7%
    [libx264 @ 0x7fa79fb04e00] coded y,uvDC,uvAC intra: 81.7% 60.2% 46.5% inter: 9.4% 4.2% 1.5%
    [libx264 @ 0x7fa79fb04e00] i16 v,h,dc,p: 44% 10% 19% 27%
    [libx264 @ 0x7fa79fb04e00] i8 v,h,dc,ddl,ddr,vr,hd,vl,hu: 52% 14% 13%  3%  2%  3%  3%  5%  4%
    [libx264 @ 0x7fa79fb04e00] i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 35% 18% 13%  6%  5%  6%  5%  6%  7%
    [libx264 @ 0x7fa79fb04e00] i8c dc,h,v,p: 45% 18% 27%  9%
    [libx264 @ 0x7fa79fb04e00] Weighted P-Frames: Y:0.0% UV:0.0%
    [libx264 @ 0x7fa79fb04e00] ref P L0: 78.9%  7.6% 11.4%  2.1%
    [libx264 @ 0x7fa79fb04e00] ref B L0: 90.8%  8.0%  1.2%
    [libx264 @ 0x7fa79fb04e00] ref B L1: 97.0%  3.0%
    [libx264 @ 0x7fa79fb04e00] kb/s:60.98
    [aac @ 0x7fa79fb061c0] Qavg: 64944.246