
Other articles (56)

  • Authorizations overridden by plugins

    27 April 2010, by Mediaspip core

    autoriser_auteur_modifier() so that visitors can modify their own information on the authors page

  • HTML5 audio and video support

    13 April 2011

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
    For older browsers the Flowplayer flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

  • HTML5 audio and video support

    10 April 2011

    MediaSPIP uses the HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    For older browsers, the Flowplayer Flash fallback is used.
    The HTML5 player used was created specifically for MediaSPIP: its appearance is fully customizable to match a chosen theme.
    These technologies make it possible to deliver video and audio both to conventional computers (...)

On other sites (10040)

  • FFmpeg.Autogen Wrapper: Issue with Zero-Sized Atom Boxes in MP4 Output

    11 June 2024, by Alexander Jansson

    I just started learning FFmpeg through the FFmpeg.Autogen wrapper version 5.1 in C#, with FFmpeg shared libs version 5.1. I'm trying to write a class that screen-records using gdigrab and produces streamable MP4 to a buffer/event. Everything seems to work as expected, with no errors, except that the output stream produces atom boxes with size 0 (and hence a small file as well); no data seems to end up in the boxes. The "debug test mp4 file" was analyzed with MP4Box and the box info is provided in the thread.

    


    To be more specific: why does this code produce empty atom boxes, and could someone edit my code so that the produced data actually contains frame data from the gdigrab capture? (A muxing sketch follows the box dump below.)

    


    Code:

    


     public unsafe class ScreenStreamer : IDisposable
 {
     private readonly AVCodec* productionCodec;
     private readonly AVCodec* screenCaptureAVCodec;
     private readonly AVCodecContext* productionAVCodecContext;
     private readonly AVFormatContext* productionFormatContext;
     private readonly AVCodecContext* screenCaptureAVCodecContext;
     private readonly AVDictionary* productionAVCodecOptions;
     private readonly AVInputFormat* screenCaptureInputFormat;
     private readonly AVFormatContext* screenCaptureInputFormatContext;
     private readonly int gDIGrabVideoStreamIndex;
     private readonly System.Drawing.Size screenBounds;
     private readonly int _produceAtleastAmount;
     public EventHandler<byte[]> OnNewVideoDataProduced;
     private MemoryStream unsafeToManagedBridgeBuffer;
     private CancellationTokenSource cancellationTokenSource;
     private Task recorderTask;

     public ScreenStreamer(int fps, int bitrate, int screenIndex, int produceAtleastAmount = 1000)
     {
         ffmpeg.avdevice_register_all();
         ffmpeg.avformat_network_init();
         recorderTask = Task.CompletedTask;
         cancellationTokenSource = new CancellationTokenSource();
         unsafeToManagedBridgeBuffer = new MemoryStream();
         _produceAtleastAmount = produceAtleastAmount;

         // Allocate and initialize production codec and context
         productionCodec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
         if (productionCodec == null) throw new ApplicationException("Could not find encoder for codec ID H264.");

         productionAVCodecContext = ffmpeg.avcodec_alloc_context3(productionCodec);
         if (productionAVCodecContext == null) throw new ApplicationException("Could not allocate video codec context.");

         // Set codec parameters
         screenBounds = RetrieveScreenBounds(screenIndex);
         productionAVCodecContext->width = screenBounds.Width;
         productionAVCodecContext->height = screenBounds.Height;
         productionAVCodecContext->time_base = new AVRational() { den = fps, num = 1 };
         productionAVCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
         productionAVCodecContext->bit_rate = bitrate;

         int result = ffmpeg.av_opt_set(productionAVCodecContext->priv_data, "preset", "veryfast", 0);
         if (result != 0)
         {
             throw new ApplicationException($"Failed to set options with error code {result}.");
         }

         // Open codec
         fixed (AVDictionary** pm = &productionAVCodecOptions)
         {
             result = ffmpeg.av_dict_set(pm, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to set dictionary with error code {result}.");
             }

             result = ffmpeg.avcodec_open2(productionAVCodecContext, productionCodec, pm);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open codec with error code {result}.");
             }
         }

         // Allocate and initialize screen capture codec and context
         screenCaptureInputFormat = ffmpeg.av_find_input_format("gdigrab");
         if (screenCaptureInputFormat == null) throw new ApplicationException("Could not find input format gdigrab.");

         fixed (AVFormatContext** ps = &screenCaptureInputFormatContext)
         {
             result = ffmpeg.avformat_open_input(ps, "desktop", screenCaptureInputFormat, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open input with error code {result}.");
             }

             result = ffmpeg.avformat_find_stream_info(screenCaptureInputFormatContext, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to find stream info with error code {result}.");
             }
         }

         gDIGrabVideoStreamIndex = -1;
         for (int i = 0; i < screenCaptureInputFormatContext->nb_streams; i++)
         {
             if (screenCaptureInputFormatContext->streams[i]->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
             {
                 gDIGrabVideoStreamIndex = i;
                 break;
             }
         }

         if (gDIGrabVideoStreamIndex < 0)
         {
             throw new ApplicationException("Failed to find video stream in input.");
         }

         AVCodecParameters* codecParameters = screenCaptureInputFormatContext->streams[gDIGrabVideoStreamIndex]->codecpar;
         screenCaptureAVCodec = ffmpeg.avcodec_find_decoder(codecParameters->codec_id);
         if (screenCaptureAVCodec == null)
         {
             throw new ApplicationException("Could not find decoder for screen capture.");
         }

         screenCaptureAVCodecContext = ffmpeg.avcodec_alloc_context3(screenCaptureAVCodec);
         if (screenCaptureAVCodecContext == null)
         {
             throw new ApplicationException("Could not allocate screen capture codec context.");
         }

         result = ffmpeg.avcodec_parameters_to_context(screenCaptureAVCodecContext, codecParameters);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to copy codec parameters to context with error code {result}.");
         }

         result = ffmpeg.avcodec_open2(screenCaptureAVCodecContext, screenCaptureAVCodec, null);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to open screen capture codec with error code {result}.");
         }
     }

     public void Start()
     {
         recorderTask = Task.Run(() =>
         {
             AVPacket* packet = ffmpeg.av_packet_alloc();
             AVFrame* rawFrame = ffmpeg.av_frame_alloc();
             AVFrame* compatibleFrame = null;
             byte* dstBuffer = null;

             try
             {
                 while (!cancellationTokenSource.Token.IsCancellationRequested)
                 {
                     if (ffmpeg.av_read_frame(screenCaptureInputFormatContext, packet) >= 0)
                     {
                         if (packet->stream_index == gDIGrabVideoStreamIndex)
                         {
                             int response = ffmpeg.avcodec_send_packet(screenCaptureAVCodecContext, packet);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a packet to the decoder: {response}");
                             }

                             response = ffmpeg.avcodec_receive_frame(screenCaptureAVCodecContext, rawFrame);
                             if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                             {
                                 continue;
                             }
                             else if (response < 0)
                             {
                                 throw new ApplicationException($"Error while receiving a frame from the decoder: {response}");
                             }

                             compatibleFrame = ConvertToCompatiblePixelFormat(rawFrame, out dstBuffer);

                             response = ffmpeg.avcodec_send_frame(productionAVCodecContext, compatibleFrame);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a frame to the encoder: {response}");
                             }

                             while (response >= 0)
                             {
                                 response = ffmpeg.avcodec_receive_packet(productionAVCodecContext, packet);
                                 if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                                 {
                                     break;
                                 }
                                 else if (response < 0)
                                 {
                                     throw new ApplicationException($"Error while receiving a packet from the encoder: {response}");
                                 }

                                 using var packetStream = new UnmanagedMemoryStream(packet->data, packet->size);
                                 packetStream.CopyTo(unsafeToManagedBridgeBuffer);
                                 byte[] managedBytes = unsafeToManagedBridgeBuffer.ToArray();
                                 OnNewVideoDataProduced?.Invoke(this, managedBytes);
                                 unsafeToManagedBridgeBuffer.SetLength(0);
                             }
                         }
                     }
                     ffmpeg.av_packet_unref(packet);
                     ffmpeg.av_frame_unref(rawFrame);
                     if (compatibleFrame != null)
                     {
                         ffmpeg.av_frame_unref(compatibleFrame);
                         ffmpeg.av_free(dstBuffer);
                     }
                 }
             }
             finally
             {
                 ffmpeg.av_packet_free(&packet);
                 ffmpeg.av_frame_free(&rawFrame);
                 if (compatibleFrame != null)
                 {
                     ffmpeg.av_frame_free(&compatibleFrame);
                 }
             }
         });
     }

     public AVFrame* ConvertToCompatiblePixelFormat(AVFrame* srcFrame, out byte* dstBuffer)
     {
         AVFrame* dstFrame = ffmpeg.av_frame_alloc();
         int buffer_size = ffmpeg.av_image_get_buffer_size(productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);
         byte_ptrArray4 dstData = new byte_ptrArray4();
         int_array4 dstLinesize = new int_array4();
         dstBuffer = (byte*)ffmpeg.av_malloc((ulong)buffer_size);
         ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, dstBuffer, productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);

         dstFrame->format = (int)productionAVCodecContext->pix_fmt;
         dstFrame->width = productionAVCodecContext->width;
         dstFrame->height = productionAVCodecContext->height;
         dstFrame->data.UpdateFrom(dstData);
         dstFrame->linesize.UpdateFrom(dstLinesize);

         SwsContext* swsCtx = ffmpeg.sws_getContext(
             srcFrame->width, srcFrame->height, (AVPixelFormat)srcFrame->format,
             productionAVCodecContext->width, productionAVCodecContext->height, productionAVCodecContext->pix_fmt,
             ffmpeg.SWS_BILINEAR, null, null, null);

         if (swsCtx == null)
         {
             throw new ApplicationException("Could not initialize the conversion context.");
         }

         ffmpeg.sws_scale(swsCtx, srcFrame->data, srcFrame->linesize, 0, srcFrame->height, dstFrame->data, dstFrame->linesize);
         ffmpeg.sws_freeContext(swsCtx);
         return dstFrame;
     }

     private System.Drawing.Size RetrieveScreenBounds(int screenIndex)
     {
         return new System.Drawing.Size(1920, 1080);
     }

     public void Dispose()
     {
         cancellationTokenSource?.Cancel();
         recorderTask?.Wait();
         cancellationTokenSource?.Dispose();
         recorderTask?.Dispose();
         unsafeToManagedBridgeBuffer?.Dispose();

         fixed (AVCodecContext** p = &productionAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }
         fixed (AVCodecContext** p = &screenCaptureAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }

         if (productionFormatContext != null)
         {
             ffmpeg.avformat_free_context(productionFormatContext);
         }

         if (screenCaptureInputFormatContext != null)
         {
             ffmpeg.avformat_free_context(screenCaptureInputFormatContext);
         }

         if (productionAVCodecOptions != null)
         {
             fixed (AVDictionary** p = &productionAVCodecOptions)
             {
                 ffmpeg.av_dict_free(p);
             }
         }
     }
 }


    


    I call the Start method and wait 8 seconds; out of scope, I write the bytes to an MP4 file, without writing the trailer, just to debug the atom boxes. The MP4Box output I got:

    


    (Full output: https://pastebin.com/xkM4MfG7)

    (Excerpt:)
    "
    <boxes>
    <uuidbox size="0" type="uuid" uuid="{00000000-00000000-00000000-00000000}" specification="unknown" container="unknown">
    </uuidbox>
    <trackreferencetypebox size="0" type="cdsc" specification="p12" container="tref">
    <trackreferenceentry trackid=""></trackreferenceentry>
    </trackreferencetypebox>
    <trackreferencetypebox size="0" type="hint" specification="p12" container="tref">
    <trackreferenceentry trackid=""></trackreferenceentry>
    </trackreferencetypebox>
    [dozens more entries follow, every one with size="0": tref and iref reference boxes, sgpd sample group descriptions, stsd sample descriptions, trgr track groups, and free space boxes; see the pastebin link above for the full dump]
    <mediadatabox size="0" type="mdat" specification="p12" container="file">
    </mediadatabox>
    <mediadatabox size="0" type="mdat" specification="p12" container="meta">
    "
    </mediadatabox></boxes>
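
    Worth noting: nothing in the posted code ever creates an output AVFormatContext or calls a muxer, and movflags is a muxer option, so setting it in the dictionary passed to avcodec_open2() has no effect; the packets coming out of avcodec_receive_packet() are a raw H.264 elementary stream, not an MP4 container, which would explain MP4Box reporting only empty, size-0 boxes. Below is a minimal sketch of the missing muxing step, written against the plain C API that FFmpeg.Autogen mirrors one-to-one; names such as outCtx are illustrative and error checks are omitted.

    // Hedged sketch: route encoded packets through the MP4 muxer so that
    // moov/moof/mdat boxes actually get written. Illustrative names throughout.
    AVFormatContext *outCtx = NULL;
    avformat_alloc_output_context2(&outCtx, NULL, "mp4", "debug.mp4");

    AVStream *st = avformat_new_stream(outCtx, NULL);
    avcodec_parameters_from_context(st->codecpar, productionAVCodecContext);
    st->time_base = productionAVCodecContext->time_base;

    avio_open(&outCtx->pb, "debug.mp4", AVIO_FLAG_WRITE);

    AVDictionary *muxOpts = NULL;   // movflags belong to the muxer, not the codec
    av_dict_set(&muxOpts, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0);
    avformat_write_header(outCtx, &muxOpts);

    // Inside the avcodec_receive_packet() loop, instead of copying raw bytes out:
    av_packet_rescale_ts(packet, productionAVCodecContext->time_base, st->time_base);
    packet->stream_index = st->index;
    av_interleaved_write_frame(outCtx, packet);

    // On shutdown:
    av_write_trailer(outCtx);
    avio_closep(&outCtx->pb);
    avformat_free_context(outCtx);

    For the buffer/event use case, an avio_alloc_context() with a custom write callback can stand in for avio_open(). Frames also need a pts before avcodec_send_frame() (for example a running frame counter on compatibleFrame->pts), or the muxer cannot place samples on the timeline.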

  • FFMPEG lossless simple & efficient format

    30 October 2016, by Adminy

    I took a sample video and ran ffmpeg for each image format like this:

    ffmpeg -i test.mkv png/%d.png
    ffmpeg -i test.mkv bmp/%d.bmp

    What I noted is that BMP at 1080p has a fixed size of 5.97 MB (6,266,934 bytes), versus 6,220,800 bytes of raw pixel data.

    Then I did some maths:

    1 byte = 2^8 == 256 possible combinations
    r + g + b form a color (0-255,0-255,0-255)
    bmp = 3 * 1920 * 1080 = 6,220,800 bytes

    So the extra is just file headers?! (46,134 bytes for just headers?)

    The next thing I noted is that PNG size is dynamic: when the image has a lot of repetition it is around 2.5 MB, but usually it is between 10 MB and 12 MB.

    Also, decoding BMP seemed a lot quicker than PNG, so there is no point in even looking at PNG. I would like to know why it is so much larger than BMP when it has (zlib) compression.

    What is the best format to export to in terms of speed and size, preferably one as simple as reading the file as raw color bytes? (One option is sketched below.)
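
    If "as simple as reading the file of color bytes" is taken literally, ffmpeg's rawvideo muxer writes exactly that: no container, no per-file or per-frame header (a suggestion not from the original post; the frame dimensions and pixel format must then be known out of band):

    ffmpeg -i test.mkv -f rawvideo -pix_fmt rgb24 out.rgb24

    Every 1080p frame is then exactly 3 × 1920 × 1080 = 6,220,800 bytes, so reading frame N is a single seek and read.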

  • How to extract frames at 30 fps using FFmpeg APIs?

    7 September 2016, by Amber Beriwal

    We are working on a project that consumes the FFmpeg library for video frame extraction.

    We have seen that, using the command line, ffmpeg is capable of extracting frames at 30 fps with the command below. Even using Xuggler, we are able to extract frames at 30 fps.

    ffmpeg -i input.flv -vf fps=1 out%d.png

    But when we use the FFmpeg APIs directly in our code, we get the following results:

    • 720p video (1280 x 720) - 16 fps (approx. 60 ms/frame)
    • 1080p video (1920 x 1080) - 7 fps (approx. 140 ms/frame)

    Ideally, we should be able to get the data in constant time (approx. 30 ms/frame).

    How can we get 30 fps? (A threading sketch follows the code below.)

    Code:

    if (avformat_open_input(&pFormatCtx, pcVideoFile, NULL, NULL)) {
       iError = -1;  //Couldn't open file
    }

    if (!iError) {
       //Retrieve stream information
       if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
           iError = -2; //Couldn't find stream information
    }

    //Find the first video stream
    if (!iError) {

       for (i = 0; i < pFormatCtx->nb_streams; i++) {
           if (AVMEDIA_TYPE_VIDEO
                   == pFormatCtx->streams[i]->codec->codec_type) {
               iFramesInVideo = pFormatCtx->streams[i]->nb_index_entries;
               duration = pFormatCtx->streams[i]->duration;
               begin = pFormatCtx->streams[i]->start_time;
               time_base = (pFormatCtx->streams[i]->time_base.num * 1.0f)
                       / pFormatCtx->streams[i]->time_base.den;

               pCodecCtx = avcodec_alloc_context3(NULL);
               if (!pCodecCtx) {
                   iError = -6;
                   break;
               }

               AVCodecParameters params = { 0 };
               iReturn = avcodec_parameters_from_context(&params,
                       pFormatCtx->streams[i]->codec);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               iReturn = avcodec_parameters_to_context(pCodecCtx, &params);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               //pCodecCtx = pFormatCtx->streams[i]->codec;

               iVideoStreamIndex = i;
               break;
           }
       }
    }

    if (!iError) {
       if (iVideoStreamIndex == -1) {
           iError = -3; // Didn't find a video stream
       }
    }

    if (!iError) {
       // Find the decoder for the video stream
       pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
       if (pCodec == NULL) {
           iError = -4;
       }
    }

    if (!iError) {
       // Open codec
       if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
           iError = -5;
    }

    if (!iError) {
       iNumBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
               pCodecCtx->height, 1);

       // initialize SWS context for software scaling
       sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
               pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
               AV_PIX_FMT_RGB24,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL);
       if (!sws_ctx) {
           iError = -7;
       }
    }
    clock_gettime(CLOCK_MONOTONIC_RAW, &end);
    delta_us = (end.tv_sec - start.tv_sec) * 1000000
           + (end.tv_nsec - start.tv_nsec) / 1000;
    start = end;
    //LOGI("Starting_Frame_Extraction: %lld", delta_us);
    if (!iError) {
       while (av_read_frame(pFormatCtx, &packet) == 0) {
           // Is this a packet from the video stream?
           if (packet.stream_index == iVideoStreamIndex) {
               pFrame = av_frame_alloc();
               if (NULL == pFrame) {
                   iError = -8;
                   break;
               }

               // Decode video frame
               avcodec_decode_video2(pCodecCtx, pFrame, &iFrameFinished,
                       &packet);
               if (iFrameFinished) {
                   //OUR CODE
               }
               av_frame_free(&pFrame);
               pFrame = NULL;
           }
           av_packet_unref(&packet);
       }
    }
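
    One knob worth checking, as an assumption about the cause rather than something stated in the question: if the codec context is opened without threading options, software H.264 decoding can run effectively single-threaded, and enabling FFmpeg's built-in threading before avcodec_open2() often brings decode time well under the ~33 ms/frame that 30 fps requires. A minimal sketch against the same-era API:

    // Hedged sketch: request multithreaded decoding before opening the codec.
    pCodecCtx->thread_count = 0;                         // 0 = auto-detect core count
    pCodecCtx->thread_type = FF_THREAD_FRAME | FF_THREAD_SLICE;

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        iError = -5;

    Reusing a single AVFrame allocated once outside the read loop, instead of calling av_frame_alloc()/av_frame_free() per packet, also removes per-frame allocation churn from the measurement.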