Advanced search

Media (0)


No media matching your criteria is available on this site.

Other articles (103)

  • Creating farms of unique websites

    13 April 2011

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things):
      • implementation costs to be shared between several different projects / individuals
      • rapid deployment of multiple unique sites
      • creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

  • Publishing on MediaSPIP

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, provided your MediaSPIP installation is at version 0.2 or higher. If in doubt, contact your MediaSPIP administrator to find out.

  • Authorizations overridden by plugins

    27 April 2010

    MediaSPIP core
    autoriser_auteur_modifier(), so that visitors can edit their own information on the authors page

On other sites (13420)

  • FFmpeg.Autogen: Issue with Zero-Sized Atom Boxes in MP4 Output

    16 June 2024, by Alexander Jansson

    I just started learning FFmpeg using the FFmpeg.Autogen wrapper version 5.1 in C#, with the FFmpeg 5.1 shared libraries. I'm trying to write a class that screen-records using gdigrab and produces streamable MP4 to a buffer/event. Everything seems to work as expected, with no errors, except that the output stream produces atom boxes with size 0 (and hence a small file as well); no data seems to end up in the boxes. The "debug test MP4 file" was analyzed with MP4Box, and the box info is provided in the thread.

    To be more specific: why does this code produce empty atom boxes, and can someone edit my code so that the produced data actually contains frame data from gdigrab?

    Code:
     public unsafe class ScreenStreamer : IDisposable
 {
     private readonly AVCodec* productionCodec;
     private readonly AVCodec* screenCaptureAVCodec;
     private readonly AVCodecContext* productionAVCodecContext;
     private readonly AVFormatContext* productionFormatContext;
     private readonly AVCodecContext* screenCaptureAVCodecContext;
     private readonly AVDictionary* productionAVCodecOptions;
     private readonly AVInputFormat* screenCaptureInputFormat;
     private readonly AVFormatContext* screenCaptureInputFormatContext;
     private readonly int gDIGrabVideoStreamIndex;
     private readonly System.Drawing.Size screenBounds;
     private readonly int _produceAtleastAmount;
     public EventHandler<byte[]> OnNewVideoDataProduced;
     private MemoryStream unsafeToManagedBridgeBuffer;
     private CancellationTokenSource cancellationTokenSource;
     private Task recorderTask;

     public ScreenStreamer(int fps, int bitrate, int screenIndex, int produceAtleastAmount = 1000)
     {
         ffmpeg.avdevice_register_all();
         ffmpeg.avformat_network_init();
         recorderTask = Task.CompletedTask;
         cancellationTokenSource = new CancellationTokenSource();
         unsafeToManagedBridgeBuffer = new MemoryStream();
         _produceAtleastAmount = produceAtleastAmount;

         // Allocate and initialize production codec and context
         productionCodec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
         if (productionCodec == null) throw new ApplicationException("Could not find encoder for codec ID H264.");

         productionAVCodecContext = ffmpeg.avcodec_alloc_context3(productionCodec);
         if (productionAVCodecContext == null) throw new ApplicationException("Could not allocate video codec context.");

         // Set codec parameters
         screenBounds = RetrieveScreenBounds(screenIndex);
         productionAVCodecContext->width = screenBounds.Width;
         productionAVCodecContext->height = screenBounds.Height;
         productionAVCodecContext->time_base = new AVRational() { den = fps, num = 1 };
         productionAVCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
         productionAVCodecContext->bit_rate = bitrate;

         int result = ffmpeg.av_opt_set(productionAVCodecContext->priv_data, "preset", "veryfast", 0);
         if (result != 0)
         {
             throw new ApplicationException($"Failed to set options with error code {result}.");
         }

         // Open codec
         fixed (AVDictionary** pm = &productionAVCodecOptions)
         {
             result = ffmpeg.av_dict_set(pm, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to set dictionary with error code {result}.");
             }

             result = ffmpeg.avcodec_open2(productionAVCodecContext, productionCodec, pm);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open codec with error code {result}.");
             }
         }

         // Allocate and initialize screen capture codec and context
         screenCaptureInputFormat = ffmpeg.av_find_input_format("gdigrab");
         if (screenCaptureInputFormat == null) throw new ApplicationException("Could not find input format gdigrab.");

         fixed (AVFormatContext** ps = &screenCaptureInputFormatContext)
         {
             result = ffmpeg.avformat_open_input(ps, "desktop", screenCaptureInputFormat, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open input with error code {result}.");
             }

             result = ffmpeg.avformat_find_stream_info(screenCaptureInputFormatContext, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to find stream info with error code {result}.");
             }
         }

         gDIGrabVideoStreamIndex = -1;
         for (int i = 0; i < screenCaptureInputFormatContext->nb_streams; i++)
         {
             if (screenCaptureInputFormatContext->streams[i]->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
             {
                 gDIGrabVideoStreamIndex = i;
                 break;
             }
         }

         if (gDIGrabVideoStreamIndex < 0)
         {
             throw new ApplicationException("Failed to find video stream in input.");
         }

         AVCodecParameters* codecParameters = screenCaptureInputFormatContext->streams[gDIGrabVideoStreamIndex]->codecpar;
         screenCaptureAVCodec = ffmpeg.avcodec_find_decoder(codecParameters->codec_id);
         if (screenCaptureAVCodec == null)
         {
             throw new ApplicationException("Could not find decoder for screen capture.");
         }

         screenCaptureAVCodecContext = ffmpeg.avcodec_alloc_context3(screenCaptureAVCodec);
         if (screenCaptureAVCodecContext == null)
         {
             throw new ApplicationException("Could not allocate screen capture codec context.");
         }

         result = ffmpeg.avcodec_parameters_to_context(screenCaptureAVCodecContext, codecParameters);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to copy codec parameters to context with error code {result}.");
         }

         result = ffmpeg.avcodec_open2(screenCaptureAVCodecContext, screenCaptureAVCodec, null);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to open screen capture codec with error code {result}.");
         }
     }

     public void Start()
     {
         recorderTask = Task.Run(() =>
         {
             AVPacket* packet = ffmpeg.av_packet_alloc();
             AVFrame* rawFrame = ffmpeg.av_frame_alloc();
             AVFrame* compatibleFrame = null;
             byte* dstBuffer = null;

             try
             {
                 while (!cancellationTokenSource.Token.IsCancellationRequested)
                 {
                     if (ffmpeg.av_read_frame(screenCaptureInputFormatContext, packet) >= 0)
                     {
                         if (packet->stream_index == gDIGrabVideoStreamIndex)
                         {
                             int response = ffmpeg.avcodec_send_packet(screenCaptureAVCodecContext, packet);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a packet to the decoder: {response}");
                             }

                             response = ffmpeg.avcodec_receive_frame(screenCaptureAVCodecContext, rawFrame);
                             if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                             {
                                 continue;
                             }
                             else if (response < 0)
                             {
                                 throw new ApplicationException($"Error while receiving a frame from the decoder: {response}");
                             }

                             compatibleFrame = ConvertToCompatiblePixelFormat(rawFrame, out dstBuffer);

                             response = ffmpeg.avcodec_send_frame(productionAVCodecContext, compatibleFrame);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a frame to the encoder: {response}");
                             }

                             while (response >= 0)
                             {
                                 response = ffmpeg.avcodec_receive_packet(productionAVCodecContext, packet);
                                 if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                                 {
                                     break;
                                 }
                                 else if (response < 0)
                                 {
                                     throw new ApplicationException($"Error while receiving a packet from the encoder: {response}");
                                 }

                                 using var packetStream = new UnmanagedMemoryStream(packet->data, packet->size);
                                 packetStream.CopyTo(unsafeToManagedBridgeBuffer);
                                 byte[] managedBytes = unsafeToManagedBridgeBuffer.ToArray();
                                 OnNewVideoDataProduced?.Invoke(this, managedBytes);
                                 unsafeToManagedBridgeBuffer.SetLength(0);
                             }
                         }
                     }
                     ffmpeg.av_packet_unref(packet);
                     ffmpeg.av_frame_unref(rawFrame);
                     if (compatibleFrame != null)
                     {
                         ffmpeg.av_frame_unref(compatibleFrame);
                         ffmpeg.av_free(dstBuffer);
                     }
                 }
             }
             finally
             {
                 ffmpeg.av_packet_free(&packet);
                 ffmpeg.av_frame_free(&rawFrame);
                 if (compatibleFrame != null)
                 {
                     ffmpeg.av_frame_free(&compatibleFrame);
                 }
             }
         });
     }

     public AVFrame* ConvertToCompatiblePixelFormat(AVFrame* srcFrame, out byte* dstBuffer)
     {
         AVFrame* dstFrame = ffmpeg.av_frame_alloc();
         int buffer_size = ffmpeg.av_image_get_buffer_size(productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);
         byte_ptrArray4 dstData = new byte_ptrArray4();
         int_array4 dstLinesize = new int_array4();
         dstBuffer = (byte*)ffmpeg.av_malloc((ulong)buffer_size);
         ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, dstBuffer, productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);

         dstFrame->format = (int)productionAVCodecContext->pix_fmt;
         dstFrame->width = productionAVCodecContext->width;
         dstFrame->height = productionAVCodecContext->height;
         dstFrame->data.UpdateFrom(dstData);
         dstFrame->linesize.UpdateFrom(dstLinesize);

         SwsContext* swsCtx = ffmpeg.sws_getContext(
             srcFrame->width, srcFrame->height, (AVPixelFormat)srcFrame->format,
             productionAVCodecContext->width, productionAVCodecContext->height, productionAVCodecContext->pix_fmt,
             ffmpeg.SWS_BILINEAR, null, null, null);

         if (swsCtx == null)
         {
             throw new ApplicationException("Could not initialize the conversion context.");
         }

         ffmpeg.sws_scale(swsCtx, srcFrame->data, srcFrame->linesize, 0, srcFrame->height, dstFrame->data, dstFrame->linesize);
         ffmpeg.sws_freeContext(swsCtx);
         return dstFrame;
     }

     private System.Drawing.Size RetrieveScreenBounds(int screenIndex)
     {
         return new System.Drawing.Size(1920, 1080);
     }

     public void Dispose()
     {
         cancellationTokenSource?.Cancel();
         recorderTask?.Wait();
         cancellationTokenSource?.Dispose();
         recorderTask?.Dispose();
         unsafeToManagedBridgeBuffer?.Dispose();

         fixed (AVCodecContext** p = &productionAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }
         fixed (AVCodecContext** p = &screenCaptureAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }

         if (productionFormatContext != null)
         {
             ffmpeg.avformat_free_context(productionFormatContext);
         }

         if (screenCaptureInputFormatContext != null)
         {
             ffmpeg.avformat_free_context(screenCaptureInputFormatContext);
         }

         if (productionAVCodecOptions != null)
         {
             fixed (AVDictionary** p = &productionAVCodecOptions)
             {
                 ffmpeg.av_dict_free(p);
             }
         }
     }
 }


    


    I call the Start method and wait 8 seconds; out of scope, I write the bytes to an MP4 file without writing the trailer, just to debug the atom boxes. The MP4Box output I got:

    (Full output) https://pastebin.com/xkM4MfG7

    (Not full)
    <boxes>
    <uuidbox size="0" type="uuid" uuid="{00000000-00000000-00000000-00000000}" specification="unknown" container="unknown">
    </uuidbox>
    <trackreferencetypebox size="0" type="cdsc" specification="p12" container="tref">
    <trackreferenceentry trackid=""></trackreferenceentry>
    </trackreferencetypebox>
    <trackreferencetypebox size="0" type="hint" specification="p12" container="tref">
    <trackreferenceentry trackid=""></trackreferenceentry>
    </trackreferencetypebox>
    (... dozens more trackreferencetypebox, itemreferencebox and samplegroupdescriptionbox entries follow, every one of them with size="0" ...)
    <sampledescriptionentrybox size="0" type="GNRM" specification="unknown" container="stsd" extensiondatasize="0">
    </sampledescriptionentrybox>
    <visualsampledescriptionbox size="0" type="GNRV" specification="unknown" container="stsd" version="0" revision="0" vendor="0" temporalquality="0" spacialquality="0" width="0" height="0" horizontalresolution="4718592" verticalresolution="4718592" compressorname="" bitdepth="24">
    </visualsampledescriptionbox>
    <audiosampledescriptionbox size="0" type="GNRA" specification="unknown" container="stsd" version="0" revision="0" vendor="0" channelcount="2" bitspersample="16" samplerate="0">
    </audiosampledescriptionbox>
    <trackgrouptypebox size="0" type="msrc" version="0" flags="0" specification="p12" container="trgr">
    </trackgrouptypebox>
    <freespacebox size="0" type="free" specification="p12" container="*">
    </freespacebox>
    <mediadatabox size="0" type="mdat" specification="p12" container="file">
    </mediadatabox>
    <mediadatabox size="0" type="mdat" specification="p12" container="meta">
    </mediadatabox>
    </boxes>

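    For context on where the boxes would normally come from (an editor's sketch, not part of the original post): in FFmpeg the moov/moof/mdat boxes are written by the MP4 muxer, and the class above never creates an output AVFormatContext; the movflags entry is handed to avcodec_open2(), which does not consume muxer options, and the packets coming out of avcodec_receive_packet() are essentially an H.264 elementary stream, not MP4. The usual wiring looks roughly like this with FFmpeg.Autogen (outputUrl and all variable names are placeholders, error checks trimmed):

     // Sketch only: attach the encoder to an MP4/fMP4 muxer so that real
     // moov/moof/mdat boxes get written. All names here are placeholders.
     public static unsafe AVFormatContext* OpenMp4Muxer(AVCodecContext* encoderContext, string outputUrl, out AVStream* videoStream)
     {
         AVFormatContext* muxer = null;
         ffmpeg.avformat_alloc_output_context2(&muxer, null, "mp4", outputUrl);

         // One video stream carrying the encoder's parameters.
         videoStream = ffmpeg.avformat_new_stream(muxer, null);
         ffmpeg.avcodec_parameters_from_context(videoStream->codecpar, encoderContext);
         videoStream->time_base = encoderContext->time_base;

         // Open the byte sink (a custom AVIOContext would be used for in-memory output).
         ffmpeg.avio_open(&muxer->pb, outputUrl, ffmpeg.AVIO_FLAG_WRITE);

         // movflags is a muxer option; setting it on avcodec_open2 has no effect.
         AVDictionary* opts = null;
         ffmpeg.av_dict_set(&opts, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0);
         ffmpeg.avformat_write_header(muxer, &opts);
         ffmpeg.av_dict_free(&opts);
         return muxer;
     }

     // Then, for each packet received from avcodec_receive_packet():
     //   ffmpeg.av_packet_rescale_ts(packet, encoderContext->time_base, videoStream->time_base);
     //   packet->stream_index = videoStream->index;
     //   ffmpeg.av_interleaved_write_frame(muxer, packet);
     // and on shutdown:
     //   ffmpeg.av_write_trailer(muxer);      // finalizes the box sizes
     //   ffmpeg.avio_closep(&muxer->pb);
     //   ffmpeg.avformat_free_context(muxer);

    With frag_keyframe+empty_moov the muxer emits complete fragments as it goes, so the bytes become streamable without waiting for av_write_trailer().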

  • Decode multiple video on GPU (FFmpeg.autogen + C#)

    5 December 2024, by Sang Nguyen

    I'm trying to use the basic video-decoding example (FFmpeg.AutoGen.Example) from the FFmpeg.AutoGen 4.3.0.3 library https://github.com/Ruslan-B/FFmpeg.AutoGen to decode multiple videos on a GPU (AMD Radeon R7 430). My main function is as follows:


     private static void Main(string[] args)
     {
         var url = @".\abc.mp4";
         for (int i = 0; i < 11; i++)
         {
             url = @"D:\video\abc" + i + ".mp4";
             new Thread(() =>
             {
                 DecodeAllFramesToImages(AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA, url);
             }).Start();
         }
     }


    I try to decode video abc.mp4 with GPU hardware acceleration. However, an error occurs when the thread count is greater than 10. The errors are as follows:

    • "System.AccessViolationException: Attempted to read or write protected memory. This is often an indication that other memory is corrupt."

    • In the console there are the error messages "Failed to create D3D11VA video decoder" and "Failed setup for format d3d11: hwaccel initialisation returned error".

    I'm new to the FFmpeg library, so I don't understand the problem very well. I would love to have your help with this error!


    private static unsafe void DecodeAllFramesToImages(AVHWDeviceType HWDevice, string url)
    {
        using (var vsd = new VideoStreamDecoder(url, HWDevice))
        {
            Console.WriteLine($"codec name: {vsd.CodecName}");

            var info = vsd.GetContextInfo();
            info.ToList().ForEach(x => Console.WriteLine($"{x.Key} = {x.Value}"));

            var sourceSize = vsd.FrameSize;
            var sourcePixelFormat = HWDevice == AVHWDeviceType.AV_HWDEVICE_TYPE_NONE ? vsd.PixelFormat : GetHWPixelFormat(HWDevice);
            var destinationSize = sourceSize;
            var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;
            using (var vfc = new VideoFrameConverter(sourceSize, sourcePixelFormat, destinationSize, destinationPixelFormat))
            {
                var frameNumber = 0;
                while (vsd.TryDecodeNextFrame(out var frame))
                {
                    //var convertedFrame = vfc.Convert(frame);
                    //using (var bitmap = new Bitmap(convertedFrame.width, convertedFrame.height, convertedFrame.linesize[0], PixelFormat.Format24bppRgb, (IntPtr)convertedFrame.data[0]))
                    //    bitmap.Save($"frame.{frameNumber:D8}.jpg", ImageFormat.Jpeg);

                    Console.WriteLine($"frame: {frameNumber}");
                    frameNumber++;
                }
            }
        }
    }


    using System;
    using System.Collections.Generic;
    using System.Drawing;
    using System.IO;
    using System.Runtime.InteropServices;

    namespace FFmpeg.AutoGen.Example
    {
        public sealed unsafe class VideoStreamDecoder : IDisposable
        {
            private readonly AVCodecContext* _pCodecContext;
            private readonly AVFormatContext* _pFormatContext;
            private readonly int _streamIndex;
            private readonly AVFrame* _pFrame;
            private readonly AVFrame* _receivedFrame;
            private readonly AVPacket* _pPacket;

            public VideoStreamDecoder(string url, AVHWDeviceType HWDeviceType = AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
            {
                _pFormatContext = ffmpeg.avformat_alloc_context();
                _receivedFrame = ffmpeg.av_frame_alloc();
                var pFormatContext = _pFormatContext;
                ffmpeg.avformat_open_input(&pFormatContext, url, null, null).ThrowExceptionIfError();
                ffmpeg.avformat_find_stream_info(_pFormatContext, null).ThrowExceptionIfError();
                AVCodec* codec = null;
                _streamIndex = ffmpeg.av_find_best_stream(_pFormatContext, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, &codec, 0).ThrowExceptionIfError();
                _pCodecContext = ffmpeg.avcodec_alloc_context3(codec);
                if (HWDeviceType != AVHWDeviceType.AV_HWDEVICE_TYPE_NONE)
                {
                    ffmpeg.av_hwdevice_ctx_create(&_pCodecContext->hw_device_ctx, HWDeviceType, null, null, 0).ThrowExceptionIfError();
                }
                ffmpeg.avcodec_parameters_to_context(_pCodecContext, _pFormatContext->streams[_streamIndex]->codecpar).ThrowExceptionIfError();
                ffmpeg.avcodec_open2(_pCodecContext, codec, null).ThrowExceptionIfError();
                CodecName = ffmpeg.avcodec_get_name(codec->id);
                FrameSize = new Size(_pCodecContext->width, _pCodecContext->height);
                PixelFormat = _pCodecContext->pix_fmt;
                _pPacket = ffmpeg.av_packet_alloc();
                _pFrame = ffmpeg.av_frame_alloc();
            }

            public string CodecName { get; }
            public Size FrameSize { get; }
            public AVPixelFormat PixelFormat { get; }

            public void Dispose()
            {
                ffmpeg.av_frame_unref(_pFrame);
                ffmpeg.av_free(_pFrame);

                ffmpeg.av_packet_unref(_pPacket);
                ffmpeg.av_free(_pPacket);

                ffmpeg.avcodec_close(_pCodecContext);
                var pFormatContext = _pFormatContext;
                ffmpeg.avformat_close_input(&pFormatContext);
            }

            public bool TryDecodeNextFrame(out AVFrame frame)
            {
                ffmpeg.av_frame_unref(_pFrame);
                ffmpeg.av_frame_unref(_receivedFrame);
                int error;
                do
                {
                    try
                    {
                        do
                        {
                            error = ffmpeg.av_read_frame(_pFormatContext, _pPacket);
                            if (error == ffmpeg.AVERROR_EOF)
                            {
                                frame = *_pFrame;
                                return false;
                            }

                            error.ThrowExceptionIfError();
                        } while (_pPacket->stream_index != _streamIndex);

                        ffmpeg.avcodec_send_packet(_pCodecContext, _pPacket).ThrowExceptionIfError();
                    }
                    finally
                    {
                        ffmpeg.av_packet_unref(_pPacket);
                    }

                    error = ffmpeg.avcodec_receive_frame(_pCodecContext, _pFrame);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));
                error.ThrowExceptionIfError();
                if (_pCodecContext->hw_device_ctx != null)
                {
                    ffmpeg.av_hwframe_transfer_data(_receivedFrame, _pFrame, 0).ThrowExceptionIfError();
                    frame = *_receivedFrame;
                }
                else
                {
                    frame = *_pFrame;
                }
                return true;
            }

            public IReadOnlyDictionary<string, string> GetContextInfo()
            {
                AVDictionaryEntry* tag = null;
                var result = new Dictionary<string, string>();
                while ((tag = ffmpeg.av_dict_get(_pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
                {
                    var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
                    var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
                    result.Add(key, value);
                }
                return result;
            }
        }
    }

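    A sketch, not from the original post: since the failure starts at the 11th concurrent decoder, one plausible cause is that each D3D11VA decoder instance ties up limited driver/GPU resources, so capping concurrency is a common mitigation. Separately, the original Main writes to a single shared url variable that the threads capture, which is a data race independent of the GPU error. Both are addressed below; the cap of 8 is an arbitrary assumption to tune, and System.Threading / System.Threading.Tasks / System.Collections.Generic usings are assumed.

     // Sketch: cap the number of simultaneously live D3D11VA decoders and
     // give each task its own url (the original shares one url across threads).
     private static readonly SemaphoreSlim DecoderSlots = new SemaphoreSlim(8);

     private static void Main(string[] args)
     {
         var tasks = new List<Task>();
         for (int i = 0; i < 11; i++)
         {
             string url = @"D:\video\abc" + i + ".mp4"; // local per iteration
             tasks.Add(Task.Run(() =>
             {
                 DecoderSlots.Wait();        // block until a decoder slot frees up
                 try
                 {
                     DecodeAllFramesToImages(AVHWDeviceType.AV_HWDEVICE_TYPE_D3D11VA, url);
                 }
                 finally
                 {
                     DecoderSlots.Release(); // hand the slot to the next video
                 }
             }));
         }
         Task.WaitAll(tasks.ToArray());
     }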

  • Read multiple frames in demuxer

    22 December 2024, by Aycon

    I use FFmpeg.AutoGen, a C# wrapper of the FFmpeg (7.0) library, to read media files and generate a stream of frames for another application. I want to get n frames and hold the pointers in memory indefinitely. However, I am completely confused trying to figure out which API is deprecated and how I can tell FFmpeg to hold the pointers in memory until I tell it to release them.
    I don't want to copy a frame after receiving it if I can avoid it.


    I tried many ways. My last attempt was to receive the frames using the ffmpeg.avcodec_send_packet(), ffmpeg.av_read_frame() and ffmpeg.avcodec_receive_frame() functions, as specified in the current manual.
    My code fragment for reading frames:


    using Core.Backends.FFmpeg.Helpers;
    using Core.Backends.FFmpeg.UnsafeWrappers;
    using Core.Enums;
    using Core.Interfaces;
    using FFmpeg.AutoGen.Abstractions;
    using System.Diagnostics;
    using System.Drawing;

    namespace Core.Backends.FFmpeg.Internal;

    internal class Video : IVideo
    {
      private readonly AVFormatHandler p_format;
      private readonly AVCodecHandler p_codec;
      private readonly AVPacketWrapper p_packet;
      private readonly FramesPool p_framesPool;
      private readonly FramesPool p_framesBufferPool;
      private bool p_disposedValue;

      public Video(VideoMetadata _videoMetadata, AVFormatHandler _format, AVCodecHandler _codec, int _bufferizedFramesCount = 1)
      {
        Duration = _videoMetadata.Duration;
        FrameRate = _videoMetadata.FrameRate;
        FrameSize = _videoMetadata.FrameSize;
        PixelFormat = _videoMetadata.PixelFormat;
        SelectedStreamID = _videoMetadata.SelectedStreamID;
        p_format = _format;
        p_codec = _codec;
        p_packet = new AVPacketWrapper();
        var frame = new AVFrameWrapper(p_format, p_packet);
        p_framesPool = new(frame, _bufferizedFramesCount);
        p_framesBufferPool = new(frame, _bufferizedFramesCount);
      }

      /// <inheritdoc/>
      public long Duration { get; init; }

      /// <inheritdoc/>
      public (int num, int den) FrameRate { get; init; }

      /// <inheritdoc/>
      public Size FrameSize { get; init; }

      /// <inheritdoc/>
      public PixelFormat PixelFormat { get; init; }

      private int SelectedStreamID { get; init; }

      private unsafe int SendPacket(AVPacketWrapper? _packet)
      {
        if (_packet == null)
          return ffmpeg.avcodec_send_packet(p_codec.AVCodecContextPointer, null);

        return ffmpeg.avcodec_send_packet(p_codec.AVCodecContextPointer, _packet.AVPacketPointer);
      }

      private unsafe bool IsSelectedStream(AVPacketWrapper _packet)
      {
        return _packet.AVPacketPointer->stream_index == SelectedStreamID;
      }

      private unsafe int ReadFrame(AVPacketWrapper _packet)
      {
        return ffmpeg.av_read_frame(p_format.AVFormatPointer, _packet.AVPacketPointer);
      }

      private static unsafe void UnrefPacket(AVPacketWrapper _packet) => ffmpeg.av_packet_unref(_packet.AVPacketPointer);

      private IEnumerable<int> ReadToSelectedStream(AVPacketWrapper _packet)
      {
        do
        {
          UnrefPacket(p_packet);
          yield return ReadFrame(_packet);
        } while (!IsSelectedStream(_packet));
      }

      private unsafe void FlushBuffers() => ffmpeg.avcodec_flush_buffers(p_codec.AVCodecContextPointer);

      private IEnumerable<AVPacketWrapper> GetNextPacketPrivate()
      {
        try
        {
          while (true)
          {
            foreach (int errorCodeRead in ReadToSelectedStream(p_packet))
            {
              if (errorCodeRead == ffmpeg.AVERROR_EOF)
                break;

              errorCodeRead.ThrowInvalidOperationExceptionIfError();
            }

            int errorCodeSend = SendPacket(p_packet);

            if (errorCodeSend == ffmpeg.AVERROR(ffmpeg.EAGAIN))
            {
              yield return p_packet;
              continue;
            }

            if (errorCodeSend == ffmpeg.AVERROR_EOF)
            {
              yield return p_packet;
              break;
            }

            errorCodeSend.ThrowInvalidOperationExceptionIfError();

            yield return p_packet;
          }

          // Last iteration special case handling
          int errorCodeSendLast = SendPacket(null);

          if (errorCodeSendLast != ffmpeg.AVERROR_EOF)
            errorCodeSendLast.ThrowInvalidOperationExceptionIfError();

          yield return p_packet;
        }
        finally
        {
          UnrefPacket(p_packet);
          FlushBuffers();
        }
      }

      private unsafe int ReceiveFrame(AVFrameWrapper _frame)
      {
        return ffmpeg.avcodec_receive_frame(p_codec.AVCodecContextPointer, _frame.AVFramePointer);
      }

      private unsafe AVFrameWrapper HWFrameCopyIfRequired(AVCodecHandler _codec, AVFrameWrapper _frame, AVFrameWrapper _buffer)
      {
        if (_codec.AVCodecContextPointer->hw_device_ctx != null)
        {
          int errorCode = ffmpeg.av_hwframe_transfer_data(_buffer.AVFramePointer, _frame.AVFramePointer, flags: 0);
          errorCode.ThrowInvalidOperationExceptionIfError();
          return _buffer;
        }

        return _frame;
      }

      private IEnumerable<AVFrameWrapper> GetNextFramePrivate(AVFrameWrapper _fresh_frame, AVFrameWrapper _fresh_frameBuffer)
      {
        int readCode;

        while (true)
        {
          readCode = ReceiveFrame(_fresh_frame);

          if (readCode == ffmpeg.AVERROR(ffmpeg.EAGAIN) || readCode == ffmpeg.AVERROR_EOF)
            yield break;

          readCode.ThrowInvalidOperationExceptionIfError();

          yield return HWFrameCopyIfRequired(p_codec, _fresh_frame, _fresh_frameBuffer);
        }
      }

      private static void RefreshFrames
      (
        IEnumerator<AVFrameWrapper> _framesEnumerator,
        IEnumerator<AVFrameWrapper> _framesBufferEnumerator,
        out AVFrameWrapper _frame,
        out AVFrameWrapper _frameBuffer
      )
      {
        // Catch fresh frame from pool
        Debug.Assert(_framesEnumerator.MoveNext(), "The frame pool must never stop yielding frames.");
        _frame = _framesEnumerator.Current;

        // Catch fresh frame buffer from pool
        Debug.Assert(_framesBufferEnumerator.MoveNext(), "The frame pool must never stop yielding frames.");
        _frameBuffer = _framesBufferEnumerator.Current;
      }

      /// <inheritdoc/>
      public IEnumerable<AVFrameWrapper> GetNextFrame()
      {
        IEnumerator<AVFrameWrapper> framesEnumerator = p_framesPool.GetNextFrame().GetEnumerator();
        IEnumerator<AVFrameWrapper> framesBufferEnumerator = p_framesBufferPool.GetNextFrame().GetEnumerator();
        RefreshFrames(framesEnumerator, framesBufferEnumerator, out AVFrameWrapper fresh_frame, out AVFrameWrapper fresh_frameBuffer);
        foreach (var packet in GetNextPacketPrivate())
          foreach (var frame in GetNextFramePrivate(fresh_frame, fresh_frameBuffer))
          {
            yield return frame;
            RefreshFrames(framesEnumerator, framesBufferEnumerator, out fresh_frame, out fresh_frameBuffer);
          }
      }

      protected virtual void Dispose(bool disposing)
      {
        if (!p_disposedValue)
        {
          if (disposing)
          {
          }

          p_packet.Dispose();
          p_framesPool.Flush();
          p_framesBufferPool.Flush();

          p_disposedValue = true;
        }
      }

      ~Video()
      {
        Dispose(disposing: false);
      }

      public void Dispose()
      {
        Dispose(disposing: true);
        GC.SuppressFinalize(this);
      }
    }


    My FramesPool class:


    using Core.Backends.FFmpeg.UnsafeWrappers;
    using FFmpeg.AutoGen.Abstractions;

    namespace Core.Backends.FFmpeg.Internal;

    internal class FramesPool
    {
      private readonly AVFrameWrapper p_frameWrapper;
      private readonly Queue<AVFrameWrapper> p_frames;
      private readonly int p_count;

      public FramesPool(AVFrameWrapper _initframeWrapper, int _count = 1)
      {
        p_frameWrapper = _initframeWrapper;
        p_frames = new(_count);
        p_frames.Enqueue(p_frameWrapper);
        p_count = _count;
      }

      private static unsafe void UnrefFrame(AVFrameWrapper _frame) => ffmpeg.av_frame_unref(_frame.AVFramePointer);

      public IEnumerable<AVFrameWrapper> GetNextFrame()
      {
        // First frame case
        UnrefFrame(p_frameWrapper);
        yield return p_frameWrapper;

        while (true)
        {
          if (p_frames.Count < p_count)
          {
            var new_frame = p_frameWrapper.Clone();
            p_frames.Enqueue(new_frame);
            yield return new_frame;
          }
          else
          {
            var frame = p_frames.Dequeue();
            UnrefFrame(frame);
            yield return frame;
            p_frames.Enqueue(frame);
          }
        }
      }

      public void Flush()
      {
        foreach (var frame in p_frames)
        {
          UnrefFrame(frame);
          frame.Dispose();
        }

        p_frames.Clear();
      }
    }


    Additional calls, among others:


    ffmpeg.avformat_alloc_context();
    ffmpeg.avformat_open_input(pptr, p_filePath, null, null);
    ffmpeg.av_hwdevice_ctx_create(&p_avCodecHandler!.AVCodecContextPointer->hw_device_ctx, strongDevice, null, null, 0);
    ffmpeg.av_find_best_stream(/*args*/);
    ffmpeg.avcodec_alloc_context3(p_avCodec);
    ffmpeg.avcodec_parameters_to_context(/*args*/);
    ffmpeg.avcodec_open2(/*args*/);


    The function ffmpeg.av_hwframe_transfer_data(_buffer.AVFramePointer, _frame.AVFramePointer, flags: 0) returns -22 (message: "Invalid argument").
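
    A note on the -22 (AVERROR(EINVAL)), as a sketch rather than a confirmed fix: av_hwframe_transfer_data() returns EINVAL when neither the source nor the destination is an actual hardware frame, and decoders can hand back software frames even with hw_device_ctx set (for example when the codec or profile falls back to software decoding). A guard along these lines, reusing the post's own wrapper types, separates the two cases:

     // Sketch: only transfer frames that really are hardware frames, and make
     // sure the destination is a freshly unref'ed frame before the transfer.
     private unsafe AVFrameWrapper HWFrameCopyIfRequired(AVCodecHandler _codec, AVFrameWrapper _frame, AVFrameWrapper _buffer)
     {
       // A software frame has no hw_frames_ctx; transferring it yields EINVAL.
       if (_frame.AVFramePointer->hw_frames_ctx == null)
         return _frame;

       ffmpeg.av_frame_unref(_buffer.AVFramePointer); // destination must be clean

       int errorCode = ffmpeg.av_hwframe_transfer_data(_buffer.AVFramePointer, _frame.AVFramePointer, flags: 0);
       errorCode.ThrowInvalidOperationExceptionIfError();
       return _buffer;
     }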

    &#xA;

    Please, help me)

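    As for holding n frames without copying (a sketch, assuming the frames are reference-counted, which decoded frames normally are): av_frame_clone() creates a new AVFrame that shares the same data buffers, so the pixels are not copied and FFmpeg keeps them alive until av_frame_free() drops the last reference. Nothing in the send_packet/read_frame/receive_frame API used above is deprecated in FFmpeg 7.

     // Sketch: retain decoded frames indefinitely without copying pixel data.
     using System;
     using FFmpeg.AutoGen.Abstractions;

     public static unsafe class FrameHolder
     {
         // Call right after a successful avcodec_receive_frame(ctx, decodedFrame).
         public static AVFrame* Retain(AVFrame* decodedFrame)
         {
             AVFrame* held = ffmpeg.av_frame_clone(decodedFrame); // shares buffers, no pixel copy
             if (held == null)
                 throw new InvalidOperationException("av_frame_clone failed.");
             return held; // decodedFrame can now be unref'ed and reused for decoding
         }

         public static void Release(ref AVFrame* held)
         {
             fixed (AVFrame** p = &held)
                 ffmpeg.av_frame_free(p); // buffers are freed once the last reference drops
         }
     }

    One caveat: hardware frames come from a fixed-size pool, so holding many references will stall the decoder; transferring to system memory first (as HWFrameCopyIfRequired already does) avoids that.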