Recherche avancée

Médias (1)

Mot : - Tags -/lev manovitch

Autres articles (68)

  • MediaSPIP 0.1 Beta version

    25 avril 2011, par

    MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...)

  • MediaSPIP version 0.1 Beta

    16 avril 2011, par

    MediaSPIP 0.1 beta est la première version de MediaSPIP décrétée comme "utilisable".
    Le fichier zip ici présent contient uniquement les sources de MediaSPIP en version standalone.
    Pour avoir une installation fonctionnelle, il est nécessaire d’installer manuellement l’ensemble des dépendances logicielles sur le serveur.
    Si vous souhaitez utiliser cette archive pour une installation en mode ferme, il vous faudra également procéder à d’autres modifications (...)

  • Amélioration de la version de base

    13 septembre 2013

    Jolie sélection multiple
    Le plugin Chosen permet d’améliorer l’ergonomie des champs de sélection multiple. Voir les deux images suivantes pour comparer.
    Il suffit pour cela d’activer le plugin Chosen (Configuration générale du site > Gestion des plugins), puis de configurer le plugin (Les squelettes > Chosen) en activant l’utilisation de Chosen dans le site public et en spécifiant les éléments de formulaires à améliorer, par exemple select[multiple] pour les listes à sélection multiple (...)

Sur d’autres sites (11190)

  • FFmpeg.Autogen : Issue with Zero-Sized Atom Boxes in MP4 Output

    16 juin 2024, par Alexander Jansson

    I just started learning ffmpeg using the ffmpeg.autogen wrapper version 5.1 in C#, with ffmpeg shared libs version 5.1. I'm trying to build a class which screen-records using gdigrab and produces streamable MP4 to a buffer/event. Everything seems to work as supposed to with no error, except that the output stream produces atom boxes with size 0 (and thus a small file size as well); no data seems to be produced in the boxes. The "debug test mp4 file" is analyzed with MP4Box and the box info is provided in the thread.

    


    To be more specific: why does this code produce empty atom boxes? Is someone able to edit my code so that the data produced actually contains frame data from gdigrab?

    


    Code:

    


     /// <summary>
     /// Screen recorder built on FFmpeg.Autogen: grabs the desktop with the gdigrab input
     /// device, decodes each captured frame, converts it to YUV420P, re-encodes it with H.264
     /// and hands every encoder packet to subscribers of <see cref="OnNewVideoDataProduced"/>.
     /// NOTE(review): no output AVFormatContext/muxer is ever created or written to, so the
     /// bytes raised by the event are raw H.264 encoder packets, not a (fragmented) MP4 —
     /// presumably the reason the dumped file shows zero-sized atom boxes; confirm.
     /// </summary>
     public unsafe class ScreenStreamer : IDisposable
 {
     // H.264 encoder ("production" side).
     private readonly AVCodec* productionCodec;
     // Decoder for the gdigrab capture stream.
     private readonly AVCodec* screenCaptureAVCodec;
     private readonly AVCodecContext* productionAVCodecContext;
     // NOTE(review): never allocated anywhere in this class; Dispose checks it but it stays null.
     private readonly AVFormatContext* productionFormatContext;
     private readonly AVCodecContext* screenCaptureAVCodecContext;
     // Holds the "movflags" entry — see the constructor note: movflags are mov/mp4 muxer
     // options and have no effect when passed to avcodec_open2.
     private readonly AVDictionary* productionAVCodecOptions;
     private readonly AVInputFormat* screenCaptureInputFormat;
     private readonly AVFormatContext* screenCaptureInputFormatContext;
     // Index of the first video stream reported by the gdigrab input.
     private readonly int gDIGrabVideoStreamIndex;
     private readonly System.Drawing.Size screenBounds;
     // NOTE(review): stored but never read anywhere in the visible code.
     private readonly int _produceAtleastAmount;
     // Raised once per encoded packet with the packet bytes as the argument.
     // NOTE(review): invoked with a byte[] although typed as the non-generic EventHandler
     // (whose second parameter is EventArgs) — consider EventHandler<byte[]>.
     public EventHandler OnNewVideoDataProduced;
     // Scratch stream used to copy unmanaged packet data into a managed byte[].
     private MemoryStream unsafeToManagedBridgeBuffer;
     private CancellationTokenSource cancellationTokenSource;
     private Task recorderTask;

     /// <summary>
     /// Sets up the H.264 encoder, opens the gdigrab "desktop" input, locates its video
     /// stream and opens a matching decoder. Throws ApplicationException on any FFmpeg failure.
     /// </summary>
     /// <param name="fps">Target frame rate; used for the encoder time base (1/fps).</param>
     /// <param name="bitrate">Encoder target bit rate in bits per second.</param>
     /// <param name="screenIndex">Forwarded to RetrieveScreenBounds (currently ignored there).</param>
     /// <param name="produceAtleastAmount">Stored in _produceAtleastAmount; unused in visible code.</param>
     public ScreenStreamer(int fps, int bitrate, int screenIndex, int produceAtleastAmount = 1000)
     {
         ffmpeg.avdevice_register_all();
         ffmpeg.avformat_network_init();
         recorderTask = Task.CompletedTask;
         cancellationTokenSource = new CancellationTokenSource();
         unsafeToManagedBridgeBuffer = new MemoryStream();
         _produceAtleastAmount = produceAtleastAmount;

         // Allocate and initialize production codec and context
         productionCodec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
         if (productionCodec == null) throw new ApplicationException("Could not find encoder for codec ID H264.");

         productionAVCodecContext = ffmpeg.avcodec_alloc_context3(productionCodec);
         if (productionAVCodecContext == null) throw new ApplicationException("Could not allocate video codec context.");

         // Set codec parameters
         screenBounds = RetrieveScreenBounds(screenIndex);
         productionAVCodecContext->width = screenBounds.Width;
         productionAVCodecContext->height = screenBounds.Height;
         // time_base = 1/fps: one tick per frame.
         productionAVCodecContext->time_base = new AVRational() { den = fps, num = 1 };
         productionAVCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
         productionAVCodecContext->bit_rate = bitrate;

         // libx264 private option; must be set before avcodec_open2.
         int result = ffmpeg.av_opt_set(productionAVCodecContext->priv_data, "preset", "veryfast", 0);
         if (result != 0)
         {
             throw new ApplicationException($"Failed to set options with error code {result}.");
         }

         // Open codec
         fixed (AVDictionary** pm = &productionAVCodecOptions)
         {
             // NOTE(review): "movflags" (frag_keyframe/empty_moov/default_base_moof) are
             // options of the MP4 *muxer*, not of the H.264 encoder. Passing them to
             // avcodec_open2 below has no effect; they would need to go to
             // avformat_write_header on an output AVFormatContext, which is never created.
             result = ffmpeg.av_dict_set(pm, "movflags", "frag_keyframe+empty_moov+default_base_moof", 0);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to set dictionary with error code {result}.");
             }

             result = ffmpeg.avcodec_open2(productionAVCodecContext, productionCodec, pm);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open codec with error code {result}.");
             }
         }

         // Allocate and initialize screen capture codec and context
         screenCaptureInputFormat = ffmpeg.av_find_input_format("gdigrab");
         if (screenCaptureInputFormat == null) throw new ApplicationException("Could not find input format gdigrab.");

         fixed (AVFormatContext** ps = &screenCaptureInputFormatContext)
         {
             // "desktop" is gdigrab's pseudo-URL for capturing the whole screen.
             result = ffmpeg.avformat_open_input(ps, "desktop", screenCaptureInputFormat, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to open input with error code {result}.");
             }

             result = ffmpeg.avformat_find_stream_info(screenCaptureInputFormatContext, null);
             if (result < 0)
             {
                 throw new ApplicationException($"Failed to find stream info with error code {result}.");
             }
         }

         // Find the first video stream exposed by the capture input.
         gDIGrabVideoStreamIndex = -1;
         for (int i = 0; i < screenCaptureInputFormatContext->nb_streams; i++)
         {
             if (screenCaptureInputFormatContext->streams[i]->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
             {
                 gDIGrabVideoStreamIndex = i;
                 break;
             }
         }

         if (gDIGrabVideoStreamIndex < 0)
         {
             throw new ApplicationException("Failed to find video stream in input.");
         }

         // Open a decoder matching the capture stream's codec parameters.
         AVCodecParameters* codecParameters = screenCaptureInputFormatContext->streams[gDIGrabVideoStreamIndex]->codecpar;
         screenCaptureAVCodec = ffmpeg.avcodec_find_decoder(codecParameters->codec_id);
         if (screenCaptureAVCodec == null)
         {
             throw new ApplicationException("Could not find decoder for screen capture.");
         }

         screenCaptureAVCodecContext = ffmpeg.avcodec_alloc_context3(screenCaptureAVCodec);
         if (screenCaptureAVCodecContext == null)
         {
             throw new ApplicationException("Could not allocate screen capture codec context.");
         }

         result = ffmpeg.avcodec_parameters_to_context(screenCaptureAVCodecContext, codecParameters);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to copy codec parameters to context with error code {result}.");
         }

         result = ffmpeg.avcodec_open2(screenCaptureAVCodecContext, screenCaptureAVCodec, null);
         if (result < 0)
         {
             throw new ApplicationException($"Failed to open screen capture codec with error code {result}.");
         }
     }

     /// <summary>
     /// Starts the background capture loop: read packet -> decode -> pixel-format convert ->
     /// encode -> raise OnNewVideoDataProduced with the encoded bytes. Runs until the
     /// CancellationTokenSource is cancelled (see Dispose).
     /// </summary>
     public void Start()
     {
         recorderTask = Task.Run(() =>
         {
             // One packet struct is reused for BOTH the demuxed input packet and the
             // encoder's output packet — legal since each stage unrefs it, but subtle.
             AVPacket* packet = ffmpeg.av_packet_alloc();
             AVFrame* rawFrame = ffmpeg.av_frame_alloc();
             AVFrame* compatibleFrame = null;
             byte* dstBuffer = null;

             try
             {
                 while (!cancellationTokenSource.Token.IsCancellationRequested)
                 {
                     if (ffmpeg.av_read_frame(screenCaptureInputFormatContext, packet) >= 0)
                     {
                         if (packet->stream_index == gDIGrabVideoStreamIndex)
                         {
                             int response = ffmpeg.avcodec_send_packet(screenCaptureAVCodecContext, packet);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a packet to the decoder: {response}");
                             }

                             response = ffmpeg.avcodec_receive_frame(screenCaptureAVCodecContext, rawFrame);
                             if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                             {
                                 // NOTE(review): this continue skips the av_packet_unref /
                                 // av_frame_unref calls at the bottom of the loop, leaking
                                 // the current packet's data on every EAGAIN.
                                 continue;
                             }
                             else if (response < 0)
                             {
                                 throw new ApplicationException($"Error while receiving a frame from the decoder: {response}");
                             }

                             // NOTE(review): the converted frame never receives a pts from
                             // rawFrame (ConvertToCompatiblePixelFormat does not copy it),
                             // so the encoder is fed frames without meaningful timestamps.
                             compatibleFrame = ConvertToCompatiblePixelFormat(rawFrame, out dstBuffer);

                             response = ffmpeg.avcodec_send_frame(productionAVCodecContext, compatibleFrame);
                             if (response < 0)
                             {
                                 throw new ApplicationException($"Error while sending a frame to the encoder: {response}");
                             }

                             // Drain every packet the encoder has ready for this frame.
                             while (response >= 0)
                             {
                                 response = ffmpeg.avcodec_receive_packet(productionAVCodecContext, packet);
                                 if (response == ffmpeg.AVERROR(ffmpeg.EAGAIN) || response == ffmpeg.AVERROR_EOF)
                                 {
                                     break;
                                 }
                                 else if (response < 0)
                                 {
                                     throw new ApplicationException($"Error while receiving a packet from the encoder: {response}");
                                 }

                                 // Copy the unmanaged packet bytes into a managed array and
                                 // publish them; the bridge buffer is reset for the next packet.
                                 using var packetStream = new UnmanagedMemoryStream(packet->data, packet->size);
                                 packetStream.CopyTo(unsafeToManagedBridgeBuffer);
                                 byte[] managedBytes = unsafeToManagedBridgeBuffer.ToArray();
                                 OnNewVideoDataProduced?.Invoke(this, managedBytes);
                                 unsafeToManagedBridgeBuffer.SetLength(0);
                             }
                         }
                     }
                     ffmpeg.av_packet_unref(packet);
                     ffmpeg.av_frame_unref(rawFrame);
                     if (compatibleFrame != null)
                     {
                         // NOTE(review): av_frame_unref releases the frame's buffers but NOT
                         // the AVFrame struct allocated by av_frame_alloc in
                         // ConvertToCompatiblePixelFormat; each iteration's struct leaks
                         // (only the last one is freed in the finally block below).
                         ffmpeg.av_frame_unref(compatibleFrame);
                         ffmpeg.av_free(dstBuffer);
                     }
                 }
             }
             finally
             {
                 ffmpeg.av_packet_free(&packet);
                 ffmpeg.av_frame_free(&rawFrame);
                 if (compatibleFrame != null)
                 {
                     ffmpeg.av_frame_free(&compatibleFrame);
                 }
             }
         });
     }

     /// <summary>
     /// Scales/converts srcFrame to the encoder's width, height and pixel format (YUV420P).
     /// Returns a newly allocated AVFrame backed by a newly av_malloc'ed buffer; the caller
     /// owns BOTH and must free them (see the loop in Start).
     /// NOTE(review): allocates and frees an SwsContext on every call — consider caching it.
     /// </summary>
     public AVFrame* ConvertToCompatiblePixelFormat(AVFrame* srcFrame, out byte* dstBuffer)
     {
         AVFrame* dstFrame = ffmpeg.av_frame_alloc();
         int buffer_size = ffmpeg.av_image_get_buffer_size(productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);
         byte_ptrArray4 dstData = new byte_ptrArray4();
         int_array4 dstLinesize = new int_array4();
         // NOTE(review): av_malloc result is not checked for null before use.
         dstBuffer = (byte*)ffmpeg.av_malloc((ulong)buffer_size);
         ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, dstBuffer, productionAVCodecContext->pix_fmt, productionAVCodecContext->width, productionAVCodecContext->height, 1);

         dstFrame->format = (int)productionAVCodecContext->pix_fmt;
         dstFrame->width = productionAVCodecContext->width;
         dstFrame->height = productionAVCodecContext->height;
         dstFrame->data.UpdateFrom(dstData);
         dstFrame->linesize.UpdateFrom(dstLinesize);

         SwsContext* swsCtx = ffmpeg.sws_getContext(
             srcFrame->width, srcFrame->height, (AVPixelFormat)srcFrame->format,
             productionAVCodecContext->width, productionAVCodecContext->height, productionAVCodecContext->pix_fmt,
             ffmpeg.SWS_BILINEAR, null, null, null);

         if (swsCtx == null)
         {
             throw new ApplicationException("Could not initialize the conversion context.");
         }

         ffmpeg.sws_scale(swsCtx, srcFrame->data, srcFrame->linesize, 0, srcFrame->height, dstFrame->data, dstFrame->linesize);
         ffmpeg.sws_freeContext(swsCtx);
         return dstFrame;
     }

     /// <summary>
     /// Returns the capture dimensions for the given screen index.
     /// NOTE(review): hard-coded to 1920x1080 and ignores screenIndex — placeholder.
     /// </summary>
     private System.Drawing.Size RetrieveScreenBounds(int screenIndex)
     {
         return new System.Drawing.Size(1920, 1080);
     }

     /// <summary>
     /// Cancels the capture loop, waits for it to finish, then releases all FFmpeg resources.
     /// NOTE(review): Wait() will rethrow (as AggregateException) any exception the capture
     /// task died with.
     /// </summary>
     public void Dispose()
     {
         cancellationTokenSource?.Cancel();
         recorderTask?.Wait();
         cancellationTokenSource?.Dispose();
         recorderTask?.Dispose();
         unsafeToManagedBridgeBuffer?.Dispose();

         fixed (AVCodecContext** p = &productionAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }
         fixed (AVCodecContext** p = &screenCaptureAVCodecContext)
         {
             if (*p != null)
             {
                 ffmpeg.avcodec_free_context(p);
             }
         }

         // Always null in the visible code — see the field note.
         if (productionFormatContext != null)
         {
             ffmpeg.avformat_free_context(productionFormatContext);
         }

         // NOTE(review): a context opened with avformat_open_input must be closed with
         // avformat_close_input, not avformat_free_context (which skips demuxer teardown).
         if (screenCaptureInputFormatContext != null)
         {
             ffmpeg.avformat_free_context(screenCaptureInputFormatContext);
         }

         if (productionAVCodecOptions != null)
         {
             fixed (AVDictionary** p = &productionAVCodecOptions)
             {
                 ffmpeg.av_dict_free(p);
             }
         }
     }
 }


    


    I call the Start method and wait 8 seconds; out of scope I write the bytes to an mp4 file, without writing the trailer, just to debug the atom boxes. The mp4 debugging box output I got:

    


    (Full OUTPUT)
https://pastebin.com/xkM4MfG7

    



    


    (Not full)

    


    &#xA;&#xA;"&#xA;<boxes>&#xA;<uuidbox size="0" type="uuid" uuid="{00000000-00000000-00000000-00000000}" specification="unknown" container="unknown">&#xA;</uuidbox>&#xA;<trackreferencetypebox size="0" type="cdsc" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="hint" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="font" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="hind" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="vdep" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="vplx" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="subt" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="thmb" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="mpod" specification="p14" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="dpnd" specification="p14" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="sync" specification="p14" 
container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="ipir" specification="p14" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="sbas" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="scal" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="tbas" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="sabt" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="oref" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="adda" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="adrc" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="iloc" specification="p12" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="avcp" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="swto" specification="p15" container="tref">&#xA;<trackreferenceentry 
trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="swfr" specification="p15" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="chap" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="tmcd" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="cdep" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="scpt" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="ssrc" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<trackreferencetypebox size="0" type="lyra" specification="apple" container="tref">&#xA;<trackreferenceentry trackid=""></trackreferenceentry>&#xA;</trackreferencetypebox>&#xA;<itemreferencebox size="0" type="tbas" specification="p12" container="iref">&#xA;<itemreferenceboxentry itemid=""></itemreferenceboxentry>&#xA;</itemreferencebox>&#xA;<itemreferencebox size="0" type="iloc" specification="p12" container="iref">&#xA;<itemreferenceboxentry itemid=""></itemreferenceboxentry>&#xA;</itemreferencebox>&#xA;<itemreferencebox size="0" type="fdel" specification="p12" container="iref">&#xA;<itemreferenceboxentry itemid=""></itemreferenceboxentry>&#xA;</itemreferencebox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl 
traf">&#xA;<rollrecoveryentry></rollrecoveryentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<audioprerollentry></audioprerollentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<visualrandomaccessentry></visualrandomaccessentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<cencsampleencryptiongroupentry isencrypted="" kid=""></cencsampleencryptiongroupentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<operatingpointsinformation>&#xA; <profiletierlevel></profiletierlevel>&#xA;<operatingpoint minpicwidth="" minpicheight="" maxpicwidth="" maxpicheight="" maxchromaformat="" maxbitdepth="" avgframerate="" constantframerate="" maxbitrate="" avgbitrate=""></operatingpoint>&#xA;&#xA;</operatingpointsinformation>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<layerinformation>&#xA;<layerinfoitem></layerinfoitem>&#xA;</layerinformation>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<tileregiongroupentry tilegroup="" independent="" x="" y="" w="" h="">&#xA;<tileregiondependency tileid=""></tileregiondependency>&#xA;</tileregiongroupentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<nalumap rle="">&#xA;<nalumapentry groupid=""></nalumapentry>&#xA;</nalumap>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox 
size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<temporallevelentry></temporallevelentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p12" container="stbl traf">&#xA;<sapentry></sapentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry 
size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<syncsamplegroupentry></syncsamplegroupentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="p15" container="stbl traf">&#xA;<subpictureorderentry refs=""></subpictureorderentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="3gpp" 
container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<samplegroupdescriptionbox size="0" type="sgpd" version="0" flags="0" specification="3gpp" container="stbl traf">&#xA;<defaultsamplegroupdescriptionentry size=""></defaultsamplegroupdescriptionentry>&#xA;</samplegroupdescriptionbox>&#xA;<sampledescriptionentrybox size="0" type="GNRM" specification="unknown" container="stsd" extensiondatasize="0">&#xA;</sampledescriptionentrybox>&#xA;<visualsampledescriptionbox size="0" type="GNRV" specification="unknown" container="stsd" version="0" revision="0" vendor="0" temporalquality="0" spacialquality="0" width="0" height="0" horizontalresolution="4718592" verticalresolution="4718592" compressorname="" bitdepth="24">&#xA;</visualsampledescriptionbox>&#xA;<audiosampledescriptionbox size="0" type="GNRA" specification="unknown" container="stsd" version="0" revision="0" vendor="0" channelcount="2" bitspersample="16" samplerate="0">&#xA;</audiosampledescriptionbox>&#xA;<trackgrouptypebox size="0" type="msrc" version="0" flags="0" specification="p12" container="trgr">&#xA;</trackgrouptypebox>&#xA;<trackgrouptypebox size="0" type="ster" version="0" flags="0" specification="p12" container="trgr">&#xA;</trackgrouptypebox>&#xA;<trackgrouptypebox size="0" type="cstg" version="0" flags="0" specification="p15" container="trgr">&#xA;</trackgrouptypebox>&#xA;<freespacebox size="0" type="free" specification="p12" container="*">&#xA;</freespacebox>&#xA;<freespacebox size="0" type="free" specification="p12" container="*">&#xA;</freespacebox>&#xA;<mediadatabox size="0" type="mdat" specification="p12" container="file">&#xA;</mediadatabox>&#xA;<mediadatabox size="0" type="mdat" specification="p12" container="meta">&#xA;"&#xA;</mediadatabox></boxes>

    &#xA;

  • Need help using libavfilter for adding overlay to frames [closed]

    30 juillet 2024, par Michael Werner

    Hello gentlemen and ladies,

    &#xA;

    I am working with libavfilter and I am getting crazy.

    &#xA;

    On Windows 11 OS with latest libav (full build) a C/C++ app reads YUV420P frames from a frame grabber card.

    &#xA;

    I want to draw a bitmap (BGR24) overlay image from file on every frame via libavfilter. First I convert the BGR24 overlay image via format filter to YUV420P. Then I feed the YUV420P frame from frame grabber and the YUV420P overlay into the overlay filter.

    &#xA;

    Everything seems to be fine but when I try to get the frame out of the filter graph I always get an "Resource is temporary not available" (EAGAIN) return code, independent on how many frames I put into the graph.

    &#xA;

    The frames from the frame grabber card are fine, I could encode them or write them to a .yuv file. The overlay frame looks fine too.

    &#xA;

    My current initialization code looks like below. It does not report any errors or warnings but when I try to get the filtered frame out of the graph via av_buffersink_get_frame I always get an EAGAIN return code.

    &#xA;

    Here is my current initialization code :

    &#xA;

    int init_overlay_filter(AVFilterGraph** graph, AVFilterContext** src_ctx, AVFilterContext** overlay_src_ctx,&#xA;                        AVFilterContext** sink_ctx)&#xA;{&#xA;    AVFilterGraph* filter_graph;&#xA;    AVFilterContext* buffersrc_ctx;&#xA;    AVFilterContext* overlay_buffersrc_ctx;&#xA;    AVFilterContext* buffersink_ctx;&#xA;    AVFilterContext* overlay_ctx;&#xA;    AVFilterContext* format_ctx;&#xA;    const AVFilter *buffersrc, *buffersink, *overlay_buffersrc, *overlay_filter, *format_filter;&#xA;    int ret;&#xA;&#xA;    // Create the filter graph&#xA;    filter_graph = avfilter_graph_alloc();&#xA;    if (!filter_graph)&#xA;    {&#xA;        fprintf(stderr, "Unable to create filter graph.\n");&#xA;        return AVERROR(ENOMEM);&#xA;    }&#xA;&#xA;    // Create buffer source filter for main video&#xA;    buffersrc = avfilter_get_by_name("buffer");&#xA;    if (!buffersrc)&#xA;    {&#xA;        fprintf(stderr, "Unable to find buffer filter.\n");&#xA;        return AVERROR_FILTER_NOT_FOUND;&#xA;    }&#xA;&#xA;    // Create buffer source filter for overlay image&#xA;    overlay_buffersrc = avfilter_get_by_name("buffer");&#xA;    if (!overlay_buffersrc)&#xA;    {&#xA;        fprintf(stderr, "Unable to find buffer filter.\n");&#xA;        return AVERROR_FILTER_NOT_FOUND;&#xA;    }&#xA;&#xA;    // Create buffer sink filter&#xA;    buffersink = avfilter_get_by_name("buffersink");&#xA;    if (!buffersink)&#xA;    {&#xA;        fprintf(stderr, "Unable to find buffersink filter.\n");&#xA;        return AVERROR_FILTER_NOT_FOUND;&#xA;    }&#xA;&#xA;    // Create overlay filter&#xA;    overlay_filter = avfilter_get_by_name("overlay");&#xA;    if (!overlay_filter)&#xA;    {&#xA;        fprintf(stderr, "Unable to find overlay filter.\n");&#xA;        return AVERROR_FILTER_NOT_FOUND;&#xA;    }&#xA;&#xA;    // Create format filter&#xA;    format_filter = avfilter_get_by_name("format");&#xA;    if (!format_filter) &#xA;    {&#xA;        fprintf(stderr, "Unable to 
find format filter.\n");&#xA;        return AVERROR_FILTER_NOT_FOUND;&#xA;    }&#xA;&#xA;    // Initialize the main video buffer source&#xA;    char args[512];&#xA;    snprintf(args, sizeof(args),&#xA;             "video_size=1920x1080:pix_fmt=yuv420p:time_base=1/25:pixel_aspect=1/1");&#xA;    ret = avfilter_graph_create_filter(&amp;buffersrc_ctx, buffersrc, "in", args, NULL, filter_graph);&#xA;    if (ret &lt; 0)&#xA;    {&#xA;        fprintf(stderr, "Unable to create buffer source filter for main video.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Initialize the overlay buffer source&#xA;    snprintf(args, sizeof(args),&#xA;             "video_size=165x165:pix_fmt=bgr24:time_base=1/25:pixel_aspect=1/1");&#xA;    ret = avfilter_graph_create_filter(&amp;overlay_buffersrc_ctx, overlay_buffersrc, "overlay_in", args, NULL,&#xA;                                       filter_graph);&#xA;    if (ret &lt; 0)&#xA;    {&#xA;        fprintf(stderr, "Unable to create buffer source filter for overlay.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Initialize the format filter to convert overlay image to yuv420p&#xA;    snprintf(args, sizeof(args), "pix_fmts=yuv420p");&#xA;    ret = avfilter_graph_create_filter(&amp;format_ctx, format_filter, "format", args, NULL, filter_graph);&#xA;&#xA;    if (ret &lt; 0) &#xA;    {&#xA;        fprintf(stderr, "Unable to create format filter.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Initialize the buffer sink&#xA;    ret = avfilter_graph_create_filter(&amp;buffersink_ctx, buffersink, "out", NULL, NULL, filter_graph);&#xA;    if (ret &lt; 0)&#xA;    {&#xA;        fprintf(stderr, "Unable to create buffer sink filter.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Initialize the overlay filter&#xA;    ret = avfilter_graph_create_filter(&amp;overlay_ctx, overlay_filter, "overlay", "W-w:H-h:enable=&#x27;between(t,0,20)&#x27;:format=yuv420", NULL, filter_graph);&#xA;    if (ret &lt; 0)&#xA;    {&#xA;        
fprintf(stderr, "Unable to create overlay filter.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Connect the filters&#xA;    ret = avfilter_link(overlay_buffersrc_ctx, 0, format_ctx, 0);&#xA;&#xA;    if (ret >= 0)&#xA;    {&#xA;        ret = avfilter_link(buffersrc_ctx, 0, overlay_ctx, 0);&#xA;    }&#xA;    else&#xA;    {&#xA;        fprintf(stderr, "Unable to configure filter graph.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;&#xA;    if (ret >= 0) &#xA;    {&#xA;        ret = avfilter_link(format_ctx, 0, overlay_ctx, 1);&#xA;    }&#xA;    else&#xA;    {&#xA;        fprintf(stderr, "Unable to configure filter graph.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    if (ret >= 0) &#xA;    {&#xA;        if ((ret = avfilter_link(overlay_ctx, 0, buffersink_ctx, 0)) &lt; 0)&#xA;        {&#xA;            fprintf(stderr, "Unable to link filter graph.\n");&#xA;            return ret;&#xA;        }&#xA;    }&#xA;    else&#xA;    {&#xA;        fprintf(stderr, "Unable to configure filter graph.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    // Configure the filter graph&#xA;    if ((ret = avfilter_graph_config(filter_graph, NULL)) &lt; 0)&#xA;    {&#xA;        fprintf(stderr, "Unable to configure filter graph.\n");&#xA;        return ret;&#xA;    }&#xA;&#xA;    *graph = filter_graph;&#xA;    *src_ctx = buffersrc_ctx;&#xA;    *overlay_src_ctx = overlay_buffersrc_ctx;&#xA;    *sink_ctx = buffersink_ctx;&#xA;&#xA;    return 0;&#xA;}&#xA;

    &#xA;

    Feeding the filter graph is done this way :

    &#xA;

    av_buffersrc_add_frame_flags(buffersrc_ctx, pFrameGrabberFrame, AV_BUFFERSRC_FLAG_KEEP_REF)&#xA;av_buffersink_get_frame(buffersink_ctx, filtered_frame)&#xA;

    &#xA;

    av_buffersink_get_frame always returns EAGAIN, no matter how many frames I feed into the graph. The frames themselves (from the frame grabber and the overlay image) look fine.

    &#xA;

    I set the libav logging level to maximum, but I do not see any warnings, errors, or other helpful related information in the log.

    &#xA;

    Here the log output related to the filter configuration :

    &#xA;

    [in @ 00000288ee494f40] Setting &#x27;video_size&#x27; to value &#x27;1920x1080&#x27;&#xA;[in @ 00000288ee494f40] Setting &#x27;pix_fmt&#x27; to value &#x27;yuv420p&#x27;&#xA;[in @ 00000288ee494f40] Setting &#x27;time_base&#x27; to value &#x27;1/25&#x27;&#xA;[in @ 00000288ee494f40] Setting &#x27;pixel_aspect&#x27; to value &#x27;1/1&#x27;&#xA;[in @ 00000288ee494f40] w:1920 h:1080 pixfmt:yuv420p tb:1/25 fr:0/1 sar:1/1 csp:unknown range:unknown&#xA;[overlay_in @ 00000288ff1013c0] Setting &#x27;video_size&#x27; to value &#x27;165x165&#x27;&#xA;[overlay_in @ 00000288ff1013c0] Setting &#x27;pix_fmt&#x27; to value &#x27;bgr24&#x27;&#xA;[overlay_in @ 00000288ff1013c0] Setting &#x27;time_base&#x27; to value &#x27;1/25&#x27;&#xA;[overlay_in @ 00000288ff1013c0] Setting &#x27;pixel_aspect&#x27; to value &#x27;1/1&#x27;&#xA;[overlay_in @ 00000288ff1013c0] w:165 h:165 pixfmt:bgr24 tb:1/25 fr:0/1 sar:1/1 csp:unknown range:unknown&#xA;[format @ 00000288ff1015c0] Setting &#x27;pix_fmts&#x27; to value &#x27;yuv420p&#x27;&#xA;[overlay @ 00000288ff101880] Setting &#x27;x&#x27; to value &#x27;W-w&#x27;&#xA;[overlay @ 00000288ff101880] Setting &#x27;y&#x27; to value &#x27;H-h&#x27;&#xA;[overlay @ 00000288ff101880] Setting &#x27;enable&#x27; to value &#x27;between(t,0,20)&#x27;&#xA;[overlay @ 00000288ff101880] Setting &#x27;format&#x27; to value &#x27;yuv420&#x27;&#xA;[auto_scale_0 @ 00000288ff101ec0] w:iw h:ih flags:&#x27;&#x27; interl:0&#xA;[format @ 00000288ff1015c0] auto-inserting filter &#x27;auto_scale_0&#x27; between the filter &#x27;overlay_in&#x27; and the filter &#x27;format&#x27;&#xA;[auto_scale_1 @ 00000288ee4a4cc0] w:iw h:ih flags:&#x27;&#x27; interl:0&#xA;[overlay @ 00000288ff101880] auto-inserting filter &#x27;auto_scale_1&#x27; between the filter &#x27;format&#x27; and the filter &#x27;overlay&#x27;&#xA;[AVFilterGraph @ 00000288ee495c80] query_formats: 5 queried, 6 merged, 6 already done, 0 delayed&#xA;[auto_scale_0 @ 00000288ff101ec0] w:165 h:165 fmt:bgr24 csp:gbr 
range:pc sar:1/1 -> w:165 h:165 fmt:yuv420p csp:unknown range:unknown sar:1/1 flags:0x00000004&#xA;[auto_scale_1 @ 00000288ee4a4cc0] w:165 h:165 fmt:yuv420p csp:unknown range:unknown sar:1/1 -> w:165 h:165 fmt:yuva420p csp:unknown range:unknown sar:1/1 flags:0x00000004&#xA;[overlay @ 00000288ff101880] main w:1920 h:1080 fmt:yuv420p overlay w:165 h:165 fmt:yuva420p&#xA;[overlay @ 00000288ff101880] [framesync @ 00000288ff1019a8] Selected 1/25 time base&#xA;[overlay @ 00000288ff101880] [framesync @ 00000288ff1019a8] Sync level 2&#xA;

    &#xA;

  • Use ffmpeg multiple h264_nvenc instances will crash occurs during release

    13 août 2024, par yang zhao

    Using FFmpeg: when multiple threads use multiple h264_nvenc instances (one instance per thread), a crash occurs during release (avcodec_free_context), and the final exception occurs in libnvcuvid.so.&#xA;I don't know what the reason is — please help, thanks.&#xA;The same problem exists with both ffmpeg v5.0.1 + CUDA v11.6 and ffmpeg v7.0.1 + CUDA v12.2.&#xA;Operating system: Ubuntu 22.04.4 LTS

    &#xA;

    The specific code is as follows :

    &#xA;

    class NvencEncoder {&#xA;public:&#xA;    NvencEncoder() {}&#xA;    ~NvencEncoder { Close(); }&#xA;    &#xA;    bool Open() {&#xA;        auto encoder = avcodec_find_encoder_by_name("h264_nvenc");&#xA;        pCodecCtx_ = avcodec_alloc_context3(encoder);&#xA;        if (!pCodecCtx_)&#xA;            return false;&#xA;&#xA;        int width = 1920;&#xA;        int height = 1080;&#xA;        int bitrate = 1000000;&#xA;        &#xA;        pCodecCtx_->codec_type = AVMEDIA_TYPE_VIDEO;&#xA;        pCodecCtx_->pix_fmt = AV_PIX_FMT_YUV420P;&#xA;        pCodecCtx_->width = width;&#xA;        pCodecCtx_->height = height;&#xA;        pCodecCtx_->bit_rate = bitrate;&#xA;        pCodecCtx_->rc_min_rate = bitrate;&#xA;        pCodecCtx_->rc_max_rate = bitrate;&#xA;        pCodecCtx_->bit_rate_tolerance = bitrate;&#xA;        pCodecCtx_->rc_buffer_size = bitrate / 2;&#xA;        pCodecCtx_->time_base = AVRational{ 1, 90000 };&#xA;        pCodecCtx_->framerate = AVRational{ 25, 1 };&#xA;        pCodecCtx_->gop_size = 50;&#xA;        pCodecCtx_->max_b_frames = 0;&#xA;        pCodecCtx_->delay = 0;&#xA;        pCodecCtx_->refs = 2;&#xA;        pCodecCtx_->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;&#xA;&#xA;        av_opt_set_int(pCodecCtx_->priv_data, "gpu", 0, 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "preset", "llhp", 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "rc", "cbr", 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "profile", "main", 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "zerolatency", "1", 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "delay", "0", 0);&#xA;        av_opt_set(pCodecCtx_->priv_data, "preset", "medium", 0);&#xA;&#xA;        int ret = avcodec_open2(pCodecCtx_, encoder, nullptr);&#xA;        if (ret &lt; 0)&#xA;            return false;&#xA;&#xA;        pkt_ = av_packet_alloc();&#xA;        if (!pkt_)&#xA;            return false;&#xA;&#xA;        char output_mp4[] = "output.mp4";&#xA;        ret = 
avformat_alloc_output_context2(&amp;avMp4Context_, NULL, "mp4", output_mp4);&#xA;        if (ret &lt; 0)&#xA;            return false;&#xA;            &#xA;        mp4_stream_ = avformat_new_stream(avMp4Context_, nullptr);&#xA;        if (!mp4_stream_)&#xA;            return false;&#xA;&#xA;        ret = avcodec_parameters_copy(mp4_stream_->codecpar, out_stream_->codecpar);&#xA;        if (ret &lt; 0)&#xA;            return false;&#xA;            &#xA;        mp4_stream_->codecpar->codec_tag = 0;&#xA;&#xA;        if (!(avMp4Context_->oformat->flags &amp; AVFMT_NOFILE)) {&#xA;            ret = avio_open(&amp;avMp4Context_->pb, output_mp4_.c_str(), AVIO_FLAG_WRITE);&#xA;            if (ret &lt; 0) {&#xA;                return false;&#xA;        }&#xA;        return true;&#xA;    }&#xA;&#xA;    void Close() {&#xA;        if (pCodecCtx_)&#xA;            avcodec_free_context(&amp;pCodecCtx_); // Crash will occur in libnvcuvid.so&#xA;&#xA;        if (avMp4Context_) {&#xA;            if (avMp4Context_->oformat &amp;&amp; !(avMp4Context_->oformat->flags &amp; AVFMT_NOFILE)) {&#xA;                avio_closep(&amp;avMp4Context_->pb);&#xA;            }&#xA;            avformat_free_context(avMp4Context_);&#xA;            avMp4Context_ = nullptr;&#xA;        }&#xA;        &#xA;        if (pkt_)&#xA;            av_packet_free(&amp;pkt_);&#xA;    }&#xA;&#xA;    bool InputFrame(AVFrame* frame) {&#xA;        int ret = avcodec_send_frame(pEncoderVideoCodecCtx_, frame);&#xA;        if (ret &lt; 0)&#xA;            return false;&#xA;            &#xA;        while (ret >= 0) {&#xA;            ret = avcodec_receive_packet(pEncoderVideoCodecCtx_, pkt_);&#xA;            if (ret &lt; 0)&#xA;                break;&#xA;&#xA;            if (avNotHeadWrited_) {&#xA;                ret = avformat_write_header(avMp4Context_, &amp;opts);&#xA;                if (ret &lt; 0) {&#xA;                    av_packet_unref(pkt_);&#xA;                    break;&#xA;                }&#xA;                
avNotHeadWrited_ = false;&#xA;            }&#xA;&#xA;            av_packet_rescale_ts(pkt_, pCodecCtx_->time_base, mp4_stream_->time_base);&#xA;            ret = av_write_frame(avMp4Context_, pkt_);&#xA;            if (ret &lt; 0) {&#xA;                av_packet_unref(pkt_);&#xA;                break;&#xA;            }&#xA;&#xA;            av_packet_unref(pkt_);&#xA;        }&#xA;        &#xA;        return (ret >= 0);&#xA;    }&#xA;private:&#xA;    AVPacket* pkt_ = nullptr;&#xA;    AVCodecContext* pCodecCtx_ = nullptr;&#xA;    AVFormatContext* avMp4Context_ = nullptr;&#xA;    AVStream* mp4_stream_ = nullptr;&#xA;    avNotHeadWrited_ = true;&#xA;}&#xA;&#xA;uint8_t* data = nullptr; //a frame of yuv420 data&#xA;void Run(int idx);&#xA;&#xA;int main() {&#xA;    //Fill a frame of yuv420 data here&#xA;    ...&#xA;&#xA;    std::thread th[3];&#xA;    for (int i = 0; i &lt; 3; i&#x2B;&#x2B;) {&#xA;        th[i] = std::thread(Run, i);&#xA;        sleep(3);&#xA;    }&#xA;&#xA;    sleep(35);&#xA;&#xA;    for (int i = 0; i &lt; 3; i&#x2B;&#x2B;) {&#xA;        if (th[i].joinable()) {&#xA;            printf("thread %d join()\n", i);&#xA;            th[i].join();&#xA;        }&#xA;    }&#xA;&#xA;    free(data);&#xA;    printf("Exit\n");&#xA;}&#xA;&#xA;void Run(int idx) {&#xA;    printf("Run() thread(%d)\n", idx);&#xA;    //cudaSetDevice(0);&#xA;&#xA;    auto nvenc = new NvencEncoder(ffpar, FFOutputCB);&#xA;    if (!nvenc->Open()) {&#xA;        delete nvenc;&#xA;        return;&#xA;    }&#xA;&#xA;    auto avframe_ = av_frame_alloc();&#xA;    avframe_->width = 1920;&#xA;    avframe_->height = 1080;&#xA;    avframe_->format = AV_PIX_FMT_YUV420P;&#xA;&#xA;    int ret = av_frame_get_buffer(avframe_, 0);&#xA;    if (ret &lt; 0) {&#xA;        printf("av_frame_get_buffer() is error %d\n", ret);&#xA;        delete nvenc;&#xA;        av_frame_free(&amp;avframe_);&#xA;        return;&#xA;    }&#xA;&#xA;    int frame_size = 1920 * 1080;&#xA;    double one_frame_us = 1000000.0 / 25.0;&#xA;    
unsigned long frame_count = 0;&#xA;    struct timeval t1, t2;&#xA;    double timeuse;&#xA;&#xA;    AVRational timebase = { ffpar.timebase_num, ffpar.timebase_den };&#xA;    std::int64_t llCalcDuration = (double)AV_TIME_BASE / 25.0;&#xA;    double in_stream_timebase = av_q2d(timebase);&#xA;    std::int64_t duration = (double)llCalcDuration / (double)(in_stream_timebase * AV_TIME_BASE);&#xA;    avframe_->time_base = timebase;&#xA;    gettimeofday(&amp;t1, NULL);&#xA;&#xA;    while (frame_count &lt; 25*30) { //30 seconds&#xA;&#xA;        avframe_->pts = (double)(frame_count * llCalcDuration) / (double(in_stream_timebase * AV_TIME_BASE));&#xA;        //avframe_->duration = duration;&#xA;        frame_count&#x2B;&#x2B;;&#xA;&#xA;        ret = av_frame_make_writable(avframe_);&#xA;        if (ret &lt; 0) {&#xA;            printf("av_frame_make_writable() is error %d\n", ret);&#xA;            break;&#xA;        }&#xA;&#xA;        // copy YUV420&#xA;        memcpy(avframe_->data[0], data, frame_size);&#xA;        memcpy(avframe_->data[1], data &#x2B; frame_size, frame_size / 4);&#xA;        memcpy(avframe_->data[2], data &#x2B; frame_size * 5 / 4, frame_size / 4);&#xA;&#xA;        ret = nvenc->InputFrame(avframe_);&#xA;        if (ret &lt; 0) {&#xA;            printf("InputFrame() is error: %d\n", ret);&#xA;            break;&#xA;        }&#xA;&#xA;        // frame rate&#xA;        gettimeofday(&amp;t2, NULL);&#xA;        timeuse = (t2.tv_sec - t1.tv_sec) * 1000000 &#x2B; (t2.tv_usec - t1.tv_usec); //us&#xA;        if (timeuse &lt; one_frame_us) {&#xA;            usleep(one_frame_us - timeuse);&#xA;        }&#xA;        gettimeofday(&amp;t1, NULL);&#xA;    }&#xA;&#xA;    if (frame_count > 0) {&#xA;        nvenc->WriteTrailer();&#xA;    }&#xA;&#xA;    printf("do Close() thread(%d)\n", idx);&#xA;    nvenc->Close();  // Crash will occur&#xA;    printf("Closed thread(%d)\n", idx);&#xA;    delete nvenc;&#xA;    av_frame_free(&amp;avframe_);&#xA;}&#xA;

    &#xA;