Other articles (45)

  • Write a news item

    21 June 2013, by

    Present changes to your MediaSPIP, or news about your projects, on your MediaSPIP through the news section.
    In MediaSPIP’s default theme, spipeo, news items are displayed at the bottom of the main page, below the editorials.
    You can customise the form used to create a news item.
    News item creation form: in the case of a document of type news item, the fields offered by default are: publication date (customise the publication date) (...)

  • Other interesting software

    12 April 2011, by

    We don’t claim to be the only ones doing what we do ... and we certainly don’t claim to be the best either ... We just try to do what we do well, and better and better...
    The following list covers software that more or less tries to do what MediaSPIP does, or that MediaSPIP more or less tries to do the same, either way ...
    We don’t know them and haven’t tried them, but you may want to take a look at them.
    Videopress
    Website: (...)

  • Support for all types of media

    10 April 2011

    Unlike many modern document-sharing programs and platforms, MediaSPIP aims to handle as many different document formats as possible, whether they are: images (png, gif, jpg, bmp and others...); audio (MP3, Ogg, Wav and others...); video (Avi, MP4, Ogv, mpg, mov, wmv and others...); textual content, code or other (open office, microsoft office (spreadsheet, presentation), web (html, css), LaTeX, Google Earth) (...)

On other sites (5055)

  • FFMPEG not enough data (x < y), trying to decode anyway

    7 June 2016, by Forest J. Handford

    I’m trying to make videos of Direct3D games using a C# app. For non-Direct3D games I stream images from Graphics.CopyFromScreen, which works. When I copy the screen from Direct3D and stream it to FFMPEG I get:

    [bmp @ 00000276b0b9c280] not enough data (5070 < 129654), trying to
    decode anyway

    An MP4 file is created, but it is always 0 bytes.

    To get screenshots from Direct3D, I am using Justin Stenning’s Direct3DHook. This produces images MUCH bigger than when I get images from Graphics.CopyFromScreen (8 MB vs 136 KB). I’ve tried increasing the buffer (-bufsize) but the number on the left of the error is not impacted.

    I’ve tried resizing the image to 1/6th of the original. That reduces the number on the right, but does not eliminate it. Even when the number on the right is close to what I have for Graphics.CopyFromScreen, I get an error. Here is a sample of the current code:

    using System;
    using System.Diagnostics;
    using System.Threading;
    using System.Drawing;
    using Capture.Hook;
    using Capture.Interface;
    using Capture;
    using System.IO;

    namespace GameRecord
    {
       public class Video
       {
           private const int VID_FRAME_FPS = 8;
           private const int SIZE_MODIFIER = 6;
           private const double FRAMES_PER_MS = VID_FRAME_FPS * 0.001;
           private const int SLEEP_INTERVAL = 2;
           private const int CONSTANT_RATE_FACTOR = 18; // Lower crf = Higher Quality https://trac.ffmpeg.org/wiki/Encode/H.264
           private Image image;
           private Capture captureScreen;
           private int processId = 0;
           private Process process;
           private CaptureProcess captureProcess;
           private Process launchingFFMPEG;
           private string arg;
           private int frame = 0;
           private Size? resize = null;


           /// <summary>
           /// Generates the Videos by gathering frames and processing via FFMPEG.
           /// </summary>
           public void RecordScreenTillGameEnd(string exe, OutputDirectory outputDirectory, CustomMessageBox alertBox, Thread workerThread)
           {
               AttachProcess(exe);
               RequestD3DScreenShot();
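               // Busy-wait below until the first D3D screenshot callback has assigned "image".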
               while (image == null) ;
               Logger.log.Info("Launching FFMPEG ....");
               resize = new Size(image.Width / SIZE_MODIFIER, image.Height / SIZE_MODIFIER);
               // H.264 can let us do 8 FPS in high res . . . but must be licensed for commercial use.
               arg = "-f image2pipe -framerate " + VID_FRAME_FPS + " -i pipe:.bmp -pix_fmt yuv420p -crf " +
                   CONSTANT_RATE_FACTOR + " -preset ultrafast -s " + resize.Value.Width + "x" +
                   resize.Value.Height + " -vcodec libx264 -bufsize 30000k -y \"" +
                   outputDirectory.pathToVideo + "\"";

               launchingFFMPEG = new Process
               {
                   StartInfo = new ProcessStartInfo
                   {
                       FileName = "ffmpeg",
                       Arguments = arg,
                       UseShellExecute = false,
                       CreateNoWindow = true,
                       RedirectStandardInput = true,
                       RedirectStandardError = true
                   }
               };
               launchingFFMPEG.Start();

               Stopwatch stopWatch = Stopwatch.StartNew(); // creates and starts the Stopwatch instance

               do
               {
                   Thread.Sleep(SLEEP_INTERVAL);
               } while (workerThread.IsAlive);

               Logger.log.Info("Total frames: " + frame + " Expected frames: " + (ExpectedFrames(stopWatch.ElapsedMilliseconds) - 1));

               launchingFFMPEG.StandardInput.Close();

    #if DEBUG
               string line;
               while ((line = launchingFFMPEG.StandardError.ReadLine()) != null)
               {
                   Logger.log.Debug(line);
               }
    #endif
               launchingFFMPEG.Close();
               alertBox.Show();
           }

           void RequestD3DScreenShot()
           {
               captureProcess.CaptureInterface.BeginGetScreenshot(new Rectangle(0, 0, 0, 0), new TimeSpan(0, 0, 2), Callback, resize, (ImageFormat)Enum.Parse(typeof(ImageFormat), "Bitmap"));
           }

           private void AttachProcess(string exe)
           {
               Thread.Sleep(300);
               Process[] processes = Process.GetProcessesByName(Path.GetFileNameWithoutExtension(exe));
               foreach (Process currProcess in processes)
               {
                   // Simply attach to the first one found.

                   // If the process doesn't have a mainwindowhandle yet, skip it (we need to be able to get the hwnd to set foreground etc)
                   if (currProcess.MainWindowHandle == IntPtr.Zero)
                   {
                       continue;
                   }

                   // Skip if the process is already hooked (and we want to hook multiple applications)
                   if (HookManager.IsHooked(currProcess.Id))
                   {
                       continue;
                   }

                   Direct3DVersion direct3DVersion = Direct3DVersion.AutoDetect;

                   CaptureConfig cc = new CaptureConfig()
                   {
                       Direct3DVersion = direct3DVersion,
                       ShowOverlay = false
                   };

                   processId = currProcess.Id;
                   process = currProcess;

                   var captureInterface = new CaptureInterface();
                   captureInterface.RemoteMessage += new MessageReceivedEvent(CaptureInterface_RemoteMessage);
                   captureProcess = new CaptureProcess(process, cc, captureInterface);

                   break;
               }
               Thread.Sleep(10);

               if (captureProcess == null)
               {
                   ShowUser.Exception("No executable found matching: '" + exe + "'");
               }
           }

           /// <summary>
           /// The callback for when the screenshot has been taken
           /// </summary>
           void Callback(IAsyncResult result)
           {
               using (Screenshot screenshot = captureProcess.CaptureInterface.EndGetScreenshot(result))
               if (screenshot != null && screenshot.Data != null && arg != null)
               {
                   if (image != null)
                   {
                       image.Dispose();
                   }

                   image = screenshot.ToBitmap();
                   // image.Save("D3DImageTest.bmp");
                   image.Save(launchingFFMPEG.StandardInput.BaseStream, System.Drawing.Imaging.ImageFormat.Bmp);
                   launchingFFMPEG.StandardInput.Flush();
                   frame++;
               }

               if (frame < 5)
               {
                   Thread t = new Thread(new ThreadStart(RequestD3DScreenShot));
                   t.Start();
               }
               else
               {
                   Logger.log.Info("Done getting shots from D3D.");
               }
           }

           /// <summary>
           /// Display messages from the target process
           /// </summary>
           private void CaptureInterface_RemoteMessage(MessageReceivedEventArgs message)
           {
               Logger.log.Info(message);
           }
       }
    }

    When I search the internet for the error, all I get is the FFMPEG source code, which has not proven to be illuminating. I have been able to save the image directly to disk, which makes me feel like it is not an issue with disposing of the data. I have also tried grabbing only one frame, but that produces the same error, which suggests to me it is not a threading issue.

    Here is the full sample of stderr:

    2016-06-02 18:29:38,046 === ffmpeg version N-79143-g8ff0f6a Copyright (c) 2000-2016 the FFmpeg developers

    2016-06-02 18:29:38,047 ===   built with gcc 5.3.0 (GCC)

    2016-06-02 18:29:38,048 ===   configuration: --enable-gpl
    --enable-version3 --disable-w32threads --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libdcadec --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmfx --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-decklink --enable-zlib

    2016-06-02 18:29:38,062 ===   libavutil      55. 19.100 / 55. 19.100

    2016-06-02 18:29:38,063 ===   libavcodec     57. 30.100 / 57. 30.100

    2016-06-02 18:29:38,064 ===   libavformat    57. 29.101 / 57. 29.101

    2016-06-02 18:29:38,064 ===   libavdevice    57.  0.101 / 57.  0.101

    2016-06-02 18:29:38,065 ===   libavfilter     6. 40.102 /  6. 40.102

    2016-06-02 18:29:38,066 ===   libswscale      4.  0.100 /  4.  0.100

    2016-06-02 18:29:38,067 ===   libswresample   2.  0.101 /  2.  0.101

    2016-06-02 18:29:38,068 ===   libpostproc    54.  0.100 / 54.  0.100

    2016-06-02 18:29:38,068 === [bmp @ 000002cd7e5cc280] not enough data (13070 < 8294454), trying to decode anyway

    2016-06-02 18:29:38,069 === [bmp @ 000002cd7e5cc280] not enough data (13016 < 8294400)

    2016-06-02 18:29:38,069 === Input #0, image2pipe, from 'pipe:.bmp':

    2016-06-02 18:29:38,262 ===   Duration: N/A, bitrate: N/A

    2016-06-02 18:29:38,262 ===     Stream #0:0: Video: bmp, bgra, 1920x1080, 8 tbr, 8 tbn, 8 tbc

    2016-06-02 18:29:38,263 === [libx264 @ 000002cd7e5d59a0] VBV bufsize set but maxrate unspecified, ignored

    2016-06-02 18:29:38,264 === [libx264 @ 000002cd7e5d59a0] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 AVX2 LZCNT BMI2

    2016-06-02 18:29:38,265 === [libx264 @ 000002cd7e5d59a0] profile Constrained Baseline, level 1.1

    2016-06-02 18:29:38,266 === [libx264 @ 000002cd7e5d59a0] 264 - core 148 r2665 a01e339 - H.264/MPEG-4 AVC codec - Copyleft 2003-2016 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=250 keyint_min=8 scenecut=0 intra_refresh=0 rc=crf mbtree=0 crf=18.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=0

    2016-06-02 18:29:38,463 === Output #0, mp4, to 'C:\Users\fores\AppData\Roaming\Affectiva\n_Artifacts_20160602_182857\GameplayVidOut.mp4':

    2016-06-02 18:29:38,464 ===   Metadata:

    2016-06-02 18:29:38,465 ===     encoder         : Lavf57.29.101

    2016-06-02 18:29:38,469 ===     Stream #0:0: Video: h264 (libx264) ([33][0][0][0] / 0x0021), yuv420p, 320x180, q=-1--1, 8 fps, 16384 tbn, 8 tbc

    2016-06-02 18:29:38,470 ===     Metadata:

    2016-06-02 18:29:38,472 ===       encoder         : Lavc57.30.100 libx264

    2016-06-02 18:29:38,474 ===     Side data:

    2016-06-02 18:29:38,475 ===       cpb: bitrate max/min/avg: 0/0/0 buffer size: 30000000 vbv_delay: -1

    2016-06-02 18:29:38,476 === Stream mapping:

    2016-06-02 18:29:38,477 ===   Stream #0:0 -> #0:0 (bmp (native) -> h264 (libx264))

    2016-06-02 18:29:38,480 === [bmp @ 000002cd7e5cc9a0] not enough data (13070 < 8294454), trying to decode anyway

    2016-06-02 18:29:38,662 === [bmp @ 000002cd7e5cc9a0] not enough data (13016 < 8294400)

    2016-06-02 18:29:38,662 === Error while decoding stream #0:0: Invalid data found when processing input

    2016-06-02 18:29:38,663 === frame=    0 fps=0.0 q=0.0 Lsize=       0kB time=00:00:00.00 bitrate=N/A speed=   0x    

    2016-06-02 18:29:38,663 === video:0kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown

    2016-06-02 18:29:38,664 === Conversion failed!

    In memory, the current image is 320 pixels wide and 180 pixels tall. The pixel format is Format32bppRgb. The horizontal and vertical resolutions seem odd: they are both 96.01199. When saved to disk, here is the ffprobe output for the file:

    ffprobe version N-79143-g8ff0f6a Copyright (c) 2007-2016 the FFmpeg developers
     built with gcc 5.3.0 (GCC)
     configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libdcadec --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmfx --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-decklink --enable-zlib
     libavutil      55. 19.100 / 55. 19.100
     libavcodec     57. 30.100 / 57. 30.100
     libavformat    57. 29.101 / 57. 29.101
     libavdevice    57.  0.101 / 57.  0.101
     libavfilter     6. 40.102 /  6. 40.102
     libswscale      4.  0.100 /  4.  0.100
     libswresample   2.  0.101 /  2.  0.101
     libpostproc    54.  0.100 / 54.  0.100
    Input #0, png_pipe, from 'C:\Users\fores\git\game-playtest-tool\GamePlayTest\bin\x64\Debug\D3DFromCapture.bmp':
     Duration: N/A, bitrate: N/A
       Stream #0:0: Video: png, rgba(pc), 1920x1080 [SAR 3779:3779 DAR 16:9], 25 tbr, 25 tbn, 25 tbc

    Here is a PNG version of an example screenshot from the current code (playing Portal 2):
    Portal 2 Screenshot

    Any ideas would be greatly appreciated. My current workaround is to save the files to the HDD and compile the video after gameplay, but it’s a far less performant option. Thank you!
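    For reference, here is a rough sketch of that disk-based workaround (the scratch directory and output name are made up for illustration): each captured frame is saved as a numbered bitmap during gameplay, and a single ffmpeg call stitches the files together afterwards.

    // During gameplay: save each captured frame as a numbered bitmap.
    string frameDir = @"C:\temp\frames";   // hypothetical scratch directory
    image.Save(System.IO.Path.Combine(frameDir, string.Format("frame{0:D5}.bmp", frame)),
               System.Drawing.Imaging.ImageFormat.Bmp);

    // After gameplay: stitch the numbered files into an MP4.
    var stitch = new System.Diagnostics.Process
    {
        StartInfo = new System.Diagnostics.ProcessStartInfo
        {
            FileName = "ffmpeg",
            Arguments = "-framerate " + VID_FRAME_FPS + " -i \"" + frameDir + "\\frame%05d.bmp\"" +
                        " -pix_fmt yuv420p -c:v libx264 -crf " + CONSTANT_RATE_FACTOR + " -y output.mp4",
            UseShellExecute = false,
            CreateNoWindow = true
        }
    };
    stitch.Start();
    stitch.WaitForExit();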

  • ffmpeg stops randomly with "No more output streams to write to, finishing" when reading rtsp stream [on hold]

    7 June 2016, by ImagineDragon

    Basically I am running the following ffmpeg command to read an H.264 RTSP stream with the HLS muxer, under Ubuntu 14.04 64-bit with an i7 CPU and 8 GB of memory:

    ffmpeg -v debug -i rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB -codec copy -flags -global_header -f hls -hls_time 5 -use_localtime 1 -hls_segment_filename '/home/eason/ffmpeg-test/%s.ts' '/home/eason/ffmpeg-test/live.m3u8'
    ffmpeg version N-79139-gde1a0d4 Copyright (c) 2000-2016 the FFmpeg developers
     built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.1)
     configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libfreetype --enable-gnutls --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvidstab
     libavutil      55. 19.100 / 55. 19.100
     libavcodec     57. 30.100 / 57. 30.100
     libavformat    57. 29.101 / 57. 29.101
     libavdevice    57.  0.101 / 57.  0.101
     libavfilter     6. 40.102 /  6. 40.102
     libavresample   3.  0.  0 /  3.  0.  0
     libswscale      4.  0.100 /  4.  0.100
     libswresample   2.  0.101 /  2.  0.101
     libpostproc    54.  0.100 / 54.  0.100
    Splitting the commandline.
    Reading option '-v' ... matched as option 'v' (set logging level) with argument 'debug'.
    Reading option '-re' ... matched as option 're' (read input at native frame rate) with argument '1'.
    Reading option '-i' ... matched as input file with argument 'rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB'.
    Reading option '-codec' ... matched as option 'codec' (codec name) with argument 'copy'.
    Reading option '-flags' ... matched as AVOption 'flags' with argument '-global_header'.
    Reading option '-f' ... matched as option 'f' (force format) with argument 'hls'.
    Reading option '-hls_time' ... matched as AVOption 'hls_time' with argument '5'.
    Reading option '-use_localtime' ... matched as AVOption 'use_localtime' with argument '1'.
    Reading option '-hls_segment_filename' ... matched as AVOption 'hls_segment_filename' with argument '/home/eason/ffmpeg-test/%s.ts'.
    Reading option '/home/eason/ffmpeg-test/live.m3u8' ... matched as output file.
    Finished splitting the commandline.
    Parsing a group of options: global .
    Applying option v (set logging level) with argument debug.
    Successfully parsed a group of options.
    Parsing a group of options: input file rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB.
    Applying option re (read input at native frame rate) with argument 1.
    Successfully parsed a group of options.
    Opening an input file: rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB.
    [tcp @ 0x29c4d60] No default whitelist set
    [rtsp @ 0x29c2e20] SDP:
    m=video 0 RTP/AVP 96
    a=rtpmap:96 H264/90000
    a=fmtp:96 packetization-mode=1;profile-level-id=640028;sprop-parameter-sets=Z2QAKKzoBQBbkA==,aO48sA==
    a=control:trackID=0
    m=audio 0 RTP/AVP 98
    a=rtpmap:98 MPEG4-GENERIC/16000/1
    a=fmtp:98 profile-level-id=1;mode=AAC-lbr;sizelength=13;indexlength=3;indexdeltalength=3;config=1408;cpresent=0;
    a=control:trackID=1


    [rtsp @ 0x29c2e20] video codec set to: h264
    [rtsp @ 0x29c2e20] RTP Packetization Mode: 1
    [rtsp @ 0x29c2e20] RTP Profile IDC: 64 Profile IOP: 0 Level: 28
    [rtsp @ 0x29c2e20] Extradata set to 0x29c4ff0 (size: 22)
    [rtsp @ 0x29c2e20] audio codec set to: aac
    [rtsp @ 0x29c2e20] audio samplerate set to: 16000
    [rtsp @ 0x29c2e20] audio channels set to: 1
    [rtp @ 0x29c50a0] No default whitelist set
    [udp @ 0x29c5200] No default whitelist set
    [udp @ 0x29c5200] end receive buffer size reported is 131072
    [udp @ 0x29d9480] No default whitelist set
    [udp @ 0x29d9480] end receive buffer size reported is 131072
    [rtsp @ 0x29c2e20] method SETUP failed: 461 Unsupported transport
    [rtsp @ 0x29c2e20] CSeq: 3

    [rtsp @ 0x29c2e20] setting jitter buffer size to 0
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] hello state=0
    [rtsp @ 0x29c2e20] All info found
    [rtsp @ 0x29c2e20] rfps: 24.250000 0.018062
    [rtsp @ 0x29c2e20] rfps: 24.333333 0.012911
    [rtsp @ 0x29c2e20] rfps: 24.416667 0.008627
    [rtsp @ 0x29c2e20] rfps: 24.500000 0.005207
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 24.583333 0.002653
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 24.666667 0.000965
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 24.750000 0.000142
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 24.833333 0.000185
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 24.916667 0.001093
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 25.000000 0.002867
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 25.083333 0.005507
       Last message repeated 1 times
    [rtsp @ 0x29c2e20] rfps: 25.166667 0.009011
    [rtsp @ 0x29c2e20] rfps: 25.250000 0.013382
    [rtsp @ 0x29c2e20] rfps: 25.333333 0.018618
    [rtsp @ 0x29c2e20] rfps: 50.000000 0.011469
    Input #0, rtsp, from 'rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB':
     Duration: N/A, start: 0.000000, bitrate: N/A
       Stream #0:0, 28, 1/90000: Video: h264 (High), 1 reference frame, yuv420p(left), 1280x720, 1/180000, 24.75 tbr, 90k tbn, 180k tbc
       Stream #0:1, 23, 1/16000: Audio: aac (LC), 16000 Hz, mono, fltp
    Successfully opened the file.
    Parsing a group of options: output file /home/eason/ffmpeg-test/live.m3u8.
    Applying option codec (codec name) with argument copy.
    Applying option f (force format) with argument hls.
    Successfully parsed a group of options.
    Opening an output file: /home/eason/ffmpeg-test/live.m3u8.
    Successfully opened the file.
    [file @ 0x2cae180] Setting default whitelist 'file,crypto'
    [mpegts @ 0x29f7ce0] muxrate VBR, pcr every 9000 pkts, sdt every 2147483647, pat/pmt every 2147483647 pkts
    Output #0, hls, to '/home/eason/ffmpeg-test/live.m3u8':
     Metadata:
       encoder         : Lavf57.29.101
       Stream #0:0, 0, 1/90000: Video: h264, 1 reference frame, yuv420p(left), 1280x720 (0x0), 1/90000, q=2-31, 24.75 tbr, 90k tbn, 90k tbc
       Stream #0:1, 0, 1/90000: Audio: aac (LC), 16000 Hz, mono
    Stream mapping:
     Stream #0:0 -> #0:0 (copy)
     Stream #0:1 -> #0:1 (copy)
    Press [q] to stop, [?] for help
    cur_dts is invalid (this is harmless if it occurs once at the start per stream)
       Last message repeated 103 times
    [hls @ 0x2a0fc00] Timestamps are unset in a packet for stream 0. This is deprecated and will stop working in the future. Fix your code to set the timestamps properly
    [AVIOContext @ 0x2c998c0] Statistics: 0 seeks, 155 writeouts=N/A speed=1.01x    
    [file @ 0x2ba60e0] Setting default whitelist 'file,crypto'
    [file @ 0x2ca9fc0] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:0
    [AVIOContext @ 0x2caa080] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 103 writeouts=N/A speed=   1x    
    [file @ 0x2ba60e0] Setting default whitelist 'file,crypto'
    [file @ 0x2c97c60] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:0
    [AVIOContext @ 0x2c97d20] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 104 writeouts=N/A speed=   1x    
    [file @ 0x2ba60e0] Setting default whitelist 'file,crypto'
    [file @ 0x2c99680] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:0
    [AVIOContext @ 0x2c99820] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 28 writeoutse=N/A speed=   1x    
    [file @ 0x29c50c0] Setting default whitelist 'file,crypto'
    [file @ 0x2ba60e0] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:0
    [AVIOContext @ 0x29c2ce0] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 111 writeouts=N/A speed=   1x    
    [file @ 0x2c97d40] Setting default whitelist 'file,crypto'
    [file @ 0x2a134e0] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:0
    [AVIOContext @ 0x2a11000] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 105 writeouts=N/A speed=   1x    
    [file @ 0x29c50c0] Setting default whitelist 'file,crypto'
    [file @ 0x2a10c00] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:1
    [AVIOContext @ 0x2a10cc0] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 103 writeouts=N/A speed=   1x    
    [file @ 0x29c50c0] Setting default whitelist 'file,crypto'
    [file @ 0x2c99480] Setting default whitelist 'file,crypto'

    .......repeat........

    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:71
    [AVIOContext @ 0x2c97fa0] Statistics: 0 seeks, 1 writeouts
    [rtsp @ 0x29c2e20] Too short data for FU-A H264 RTP packette=N/A speed=   1x    
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 103 writeouts=N/A speed=   1x    
    [file @ 0x2c97b20] Setting default whitelist 'file,crypto'
    [file @ 0x2c97c60] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:72
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 156 writeouts=N/A speed=   1x    
    [file @ 0x2c99480] Setting default whitelist 'file,crypto'
    [file @ 0x2cab600] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:73
    [AVIOContext @ 0x2cab6c0] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 104 writeouts=N/A speed=   1x    
    [file @ 0x2cadc00] Setting default whitelist 'file,crypto'
    [file @ 0x2c99480] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:74
    [AVIOContext @ 0x29c2ce0] Statistics: 0 seeks, 1 writeouts
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 103 writeouts=N/A speed=   1x    
    [file @ 0x2a13540] Setting default whitelist 'file,crypto'
    [file @ 0x2cab680] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:75
    [AVIOContext @ 0x2c99480] Statistics: 0 seeks, 1 writeouts
    No more output streams to write to, finishing.:46.09 bitrate=N/A speed=0.993x    
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 138 writeouts
    [file @ 0x29c50c0] Setting default whitelist 'file,crypto'
    [hls @ 0x2a0fc00] EXT-X-MEDIA-SEQUENCE:76
    [AVIOContext @ 0x2ba60e0] Statistics: 0 seeks, 1 writeouts
    frame= 8423 fps= 21 q=-1.0 Lsize=N/A time=00:06:46.09 bitrate=N/A speed=0.993x    
    video:37031kB audio:1586kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: unknown
    Input file #0 (rtsp://172.16.11.247:554/live/ch00_0?token=hQttXvrjSPOcClQB):
     Input stream #0:0 (video): 8423 packets read (37919815 bytes);
     Input stream #0:1 (audio): 6345 packets read (1624337 bytes);
     Total: 14768 packets (39544152 bytes) demuxed
    Output file #0 (/home/eason/ffmpeg-test/live.m3u8):
     Output stream #0:0 (video): 8423 packets muxed (37919815 bytes);
     Output stream #0:1 (audio): 6345 packets muxed (1624337 bytes);
     Total: 14768 packets (39544152 bytes) muxed
    0 frames successfully decoded, 0 decoding errors

    The problem is that ffmpeg exits randomly after 5 to 20 minutes. The RTSP source is a live stream, so stopping every 5 minutes is really not acceptable. In the ffmpeg log I can’t see any noticeable error except "No more output streams to write to, finishing." near the end. Can someone shed some light on this? How can I make ffmpeg run without stopping? Thanks

  • increasing memory usage while recording the screen and saving to disk with ffmpeg [on hold]

    19 May 2016, by vbtang

    I wonder if there are any resources I didn’t free, or do I need to do something special to release this memory?

    PS: I ran the demo step by step and found it has the same problem; even after the main function returns, it still holds about 40 MB of memory. The memory clearly grows in the screen capture thread, but once it reaches about 150 MB it stops increasing; when I quit the program, 40 MB is still left. I find this confusing.

    PPS: I downloaded the ffmpeg dev and shared builds from https://ffmpeg.zeranoe.com/builds/. They seem to be debug DLLs; do I need a release version?
    Here is my demo code:

       #include "stdafx.h"

    #ifdef  __cplusplus
    extern "C"
    {
    #endif
    #include "libavcodec/avcodec.h"
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    #include "libavdevice/avdevice.h"
    #include "libavutil/audio_fifo.h"

    #pragma comment(lib, "avcodec.lib")
    #pragma comment(lib, "avformat.lib")
    #pragma comment(lib, "avutil.lib")
    #pragma comment(lib, "avdevice.lib")
    #pragma comment(lib, "avfilter.lib")

    //#pragma comment(lib, "avfilter.lib")
    //#pragma comment(lib, "postproc.lib")
    //#pragma comment(lib, "swresample.lib")
    #pragma comment(lib, "swscale.lib")
    #ifdef __cplusplus
    };
    #endif

    AVFormatContext *pFormatCtx_Video = NULL, *pFormatCtx_Audio = NULL, *pFormatCtx_Out = NULL;
    AVCodecContext  *pCodecCtx_Video;
    AVCodec         *pCodec_Video;
    AVFifoBuffer    *fifo_video = NULL;
    AVAudioFifo     *fifo_audio = NULL;
    int VideoIndex, AudioIndex;

    CRITICAL_SECTION AudioSection, VideoSection;



    SwsContext *img_convert_ctx;
    int frame_size = 0;

    uint8_t *picture_buf = NULL, *frame_buf = NULL;

    bool bCap = true;

    DWORD WINAPI ScreenCapThreadProc( LPVOID lpParam );
    DWORD WINAPI AudioCapThreadProc( LPVOID lpParam );

    int OpenVideoCapture()
    {
       AVInputFormat *ifmt=av_find_input_format("gdigrab");
       //
       AVDictionary *options = NULL;
       av_dict_set(&options, "framerate", "15", NULL);
       //av_dict_set(&options,"offset_x","20",0);
       //The distance from the top edge of the screen or desktop
       //av_dict_set(&options,"offset_y","40",0);
       //Video frame size. The default is to capture the full screen
       //av_dict_set(&options,"video_size","320x240",0);
       if(avformat_open_input(&pFormatCtx_Video, "desktop", ifmt, &options)!=0)
       {
           printf("Couldn't open input stream.(无法打开视频输入流)\n");
           return -1;
       }
       if(avformat_find_stream_info(pFormatCtx_Video,NULL)<0)
       {
           printf("Couldn't find stream information.(无法获取视频流信息)\n");
           return -1;
       }
       if (pFormatCtx_Video->streams[0]->codec->codec_type != AVMEDIA_TYPE_VIDEO)
       {
           printf("Couldn't find video stream information.(无法获取视频流信息)\n");
           return -1;
       }
       pCodecCtx_Video = pFormatCtx_Video->streams[0]->codec;
       pCodec_Video = avcodec_find_decoder(pCodecCtx_Video->codec_id);
       if(pCodec_Video == NULL)
       {
           printf("Codec not found.(没有找到解码器)\n");
           return -1;
       }
       if(avcodec_open2(pCodecCtx_Video, pCodec_Video, NULL) < 0)
       {
           printf("Could not open codec.(无法打开解码器)\n");
           return -1;
       }



       img_convert_ctx = sws_getContext(pCodecCtx_Video->width, pCodecCtx_Video->height, pCodecCtx_Video->pix_fmt,
           pCodecCtx_Video->width, pCodecCtx_Video->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);

       frame_size = avpicture_get_size(pCodecCtx_Video->pix_fmt, pCodecCtx_Video->width, pCodecCtx_Video->height);
       //
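       // Note: this FIFO holds 30 full YUV420 frames; for a 1920x1080 desktop that
       // is roughly 30 x 3 MB, i.e. about 90 MB, allocated up front.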
       fifo_video = av_fifo_alloc(30 * avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx_Video->width, pCodecCtx_Video->height));

       return 0;
    }

    static char *dup_wchar_to_utf8(wchar_t *w)
    {
       char *s = NULL;
       int l = WideCharToMultiByte(CP_UTF8, 0, w, -1, 0, 0, 0, 0);
       s = (char *) av_malloc(l);
       if (s)
           WideCharToMultiByte(CP_UTF8, 0, w, -1, s, l, 0, 0);
       return s;
    }

    int OpenAudioCapture()
    {
       //
       AVInputFormat *pAudioInputFmt = av_find_input_format("dshow");

       //
       char * psDevName = dup_wchar_to_utf8(L"audio=virtual-audio-capturer");

       if (avformat_open_input(&pFormatCtx_Audio, psDevName, pAudioInputFmt,NULL) < 0)
       {
           printf("Couldn't open input stream.(无法打开音频输入流)\n");
           return -1;
       }

       if(avformat_find_stream_info(pFormatCtx_Audio,NULL)<0)
           return -1;

       if(pFormatCtx_Audio->streams[0]->codec->codec_type != AVMEDIA_TYPE_AUDIO)
       {
           printf("Couldn't find video stream information.(无法获取音频流信息)\n");
           return -1;
       }

       AVCodec *tmpCodec = avcodec_find_decoder(pFormatCtx_Audio->streams[0]->codec->codec_id);
       if(0 > avcodec_open2(pFormatCtx_Audio->streams[0]->codec, tmpCodec, NULL))
       {
           printf("can not find or open audio decoder!\n");
       }



       return 0;
    }

    int OpenOutPut()
    {
       AVStream *pVideoStream = NULL, *pAudioStream = NULL;
       const char *outFileName = "test.mp4";
       avformat_alloc_output_context2(&pFormatCtx_Out, NULL, NULL, outFileName);

       if (pFormatCtx_Video->streams[0]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
       {
           VideoIndex = 0;
           pVideoStream = avformat_new_stream(pFormatCtx_Out, NULL);

           if (!pVideoStream)
           {
               printf("can not new stream for output!\n");
               return -1;
           }

           //set codec context param
           pVideoStream->codec->codec = avcodec_find_encoder(AV_CODEC_ID_MPEG4);
           pVideoStream->codec->height = pFormatCtx_Video->streams[0]->codec->height;
           pVideoStream->codec->width = pFormatCtx_Video->streams[0]->codec->width;

           pVideoStream->codec->time_base = pFormatCtx_Video->streams[0]->codec->time_base;
           pVideoStream->codec->sample_aspect_ratio = pFormatCtx_Video->streams[0]->codec->sample_aspect_ratio;
           // take first format from list of supported formats
           pVideoStream->codec->pix_fmt = pFormatCtx_Out->streams[VideoIndex]->codec->codec->pix_fmts[0];

           //open encoder
           if (!pVideoStream->codec->codec)
           {
               printf("can not find the encoder!\n");
               return -1;
           }

           if (pFormatCtx_Out->oformat->flags & AVFMT_GLOBALHEADER)
               pVideoStream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;

           if ((avcodec_open2(pVideoStream->codec, pVideoStream->codec->codec, NULL)) < 0)
           {
               printf("can not open the encoder\n");
               return -1;
           }
       }

       if(pFormatCtx_Audio->streams[0]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
       {
           AVCodecContext *pOutputCodecCtx;
           AudioIndex = 1;
           pAudioStream = avformat_new_stream(pFormatCtx_Out, NULL);

           pAudioStream->codec->codec = avcodec_find_encoder(pFormatCtx_Out->oformat->audio_codec);

           pOutputCodecCtx = pAudioStream->codec;

           pOutputCodecCtx->sample_rate = pFormatCtx_Audio->streams[0]->codec->sample_rate;
           pOutputCodecCtx->channel_layout = pFormatCtx_Out->streams[0]->codec->channel_layout;
           pOutputCodecCtx->channels = av_get_channel_layout_nb_channels(pAudioStream->codec->channel_layout);
           if(pOutputCodecCtx->channel_layout == 0)
           {
               pOutputCodecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
               pOutputCodecCtx->channels = av_get_channel_layout_nb_channels(pOutputCodecCtx->channel_layout);

           }
           pOutputCodecCtx->sample_fmt = pAudioStream->codec->codec->sample_fmts[0];
           AVRational time_base={1, pAudioStream->codec->sample_rate};
           pAudioStream->time_base = time_base;
           //audioCodecCtx->time_base = time_base;

           pOutputCodecCtx->codec_tag = 0;  
           if (pFormatCtx_Out->oformat->flags & AVFMT_GLOBALHEADER)
               pOutputCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;

           if (avcodec_open2(pOutputCodecCtx, pOutputCodecCtx->codec, 0) < 0)
           {
               //
               return -1;
           }
       }

       if (!(pFormatCtx_Out->oformat->flags & AVFMT_NOFILE))
       {
           if(avio_open(&pFormatCtx_Out->pb, outFileName, AVIO_FLAG_WRITE) < 0)
           {
               printf("can not open output file handle!\n");
               return -1;
           }
       }

       if(avformat_write_header(pFormatCtx_Out, NULL) < 0)
       {
           printf("can not write the header of the output file!\n");
           return -1;
       }

       return 0;
    }

    int _tmain(int argc, _TCHAR* argv[])
    {
       av_register_all();
       avdevice_register_all();
       if (OpenVideoCapture() < 0)
       {
           return -1;
       }
       if (OpenAudioCapture() < 0)
       {
           return -1;
       }
       if (OpenOutPut() < 0)
       {
           return -1;
       }

       InitializeCriticalSection(&VideoSection);
       InitializeCriticalSection(&AudioSection);

       AVFrame *picture = av_frame_alloc();
       int size = avpicture_get_size(pFormatCtx_Out->streams[VideoIndex]->codec->pix_fmt,
           pFormatCtx_Out->streams[VideoIndex]->codec->width, pFormatCtx_Out->streams[VideoIndex]->codec->height);
       picture_buf = new uint8_t[size];

       avpicture_fill((AVPicture *)picture, picture_buf,
           pFormatCtx_Out->streams[VideoIndex]->codec->pix_fmt,
           pFormatCtx_Out->streams[VideoIndex]->codec->width,
           pFormatCtx_Out->streams[VideoIndex]->codec->height);



       //star cap screen thread
       CreateThread( NULL, 0, ScreenCapThreadProc, 0, 0, NULL);
       //star cap audio thread
       CreateThread( NULL, 0, AudioCapThreadProc, 0, 0, NULL);
       int64_t cur_pts_v=0,cur_pts_a=0;
       int VideoFrameIndex = 0, AudioFrameIndex = 0;

       while(1)
       {
           if (_kbhit() != 0 && bCap)
           {
               bCap = false;
               Sleep(2000);//
           }
           if (fifo_audio && fifo_video)
           {
               int sizeAudio = av_audio_fifo_size(fifo_audio);
               int sizeVideo = av_fifo_size(fifo_video);
               //
               if (av_audio_fifo_size(fifo_audio) <= pFormatCtx_Out->streams[AudioIndex]->codec->frame_size &&
                   av_fifo_size(fifo_video) <= frame_size && !bCap)
               {
                   break;
               }
           }

           if(av_compare_ts(cur_pts_v, pFormatCtx_Out->streams[VideoIndex]->time_base,
               cur_pts_a,pFormatCtx_Out->streams[AudioIndex]->time_base) <= 0)
           {
               //read data from fifo
               if (av_fifo_size(fifo_video) < frame_size && !bCap)
               {
                   cur_pts_v = 0x7fffffffffffffff;
               }
               if(av_fifo_size(fifo_video) >= size)
               {
                   EnterCriticalSection(&VideoSection);
                   av_fifo_generic_read(fifo_video, picture_buf, size, NULL);
                   LeaveCriticalSection(&VideoSection);

                   avpicture_fill((AVPicture *)picture, picture_buf,
                       pFormatCtx_Out->streams[VideoIndex]->codec->pix_fmt,
                       pFormatCtx_Out->streams[VideoIndex]->codec->width,
                       pFormatCtx_Out->streams[VideoIndex]->codec->height);

                   //pts = n * ((1 / timbase)/ fps);
                   picture->pts = VideoFrameIndex * ((pFormatCtx_Video->streams[0]->time_base.den / pFormatCtx_Video->streams[0]->time_base.num) / 15);

                   int got_picture = 0;
                   AVPacket pkt;
                   av_init_packet(&pkt);

                   pkt.data = NULL;
                   pkt.size = 0;
                   int ret = avcodec_encode_video2(pFormatCtx_Out->streams[VideoIndex]->codec, &pkt, picture, &got_picture);
                   if(ret < 0)
                   {
                       //
                       continue;
                   }

                   if (got_picture==1)
                   {
                       pkt.stream_index = VideoIndex;
                       pkt.pts = av_rescale_q_rnd(pkt.pts, pFormatCtx_Video->streams[0]->time_base,
                           pFormatCtx_Out->streams[VideoIndex]->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));  
                       pkt.dts = av_rescale_q_rnd(pkt.dts,  pFormatCtx_Video->streams[0]->time_base,
                           pFormatCtx_Out->streams[VideoIndex]->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));  

                       pkt.duration = ((pFormatCtx_Out->streams[0]->time_base.den / pFormatCtx_Out->streams[0]->time_base.num) / 15);

                       cur_pts_v = pkt.pts;

                       ret = av_interleaved_write_frame(pFormatCtx_Out, &pkt);
                       //delete[] pkt.data;
                       av_free_packet(&pkt);
                   }
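                   // Note: when the encoder call above fails (continue) or got_picture
                   // stays 0, the packet is never passed to av_free_packet().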
                   VideoFrameIndex++;
               }
           }
           else
           {
               if (NULL == fifo_audio)
               {
                   continue;//
               }
               if (av_audio_fifo_size(fifo_audio) < pFormatCtx_Out->streams[AudioIndex]->codec->frame_size && !bCap)
               {
                   cur_pts_a = 0x7fffffffffffffff;
               }
               if(av_audio_fifo_size(fifo_audio) >=
                   (pFormatCtx_Out->streams[AudioIndex]->codec->frame_size > 0 ? pFormatCtx_Out->streams[AudioIndex]->codec->frame_size : 1024))
               {
                   AVFrame *frame;
                   frame = av_frame_alloc();
                   frame->nb_samples = pFormatCtx_Out->streams[AudioIndex]->codec->frame_size>0 ? pFormatCtx_Out->streams[AudioIndex]->codec->frame_size: 1024;
                   frame->channel_layout = pFormatCtx_Out->streams[AudioIndex]->codec->channel_layout;
                   frame->format = pFormatCtx_Out->streams[AudioIndex]->codec->sample_fmt;
                   frame->sample_rate = pFormatCtx_Out->streams[AudioIndex]->codec->sample_rate;
                   av_frame_get_buffer(frame, 0);

                   EnterCriticalSection(&AudioSection);
                   av_audio_fifo_read(fifo_audio, (void **)frame->data,
                       (pFormatCtx_Out->streams[AudioIndex]->codec->frame_size > 0 ? pFormatCtx_Out->streams[AudioIndex]->codec->frame_size : 1024));
                   LeaveCriticalSection(&AudioSection);

                   if (pFormatCtx_Out->streams[0]->codec->sample_fmt != pFormatCtx_Audio->streams[AudioIndex]->codec->sample_fmt
                       || pFormatCtx_Out->streams[0]->codec->channels != pFormatCtx_Audio->streams[AudioIndex]->codec->channels
                       || pFormatCtx_Out->streams[0]->codec->sample_rate != pFormatCtx_Audio->streams[AudioIndex]->codec->sample_rate)
                   {
                       //
                   }

                   AVPacket pkt_out;
                   av_init_packet(&pkt_out);
                   int got_picture = -1;
                   pkt_out.data = NULL;
                   pkt_out.size = 0;

                   frame->pts = AudioFrameIndex * pFormatCtx_Out->streams[AudioIndex]->codec->frame_size;
                   if (avcodec_encode_audio2(pFormatCtx_Out->streams[AudioIndex]->codec, &pkt_out, frame, &got_picture) < 0)
                   {
                       printf("can not decoder a frame");
                   }
                   av_frame_free(&frame);
                   if (got_picture)
                   {
                       pkt_out.stream_index = AudioIndex;
                       pkt_out.pts = AudioFrameIndex * pFormatCtx_Out->streams[AudioIndex]->codec->frame_size;
                       pkt_out.dts = AudioFrameIndex * pFormatCtx_Out->streams[AudioIndex]->codec->frame_size;
                       pkt_out.duration = pFormatCtx_Out->streams[AudioIndex]->codec->frame_size;

                       cur_pts_a = pkt_out.pts;

                       int ret = av_interleaved_write_frame(pFormatCtx_Out, &pkt_out);
                       av_free_packet(&pkt_out);
                   }
                   AudioFrameIndex++;
               }
           }
       }

       delete[] picture_buf;

       delete[]frame_buf;
       av_fifo_free(fifo_video);
       av_audio_fifo_free(fifo_audio);
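       // Note: the AVFrame "picture" allocated with av_frame_alloc() near the top of
       // _tmain is never released with av_frame_free() before returning.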

       av_write_trailer(pFormatCtx_Out);

       avio_close(pFormatCtx_Out->pb);
       avformat_free_context(pFormatCtx_Out);

       if (pFormatCtx_Video != NULL)
       {
           avformat_close_input(&pFormatCtx_Video);
           pFormatCtx_Video = NULL;
       }
       if (pFormatCtx_Audio != NULL)
       {
           avformat_close_input(&pFormatCtx_Audio);
           pFormatCtx_Audio = NULL;
       }
       if (NULL != img_convert_ctx)
       {
           sws_freeContext(img_convert_ctx);
           img_convert_ctx = NULL;
       }

       return 0;
    }

    DWORD WINAPI ScreenCapThreadProc( LPVOID lpParam )
    {
       AVPacket packet;/* = (AVPacket *)av_malloc(sizeof(AVPacket))*/;
       int got_picture;
       AVFrame *pFrame;
       pFrame= av_frame_alloc();

       AVFrame *picture = av_frame_alloc();
       int size = avpicture_get_size(pFormatCtx_Out->streams[VideoIndex]->codec->pix_fmt,
           pFormatCtx_Out->streams[VideoIndex]->codec->width, pFormatCtx_Out->streams[VideoIndex]->codec->height);
       //picture_buf = new uint8_t[size];

       avpicture_fill((AVPicture *)picture, picture_buf,
           pFormatCtx_Out->streams[VideoIndex]->codec->pix_fmt,
           pFormatCtx_Out->streams[VideoIndex]->codec->width,
           pFormatCtx_Out->streams[VideoIndex]->codec->height);

    //  FILE *p = NULL;
    //  p = fopen("proc_test.yuv", "wb+");
       av_init_packet(&packet);
       int height = pFormatCtx_Out->streams[VideoIndex]->codec->height;
       int width = pFormatCtx_Out->streams[VideoIndex]->codec->width;
       int y_size=height*width;
       while(bCap)
       {
           packet.data = NULL;
           packet.size = 0;
           if (av_read_frame(pFormatCtx_Video, &packet) < 0)
           {
               av_free_packet(&packet);
               continue;
           }
           if(packet.stream_index == 0)
           {
               if (avcodec_decode_video2(pCodecCtx_Video, pFrame, &got_picture, &packet) < 0)
               {
                   printf("Decode Error.(解码错误)\n");
                   continue;
               }
               if (got_picture)
               {
                   sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0,
                       pFormatCtx_Out->streams[VideoIndex]->codec->height, picture->data, picture->linesize);

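                   // If the FIFO does not have room for a whole frame, the frame is
                   // simply dropped, so the FIFO never grows past its initial size.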
                   if (av_fifo_space(fifo_video) >= size)
                   {
                       EnterCriticalSection(&VideoSection);
                       av_fifo_generic_write(fifo_video, picture->data[0], y_size, NULL);
                       av_fifo_generic_write(fifo_video, picture->data[1], y_size/4, NULL);
                       av_fifo_generic_write(fifo_video, picture->data[2], y_size/4, NULL);
                       LeaveCriticalSection(&VideoSection);
                   }
               }
           }
           av_free_packet(&packet);
           //Sleep(50);
       }
       av_frame_free(&pFrame);
       av_frame_free(&picture);
       //delete[] picture_buf;
       return 0;
    }

    DWORD WINAPI AudioCapThreadProc( LPVOID lpParam )
    {
       AVPacket pkt;
       AVFrame *frame;
       frame = av_frame_alloc();
       int gotframe;
       while(bCap)
       {
           pkt.data = NULL;
           pkt.size = 0;
           if(av_read_frame(pFormatCtx_Audio,&pkt) < 0)
           {
               av_free_packet(&pkt);
               continue;
           }

           if (avcodec_decode_audio4(pFormatCtx_Audio->streams[0]->codec, frame, &gotframe, &pkt) < 0)
           {
               av_frame_free(&frame);
               printf("can not decoder a frame");
               break;
           }
           av_free_packet(&pkt);

           if (!gotframe)
           {
               continue;//
           }

           if (NULL == fifo_audio)
           {
               fifo_audio = av_audio_fifo_alloc(pFormatCtx_Audio->streams[0]->codec->sample_fmt,
                   pFormatCtx_Audio->streams[0]->codec->channels, 30 * frame->nb_samples);
           }

           int buf_space = av_audio_fifo_space(fifo_audio);
           if (av_audio_fifo_space(fifo_audio) >= frame->nb_samples)
           {
               EnterCriticalSection(&AudioSection);
               av_audio_fifo_write(fifo_audio, (void **)frame->data, frame->nb_samples);
               LeaveCriticalSection(&AudioSection);
           }
       }
       av_frame_free(&frame);
       return 0;
    }