Recherche avancée

Médias (1)

Mot : - Tags -/copyleft

Autres articles (99)

  • MediaSPIP 0.1 Beta version

    25 avril 2011, par

    MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...)

  • Les formats acceptés

    28 janvier 2010, par

    Les commandes suivantes permettent d’avoir des informations sur les formats et codecs gérés par l’installation locale de ffmpeg :
    ffmpeg -codecs ffmpeg -formats
    Les formats vidéos acceptés en entrée
    Cette liste est non exhaustive, elle met en exergue les principaux formats utilisés : h264 : H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 m4v : raw MPEG-4 video format flv : Flash Video (FLV) / Sorenson Spark / Sorenson H.263 Theora wmv :
    Les formats vidéos de sortie possibles
    Dans un premier temps on (...)

  • Multilang : améliorer l’interface pour les blocs multilingues

    18 février 2011, par

    Multilang est un plugin supplémentaire qui n’est pas activé par défaut lors de l’initialisation de MediaSPIP.
    Après son activation, une préconfiguration est mise en place automatiquement par MediaSPIP init permettant à la nouvelle fonctionnalité d’être automatiquement opérationnelle. Il n’est donc pas obligatoire de passer par une étape de configuration pour cela.

Sur d’autres sites (13108)

  • Cryptic `ffmpeg` + Python error : `Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1`

    5 août 2022, par mattze_frisch

    I want to use ffmpeg with Python in a Jupyter notebook to create a video from 2000 PNG image files generated by matplotlib. I'm on a Windows 7 machine.

    


    The frames are very narrow and high, so I'd like to cut them vertically into seven equal-sized "tiles" using crop and stack those "tiles" horizontally using hstack, but I'm only getting cryptic error messages :

    


    ffmpeg_path = notebook_directory    # Make sure ffmpeg.exe is in notebook directory!&#xA;&#xA;frames_path = &#x27;C:\\Users\\Username\\Desktop&#x27;&#xA;frames_renamed_path = os.path.join(frames_path, &#x27;renamed&#x27;)&#xA;os.chdir(frames_renamed_path)    # Temporarily change current working directory&#xA;&#xA;check_call(&#xA;    [&#xA;        os.path.join(ffmpeg_path, &#x27;ffmpeg&#x27;),&#xA;        &#x27;-y&#x27;,    # Overwrite output files without asking&#xA;        &#x27;-report&#x27;,    # Write logfile to current working directory&#xA;        &#x27;-hwaccel&#x27;, &#x27;cuda&#x27;,    # Use GPU acceleration&#xA;        &#x27;-framerate&#x27;, &#x27;60&#x27;,    # Input frame rate&#xA;        &#x27;-i&#x27;, os.path.join(frames_renamed_path, &#x27;frame%05d.png&#x27;),    # Path to input frames&#xA;        #&#x27;-r&#x27;, &#x27;60&#x27;,    # Output frame rate&#xA;        #&#x27;-vf&#x27;, &#x27;pad=ceil(iw/2)*2:ceil(ih/2)*2&#x27;,    # Pad frames to even pixel numbers (required by many codecs)&#xA;        #&#x27;-pix_fmt&#x27;, &#x27;yuv420p&#x27;,&#xA;        &#x27;-pix_fmt&#x27;, &#x27;rgb32&#x27;,    # Use RGB32 pixel format matching MatPlotLib image output&#xA;        &#x27;-filter_complex&#x27;,    # Cut frames into tiles and rearrange (here: 7 horizontal tiles)&#xA;            &#x27;crop=in_w:in_h/7:0:0[tile_1],&#x27;    # 1st tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*1/7[tile_2],&#x27;    # 2nd tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*2/7[tile_3],&#x27;    # 3rd tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*3/7[tile_4],&#x27;    # 4th tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*4/7[tile_5],&#x27;    # 5th tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*5/7[tile_6],&#x27;    # 6th tile from top&#xA;            &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*6/7[tile_7],&#x27;    # 7th tile from top&#xA;            &#x2B; 
&#x27;[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7&#x27;,    # Stack tiles horizontally&#xA;        os.path.join(frames_renamed_path, &#x27;video.mp4&#x27;)    # Path to store output video&#xA;    ]&#xA;)&#xA;&#xA;os.chdir(notebook_directory)    # Reset current working directory to notebook directory&#xA;&#xA;---------------------------------------------------------------------------&#xA;CalledProcessError                        Traceback (most recent call last)&#xA;Input In [10], in <cell 9="9">()&#xA;      6 frames_renamed_path = os.path.join(frames_path, &#x27;renamed&#x27;)&#xA;      7 os.chdir(frames_renamed_path)    # Temporarily change current working directory&#xA;----> 9 check_call(&#xA;     10     [&#xA;     11         os.path.join(ffmpeg_path, &#x27;ffmpeg&#x27;),&#xA;     12         &#x27;-y&#x27;,    # Overwrite output files without asking&#xA;     13         &#x27;-report&#x27;,    # Write logfile to current working directory&#xA;     14         &#x27;-hwaccel&#x27;, &#x27;cuda&#x27;,    # Use GPU acceleration&#xA;     15         &#x27;-framerate&#x27;, &#x27;60&#x27;,    # Input frame rate&#xA;     16         &#x27;-i&#x27;, os.path.join(frames_renamed_path, &#x27;frame%05d.png&#x27;),    # Path to input frames&#xA;     17         #&#x27;-pattern_type&#x27;, &#x27;glob&#x27;, &#x27;-i&#x27;, os.path.join(frames_renamed_path, &#x27;*.png&#x27;),    # glob not available in Windows by default&#xA;     18         #&#x27;-r&#x27;, &#x27;60&#x27;,    # Output frame rate&#xA;     19         &#x27;-vf&#x27;, &#x27;pad=ceil(iw/2)*2:ceil(ih/2)*2&#x27;,    # Pad frames to even pixel numbers (required by many codecs)&#xA;     20         #&#x27;-pix_fmt&#x27;, &#x27;yuv420p&#x27;,&#xA;     21         &#x27;-pix_fmt&#x27;, &#x27;rgb32&#x27;,    # Use RGB32 pixel format matching MatPlotLib image output&#xA;     22         &#x27;-filter_complex&#x27;,    # Cut frames into tiles and rearrange (here: 7 horizontal tiles)&#xA;     23             
&#x27;crop=in_w:in_h/7:0:0[tile_1],&#x27;    # 1st tile from top&#xA;     24             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*1/7[tile_2],&#x27;    # 2nd tile from top&#xA;     25             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*2/7[tile_3],&#x27;    # 3rd tile from top&#xA;     26             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*3/7[tile_4],&#x27;    # 4th tile from top&#xA;     27             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*4/7[tile_5],&#x27;    # 5th tile from top&#xA;     28             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*5/7[tile_6],&#x27;    # 6th tile from top&#xA;     29             &#x2B; &#x27;crop=in_w:in_h/7:0:in_h*6/7[tile_7],&#x27;    # 7th tile from top&#xA;     30             &#x2B; &#x27;[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7&#x27;,    # Stack tiles horizontally&#xA;     31         os.path.join(frames_renamed_path, &#x27;video.mp4&#x27;)    # Path to store output video&#xA;     32     ]&#xA;     33 )&#xA;     35 os.chdir(notebook_directory)&#xA;&#xA;File C:\Program Files\Python38\lib\subprocess.py:364, in check_call(*popenargs, **kwargs)&#xA;    362     if cmd is None:&#xA;    363         cmd = popenargs[0]&#xA;--> 364     raise CalledProcessError(retcode, cmd)&#xA;    365 return 0&#xA;&#xA;CalledProcessError: Command &#x27;[&#x27;C:\\Users\\Username\\Desktop\\ffmpeg\\ffmpeg&#x27;, &#x27;-y&#x27;, &#x27;-report&#x27;, &#x27;-hwaccel&#x27;, &#x27;cuda&#x27;, &#x27;-framerate&#x27;, &#x27;60&#x27;, &#x27;-i&#x27;, &#x27;C:\\Users\\Username\\Desktop\\renamed\\frame%05d.png&#x27;, &#x27;-vf&#x27;, &#x27;pad=ceil(iw/2)*2:ceil(ih/2)*2&#x27;, &#x27;-pix_fmt&#x27;, &#x27;rgb32&#x27;, &#x27;-filter_complex&#x27;, 
&#x27;crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7&#x27;, &#x27;C:\\Users\\Username\\Desktop\\renamed\\video.mp4&#x27;]&#x27; returned non-zero exit status 1.&#xA;</cell>

    &#xA;

    This is the ffmpeg log output (cropped) :

    &#xA;

    ffmpeg started on 2022-08-04 at 01:17:19&#xA;Report written to "ffmpeg-20220804-011719.log"&#xA;Log level: 48&#xA;Command line:&#xA;"C:\\Users\\Username\\Desktop\\ffmpeg\\ffmpeg" -y -report -hwaccel cuda -framerate 60 -i "C:\\Users\\Username\\Desktop\\renamed\\frame%05d.png" -vf "pad=ceil(iw/2)*2:ceil(ih/2)*2" -pix_fmt rgb32 -filter_complex "crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7" "C:\\Users\\Username\\Desktop\\renamed\\video.mp4"&#xA;ffmpeg version 2022-07-14-git-882aac99d2-full_build-www.gyan.dev Copyright (c) 2000-2022 the FFmpeg developers&#xA;  built with gcc 12.1.0 (Rev2, Built by MSYS2 project)&#xA;  configuration: --enable-gpl --enable-version3 --enable-static --disable-w32threads --disable-autodetect --enable-fontconfig --enable-iconv --enable-gnutls --enable-libxml2 --enable-gmp --enable-bzlib --enable-lzma --enable-libsnappy --enable-zlib --enable-librist --enable-libsrt --enable-libssh --enable-libzmq --enable-avisynth --enable-libbluray --enable-libcaca --enable-sdl2 --enable-libdav1d --enable-libdavs2 --enable-libuavs3d --enable-libzvbi --enable-librav1e --enable-libsvtav1 --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs2 --enable-libxvid --enable-libaom --enable-libjxl --enable-libopenjpeg --enable-libvpx --enable-mediafoundation --enable-libass --enable-frei0r --enable-libfreetype --enable-libfribidi --enable-liblensfun --enable-libvidstab --enable-libvmaf --enable-libzimg --enable-amf --enable-cuda-llvm --enable-cuvid --enable-ffnvcodec --enable-nvdec --enable-nvenc --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-libshaderc --enable-vulkan --enable-libplacebo --ena  libavutil      57. 29.100 / 57. 29.100&#xA;  libavcodec     59. 38.100 / 59. 
38.100&#xA;  libavformat    59. 28.100 / 59. 28.100&#xA;  libavdevice    59.  8.100 / 59.  8.100&#xA;  libavfilter     8. 45.100 /  8. 45.100&#xA;  libswscale      6.  8.100 /  6.  8.100&#xA;  libswresample   4.  8.100 /  4.  8.100&#xA;  libpostproc    56.  7.100 / 56.  7.100&#xA;Splitting the commandline.&#xA;Reading option &#x27;-y&#x27; ... matched as option &#x27;y&#x27; (overwrite output files) with argument &#x27;1&#x27;.&#xA;Reading option &#x27;-report&#x27; ... matched as option &#x27;report&#x27; (generate a report) with argument &#x27;1&#x27;.&#xA;Reading option &#x27;-hwaccel&#x27; ... matched as option &#x27;hwaccel&#x27; (use HW accelerated decoding) with argument &#x27;cuda&#x27;.&#xA;Reading option &#x27;-framerate&#x27; ... matched as AVOption &#x27;framerate&#x27; with argument &#x27;60&#x27;.&#xA;Reading option &#x27;-i&#x27; ... matched as input url with argument &#x27;C:\Users\Username\Desktop\renamed\frame%05d.png&#x27;.&#xA;Reading option &#x27;-vf&#x27; ... matched as option &#x27;vf&#x27; (set video filters) with argument &#x27;pad=ceil(iw/2)*2:ceil(ih/2)*2&#x27;.&#xA;Reading option &#x27;-pix_fmt&#x27; ... matched as option &#x27;pix_fmt&#x27; (set pixel format) with argument &#x27;rgb32&#x27;.&#xA;Reading option &#x27;-filter_complex&#x27; ... matched as option &#x27;filter_complex&#x27; (create a complex filtergraph) with argument &#x27;crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7&#x27;.&#xA;Reading option &#x27;C:\Users\Username\Desktop\renamed\video.mp4&#x27; ... 
matched as output url.&#xA;Finished splitting the commandline.&#xA;Parsing a group of options: global .&#xA;Applying option y (overwrite output files) with argument 1.&#xA;Applying option report (generate a report) with argument 1.&#xA;Applying option filter_complex (create a complex filtergraph) with argument crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7.&#xA;Successfully parsed a group of options.&#xA;Parsing a group of options: input url C:\Users\Username\Desktop\renamed\frame%05d.png.&#xA;Applying option hwaccel (use HW accelerated decoding) with argument cuda.&#xA;Successfully parsed a group of options.&#xA;Opening an input file: C:\Users\Username\Desktop\renamed\frame%05d.png.&#xA;[image2 @ 00000000005db0c0] Opening &#x27;C:\Users\Username\Desktop\renamed\frame00000.png&#x27; for reading&#xA;[file @ 00000000005f6c00] Setting default whitelist &#x27;file,crypto,data&#x27;&#xA;[AVIOContext @ 00000000005fed40] Statistics: 28860 bytes read, 0 seeks&#xA;&#xA;&lt;...>&#xA;&#xA;[AVIOContext @ 00000000005fda00] Statistics: 67659 bytes read, 0 seeks&#xA;[image2 @ 00000000005db0c0] Probe buffer size limit of 5000000 bytes reached&#xA;Input #0, image2, from &#x27;C:\Users\Username\Desktop\renamed\frame%05d.png&#x27;:&#xA;  Duration: 00:00:25.00, start: 0.000000, bitrate: N/A&#xA;  Stream #0:0, 80, 1/60: Video: png, rgba(pc), 1200x1857 [SAR 3937:3937 DAR 400:619], 60 fps, 60 tbr, 60 tbn&#xA;Successfully opened the file.&#xA;[Parsed_crop_0 @ 00000000005fdc40] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_0 @ 00000000005fdc40] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_0 @ 00000000005fdc40] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_0 @ 
00000000005fdc40] Setting &#x27;y&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_1 @ 0000000000607500] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_1 @ 0000000000607500] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_1 @ 0000000000607500] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_1 @ 0000000000607500] Setting &#x27;y&#x27; to value &#x27;in_h*1/7&#x27;&#xA;[Parsed_crop_2 @ 0000000000607880] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_2 @ 0000000000607880] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_2 @ 0000000000607880] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_2 @ 0000000000607880] Setting &#x27;y&#x27; to value &#x27;in_h*2/7&#x27;&#xA;[Parsed_crop_3 @ 0000000000607c00] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_3 @ 0000000000607c00] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_3 @ 0000000000607c00] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_3 @ 0000000000607c00] Setting &#x27;y&#x27; to value &#x27;in_h*3/7&#x27;&#xA;[Parsed_crop_4 @ 00000000005fda00] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_4 @ 00000000005fda00] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_4 @ 00000000005fda00] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_4 @ 00000000005fda00] Setting &#x27;y&#x27; to value &#x27;in_h*4/7&#x27;&#xA;[Parsed_crop_5 @ 0000000002b8f800] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_5 @ 0000000002b8f800] Setting &#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_5 @ 0000000002b8f800] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_5 @ 0000000002b8f800] Setting &#x27;y&#x27; to value &#x27;in_h*5/7&#x27;&#xA;[Parsed_crop_6 @ 0000000000607d00] Setting &#x27;out_w&#x27; to value &#x27;in_w&#x27;&#xA;[Parsed_crop_6 @ 0000000000607d00] Setting 
&#x27;out_h&#x27; to value &#x27;in_h/7&#x27;&#xA;[Parsed_crop_6 @ 0000000000607d00] Setting &#x27;x&#x27; to value &#x27;0&#x27;&#xA;[Parsed_crop_6 @ 0000000000607d00] Setting &#x27;y&#x27; to value &#x27;in_h*6/7&#x27;&#xA;[Parsed_hstack_7 @ 0000000002b8fb80] Setting &#x27;inputs&#x27; to value &#x27;7&#x27;&#xA;Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1&#xA;

    &#xA;

    What is the problem here ? Also, how does one interpret this cryptic error message at the end of the log output :

    &#xA;

    Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1

    &#xA;

    E.g. what is input pad 0, why is it unlabeled, and why can't it find a matching stream ?

    &#xA;

    (PS. : Yes, I do have a habit of excessively documenting my code. ^^)

    &#xA;

  • ffmpeg sws_scale crash when convert video frame NV12 buffer to I420 buffer

    5 février 2017, par AnhTuan.Ng

    I have a stream of 2048x896 (NV12 format) video frames, and i have to crop its to 1936x872 I420 frames, then encoding to new 1936x872MP4 file.
    Crop frame from 2048x896 to 1936x872 image
    I use openCV to crop the original NV12 frames and use FFMPEG sws_scale to convert NV12 to I420 format. But my app crashes in the sws_scale function.
    I also tried some other crop sizes, and saw that with a 1920x872 crop frame size, it works !
    This is my code :

    // FFmpeg library
    extern "C" {
    #include <libavformat/avformat.h>
    #include <libavcodec/avcodec.h>
    #include <libswscale/swscale.h>
    }
    //Using openCV library:
    #include "opencv2/core/core.hpp"
    #include "opencv2/imgproc/imgproc.hpp"
    #include "opencv2/highgui/highgui.hpp"

    #ifdef _DEBUG
    #pragma comment(lib, "opencv_core2411d.lib")
    #pragma comment(lib, "opencv_highgui2411d.lib")
    #pragma comment(lib, "opencv_imgproc2411d.lib")
    #else
    #pragma comment(lib, "opencv_core2411.lib")
    #pragma comment(lib, "opencv_highgui2411.lib")
    #pragma comment(lib, "opencv_imgproc2411.lib")
    #endif

    using namespace cv;

    void RotateOpenCVMatrix(Mat&amp; src, double angle, Mat&amp; dst)
    {
       Mat tempMat;
       transpose(src, tempMat);
       flip(tempMat, dst, 1); //transpose+flip(1)=CW
    }

    void CropNV12ImageFrame(unsigned char *srcBuff, unsigned char *resultBuff,
       int srcWidth, int srcHeight, int x, int y, int cropWidth, int cropHeight,
       BOOL isRotate)
    {
       //create src_NV12 matrix:
       Mat src_Y_img;
       Mat src_UV_img;

       Mat crop_Y_mat;
       Mat crop_UV_mat;

       src_Y_img.create(srcHeight, srcWidth, CV_8UC1);
       src_UV_img.create(srcHeight / 2, srcWidth, CV_8UC1);

       memcpy(src_Y_img.data, srcBuff, srcWidth * srcHeight);
       memcpy(src_UV_img.data, srcBuff + srcWidth * srcHeight, srcWidth * srcHeight / 2);

       //Create result Y plane matrix:
       crop_Y_mat = src_Y_img(cv::Rect(x, y, cropWidth, cropHeight)).clone();
       crop_UV_mat = src_UV_img(cv::Rect(x, y / 2, cropWidth, cropHeight / 2)).clone();

       //Rotate by openCV:
       if (isRotate)
       {
           Mat result_Y_mat;
           Mat result_UV_mat;

           RotateOpenCVMatrix(crop_Y_mat, 90, result_Y_mat);
           RotateOpenCVMatrix(crop_UV_mat, 90, result_UV_mat);
           //Mem copy to output data buffer:
           memcpy(resultBuff, result_Y_mat.data, cropWidth * cropHeight);
           memcpy(resultBuff + cropWidth * cropHeight, result_UV_mat.data, cropWidth * cropHeight / 2);
       }
       else
       {
           memcpy(resultBuff, crop_Y_mat.data, cropWidth * cropHeight);
           memcpy(resultBuff + cropWidth * cropHeight, crop_UV_mat.data, cropWidth * cropHeight / 2);
       }
    }

    int ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
    {

       int ret = 0;
       //create the conversion context
       struct SwsContext *swsContext = sws_getContext(
           width, height, srcFormat,    // source
           width, height, targetFormat, // target
           SWS_BILINEAR,
           NULL, NULL, NULL
       );
       if (swsContext &lt; 0) {
           return -1;
       }

       // allocate frame
       AVFrame *pSrcFrame = av_frame_alloc();
       AVFrame *pTargetFrame = av_frame_alloc();

       // source frame
       avpicture_fill(
           (AVPicture*)pSrcFrame,
           (const uint8_t *)srcbuff,
           srcFormat,
           width,
           height
       );

       // target frame
       if (srcFormat != AV_PIX_FMT_RGBA)
           avpicture_fill(
           (AVPicture*)pTargetFrame,
               (const uint8_t *)targetBuff,
               targetFormat,
               width,
               height
           );
       else
           avpicture_fill(
           (AVPicture*)pTargetFrame,
               (const uint8_t *)targetBuff,
               targetFormat,
               1936,
               872
           );
       ret = sws_scale(
           swsContext,             // sws context
                                   // source
           pSrcFrame->data,        // source
           pSrcFrame->linesize,    // source stride
           0,                      // slice Y
           height,                 // slice H
                                   // target
           pTargetFrame->data,        // target
           pTargetFrame->linesize     // target stride
       );

       av_frame_free(&amp;pSrcFrame);
       av_frame_free(&amp;pTargetFrame);
       sws_freeContext(swsContext);

       return ret;
    }

    void CropVideoFrame(BYTE* nv12BufferInput, BYTE* i420BufferOutput, int inWidth, int inHeight, int outWidth, int outHeight)
    {
       BYTE *cropNV12Image = new BYTE[outWidth* outHeight * 3 / 2 + 1];
       //Get 4 horizontal camera img buffer:
       CropImageFrame(input, cropNV12Image[0], inWidth, inHeight, 0, 0, outWidth, outHeight, FALSE);
       //Convert from NV21 to I420p:  
       ConvertColor(cropNV12Image[0], i420BufferOutput, outWidth, outHeight, AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P);
       //Free memory:
       delete[] cropNV12Image;
    }
  • FFMPEG Encoder error when playback with 1936 width video size

    5 février 2017, par AnhTuan.Ng

    I have a 4096x1760 YUV-NV12 video frames stream and i have to split it to 4 frames have resolution 1936x872, then encode them to 4 video file (Like bellow picture)
    Crop frame
    My algorithm is :
    1. Using openCV to crop and splitting (4096x1760) NV12 frame to 4 (1936x872) down size.
    2. Using FFMPEG swscale() to convert RGBA to I420 buffer
    3. Using FFMpeg to encode 4 (1936x872) frames to 4 video files.

    But I have a problem : the 4 encoded videos have error signals, just horizontal stripes on screen (such as zebra skin, so sad !)
    Video error
    I think error caused by encoder, because when i change crop size to 1920x872, All thing seem be ok (By one step, I dump buffer data to bmp image to check, and every thing look good !)

    And, what a surprise ! When I change my crop frame size from 1936x872 to 1920x872, all things work fine — the output video plays smoothly !

    So I think this error because my FFMPEG encoder setting up wrong. Please tell me why and show me a way to fix it ! Thanks you !

    this is my code :

    -My define :

    #define SIDE_FRAME_WIDTH        4096
    #define SIDE_FRAME_HEIGHT       1760

    #define SINGLE_SIDE_WIDTH       1936    //1920 is woking!
    #define SINGLE_SIDE_HEIGHT      872     //872

    #define SIDE_CROP_WIDTH         1936
    #define SIDE_CROP_HEIGHT        872

    My Splitting thread :

    void splittingThread(SplitingThreadParam &amp;param)
    {
       Converter *pConverter = (Converter*)param.parentParamClass;
       BYTE* cropRGBABuff[4];
       for (int i = 0; i &lt; 4; i++)
           cropRGBABuff[i] = new BYTE[SINGLE_SIDE_WIDTH*SINGLE_SIDE_HEIGHT * 4];
       //Split:
       pConverter->OpenCVSplittingSideFrame(param.inputBuff, SIDE_FRAME_WIDTH, SIDE_FRAME_HEIGHT, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, cropRGBABuff[0], cropRGBABuff[1], cropRGBABuff[2], cropRGBABuff[3]);

       //Convert to I420:
       pConverter->ConvertColor(cropRGBABuff[0], param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
       pConverter->ConvertColor(cropRGBABuff[1], param.out2Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
       pConverter->ConvertColor(cropRGBABuff[2], param.out3Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
       pConverter->ConvertColor(cropRGBABuff[3], param.out4Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);

       //pConverter->DumpBufferData(param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, 1);
    }

    My Splitting function :

    void Converter::OpenCVSplittingSideFrame(BYTE *input, unsigned int srcWidth, unsigned int srcHeight,
       unsigned int cropWidth, unsigned int cropHeight,
       BYTE *out1, BYTE *out2, BYTE *out3, BYTE *out4)
    {
       Mat nv12Mat;
       Mat rgbMat;
       Mat rgbCropImg[4];

       //Create YUV Matrix:
       nv12Mat.create(srcHeight * 3 / 2, srcWidth, CV_8UC1);
       memcpy(nv12Mat.data, input, srcWidth * srcHeight * 3 / 2);
       //Create RGB matrix:
       rgbMat.create(srcHeight, srcWidth, CV_8UC4);

       //Convert YUV to RGB:
       cvtColor(nv12Mat, rgbMat, COLOR_YUV2RGBA_NV21);

       //Crop:
       CropMatrix(rgbMat, rgbCropImg[0], 0, 0, cropWidth, cropHeight);
       CropMatrix(rgbMat, rgbCropImg[1], 0, SIDE_CROP_HEIGHT, cropWidth, cropHeight);
       CropMatrix(rgbMat, rgbCropImg[2], SIDE_CROP_WIDTH, 0, cropWidth, cropHeight);
       CropMatrix(rgbMat, rgbCropImg[3], SIDE_CROP_WIDTH, SIDE_CROP_HEIGHT, cropWidth, cropHeight);

       //Copy from matrix to output buffer:
       memcpy(out1, rgbCropImg[0].data, cropWidth * cropHeight * 4);
       memcpy(out2, rgbCropImg[1].data, cropWidth * cropHeight * 4);
       memcpy(out3, rgbCropImg[2].data, cropWidth * cropHeight * 4);
       memcpy(out4, rgbCropImg[3].data, cropWidth * cropHeight * 4);
    }

    Convert color range to RGBA to I420 function :

    int Converter::ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
    {

       int ret = 0;
       //create the conversion context
       struct SwsContext *swsContext = sws_getContext(
           width, height, srcFormat,    // AV_PIX_FMT_NV12, // source
           width, height, targetFormat, // AV_PIX_FMT_YUV420P,    // target: GL_RGBA
           SWS_BILINEAR,
           NULL, NULL, NULL
           );
       if (swsContext &lt; 0) {
           return -1;
       }

       // allocate frame
       AVFrame *pSrcFrame = av_frame_alloc();
       AVFrame *pTargetFrame = av_frame_alloc();

       // source frame
       avpicture_fill(
           (AVPicture*)pSrcFrame,
           (const uint8_t *)srcbuff,
           srcFormat,
           width,
           height
           );

       // target frame
       if (srcFormat != AV_PIX_FMT_RGBA)
           avpicture_fill(
           (AVPicture*)pTargetFrame,
           (const uint8_t *)targetBuff,
           targetFormat,
           width,
           height
           );
       else
           avpicture_fill(
           (AVPicture*)pTargetFrame,
           (const uint8_t *)targetBuff,
           targetFormat,
           1936,
           872
           );
       ret = sws_scale(
           swsContext,             // sws context
           // source
           pSrcFrame->data,        // source
           pSrcFrame->linesize,    // source stride
           0,                      // slice Y
           height,                 // slice H
           // target
           pTargetFrame->data,        // target
           pTargetFrame->linesize     // target stride
           );

       av_frame_free(&amp;pSrcFrame);
       av_frame_free(&amp;pTargetFrame);
       sws_freeContext(swsContext);

       return ret;
    }

    My encoder setting :

    EncoderContext encoderCtx;
    encoderCtx.vCodecId = VIDEO_CODEC_H264;
    encoderCtx.aCodecId = AUDIO_CODEC_NONE;
    encoderCtx.numAudioChannel = 0;
    encoderCtx.eFormat = BEYOND_ENCODER_MP4;
    encoderCtx.videoBitrate = 10000000;
    encoderCtx.audioBitrate = 0;
    encoderCtx.audioBitrate = 0;
    encoderCtx.timeBaseDen = config->time_base_den;
    encoderCtx.timeBaseNum = config->time_base_num;
    encoderCtx.width = SINGLE_SIDE_WIDTH;
    encoderCtx.height = SINGLE_SIDE_HEIGHT;
    CString filePath(config->savePath.c_str());
    CString filePath2;
    filePath2.Format(_T("%s_%d.mp4"), filePath, i);
    CT2A multibyte(filePath2, CP_UTF8);
    encoderCtx.outFileName = multibyte;

    Thanks all for your help !