
Sur d’autres sites (13108)
-
Cryptic `ffmpeg` + Python error: `Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1`
5 August 2022, by mattze_frisch

I want to use ffmpeg with Python in a Jupyter notebook to create a video from 2000 PNG image files generated by matplotlib. I'm on a Windows 7 machine.

The frames are very narrow and tall, so I'd like to cut them vertically into seven equal-sized "tiles" using crop and stack those "tiles" horizontally using hstack, but I'm only getting cryptic error messages:

import os
from subprocess import check_call

ffmpeg_path = notebook_directory  # Make sure ffmpeg.exe is in notebook directory!

frames_path = 'C:\\Users\\Username\\Desktop'
frames_renamed_path = os.path.join(frames_path, 'renamed')
os.chdir(frames_renamed_path)  # Temporarily change current working directory

check_call(
    [
        os.path.join(ffmpeg_path, 'ffmpeg'),
        '-y',                 # Overwrite output files without asking
        '-report',            # Write logfile to current working directory
        '-hwaccel', 'cuda',   # Use GPU acceleration
        '-framerate', '60',   # Input frame rate
        '-i', os.path.join(frames_renamed_path, 'frame%05d.png'),  # Path to input frames
        #'-r', '60',          # Output frame rate
        #'-vf', 'pad=ceil(iw/2)*2:ceil(ih/2)*2',  # Pad frames to even pixel numbers (required by many codecs)
        #'-pix_fmt', 'yuv420p',
        '-pix_fmt', 'rgb32',  # Use RGB32 pixel format matching matplotlib image output
        '-filter_complex',    # Cut frames into tiles and rearrange (here: 7 horizontal tiles)
        'crop=in_w:in_h/7:0:0[tile_1],'           # 1st tile from top
        + 'crop=in_w:in_h/7:0:in_h*1/7[tile_2],'  # 2nd tile from top
        + 'crop=in_w:in_h/7:0:in_h*2/7[tile_3],'  # 3rd tile from top
        + 'crop=in_w:in_h/7:0:in_h*3/7[tile_4],'  # 4th tile from top
        + 'crop=in_w:in_h/7:0:in_h*4/7[tile_5],'  # 5th tile from top
        + 'crop=in_w:in_h/7:0:in_h*5/7[tile_6],'  # 6th tile from top
        + 'crop=in_w:in_h/7:0:in_h*6/7[tile_7],'  # 7th tile from top
        + '[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7',  # Stack tiles horizontally
        os.path.join(frames_renamed_path, 'video.mp4')  # Path to store output video
    ]
)

os.chdir(notebook_directory)  # Reset current working directory to notebook directory

---------------------------------------------------------------------------
CalledProcessError Traceback (most recent call last)
Input In [10], in <cell line: 9>()
 6 frames_renamed_path = os.path.join(frames_path, 'renamed')
 7 os.chdir(frames_renamed_path) # Temporarily change current working directory
----> 9 check_call(
 10 [
 11 os.path.join(ffmpeg_path, 'ffmpeg'),
 12 '-y', # Overwrite output files without asking
 13 '-report', # Write logfile to current working directory
 14 '-hwaccel', 'cuda', # Use GPU acceleration
 15 '-framerate', '60', # Input frame rate
 16 '-i', os.path.join(frames_renamed_path, 'frame%05d.png'), # Path to input frames
 17 #'-pattern_type', 'glob', '-i', os.path.join(frames_renamed_path, '*.png'), # glob not available in Windows by default
 18 #'-r', '60', # Output frame rate
 19 '-vf', 'pad=ceil(iw/2)*2:ceil(ih/2)*2', # Pad frames to even pixel numbers (required by many codecs)
 20 #'-pix_fmt', 'yuv420p',
 21 '-pix_fmt', 'rgb32', # Use RGB32 pixel format matching MatPlotLib image output
 22 '-filter_complex', # Cut frames into tiles and rearrange (here: 7 horizontal tiles)
 23 'crop=in_w:in_h/7:0:0[tile_1],' # 1st tile from top
 24 + 'crop=in_w:in_h/7:0:in_h*1/7[tile_2],' # 2nd tile from top
 25 + 'crop=in_w:in_h/7:0:in_h*2/7[tile_3],' # 3rd tile from top
 26 + 'crop=in_w:in_h/7:0:in_h*3/7[tile_4],' # 4th tile from top
 27 + 'crop=in_w:in_h/7:0:in_h*4/7[tile_5],' # 5th tile from top
 28 + 'crop=in_w:in_h/7:0:in_h*5/7[tile_6],' # 6th tile from top
 29 + 'crop=in_w:in_h/7:0:in_h*6/7[tile_7],' # 7th tile from top
 30 + '[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7', # Stack tiles horizontally
 31 os.path.join(frames_renamed_path, 'video.mp4') # Path to store output video
 32 ]
 33 )
 35 os.chdir(notebook_directory)

File C:\Program Files\Python38\lib\subprocess.py:364, in check_call(*popenargs, **kwargs)
 362 if cmd is None:
 363 cmd = popenargs[0]
--> 364 raise CalledProcessError(retcode, cmd)
 365 return 0

CalledProcessError: Command '['C:\\Users\\Username\\Desktop\\ffmpeg\\ffmpeg', '-y', '-report', '-hwaccel', 'cuda', '-framerate', '60', '-i', 'C:\\Users\\Username\\Desktop\\renamed\\frame%05d.png', '-vf', 'pad=ceil(iw/2)*2:ceil(ih/2)*2', '-pix_fmt', 'rgb32', '-filter_complex', 'crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7', 'C:\\Users\\Username\\Desktop\\renamed\\video.mp4']' returned non-zero exit status 1.


This is the ffmpeg log output (cropped):

ffmpeg started on 2022-08-04 at 01:17:19
Report written to "ffmpeg-20220804-011719.log"
Log level: 48
Command line:
"C:\\Users\\Username\\Desktop\\ffmpeg\\ffmpeg" -y -report -hwaccel cuda -framerate 60 -i "C:\\Users\\Username\\Desktop\\renamed\\frame%05d.png" -vf "pad=ceil(iw/2)*2:ceil(ih/2)*2" -pix_fmt rgb32 -filter_complex "crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7" "C:\\Users\\Username\\Desktop\\renamed\\video.mp4"
ffmpeg version 2022-07-14-git-882aac99d2-full_build-www.gyan.dev Copyright (c) 2000-2022 the FFmpeg developers
 built with gcc 12.1.0 (Rev2, Built by MSYS2 project)
 configuration: --enable-gpl --enable-version3 --enable-static --disable-w32threads --disable-autodetect --enable-fontconfig --enable-iconv --enable-gnutls --enable-libxml2 --enable-gmp --enable-bzlib --enable-lzma --enable-libsnappy --enable-zlib --enable-librist --enable-libsrt --enable-libssh --enable-libzmq --enable-avisynth --enable-libbluray --enable-libcaca --enable-sdl2 --enable-libdav1d --enable-libdavs2 --enable-libuavs3d --enable-libzvbi --enable-librav1e --enable-libsvtav1 --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs2 --enable-libxvid --enable-libaom --enable-libjxl --enable-libopenjpeg --enable-libvpx --enable-mediafoundation --enable-libass --enable-frei0r --enable-libfreetype --enable-libfribidi --enable-liblensfun --enable-libvidstab --enable-libvmaf --enable-libzimg --enable-amf --enable-cuda-llvm --enable-cuvid --enable-ffnvcodec --enable-nvdec --enable-nvenc --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-libshaderc --enable-vulkan --enable-libplacebo --ena
 libavutil 57. 29.100 / 57. 29.100
 libavcodec 59. 38.100 / 59. 38.100
 libavformat 59. 28.100 / 59. 28.100
 libavdevice 59. 8.100 / 59. 8.100
 libavfilter 8. 45.100 / 8. 45.100
 libswscale 6. 8.100 / 6. 8.100
 libswresample 4. 8.100 / 4. 8.100
 libpostproc 56. 7.100 / 56. 7.100
Splitting the commandline.
Reading option '-y' ... matched as option 'y' (overwrite output files) with argument '1'.
Reading option '-report' ... matched as option 'report' (generate a report) with argument '1'.
Reading option '-hwaccel' ... matched as option 'hwaccel' (use HW accelerated decoding) with argument 'cuda'.
Reading option '-framerate' ... matched as AVOption 'framerate' with argument '60'.
Reading option '-i' ... matched as input url with argument 'C:\Users\Username\Desktop\renamed\frame%05d.png'.
Reading option '-vf' ... matched as option 'vf' (set video filters) with argument 'pad=ceil(iw/2)*2:ceil(ih/2)*2'.
Reading option '-pix_fmt' ... matched as option 'pix_fmt' (set pixel format) with argument 'rgb32'.
Reading option '-filter_complex' ... matched as option 'filter_complex' (create a complex filtergraph) with argument 'crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7'.
Reading option 'C:\Users\Username\Desktop\renamed\video.mp4' ... matched as output url.
Finished splitting the commandline.
Parsing a group of options: global .
Applying option y (overwrite output files) with argument 1.
Applying option report (generate a report) with argument 1.
Applying option filter_complex (create a complex filtergraph) with argument crop=in_w:in_h/7:0:0[tile_1],crop=in_w:in_h/7:0:in_h*1/7[tile_2],crop=in_w:in_h/7:0:in_h*2/7[tile_3],crop=in_w:in_h/7:0:in_h*3/7[tile_4],crop=in_w:in_h/7:0:in_h*4/7[tile_5],crop=in_w:in_h/7:0:in_h*5/7[tile_6],crop=in_w:in_h/7:0:in_h*6/7[tile_7],[tile_1][tile_2][tile_3][tile_4][tile_5][tile_6][tile_7]hstack=7.
Successfully parsed a group of options.
Parsing a group of options: input url C:\Users\Username\Desktop\renamed\frame%05d.png.
Applying option hwaccel (use HW accelerated decoding) with argument cuda.
Successfully parsed a group of options.
Opening an input file: C:\Users\Username\Desktop\renamed\frame%05d.png.
[image2 @ 00000000005db0c0] Opening 'C:\Users\Username\Desktop\renamed\frame00000.png' for reading
[file @ 00000000005f6c00] Setting default whitelist 'file,crypto,data'
[AVIOContext @ 00000000005fed40] Statistics: 28860 bytes read, 0 seeks

<...>

[AVIOContext @ 00000000005fda00] Statistics: 67659 bytes read, 0 seeks
[image2 @ 00000000005db0c0] Probe buffer size limit of 5000000 bytes reached
Input #0, image2, from 'C:\Users\Username\Desktop\renamed\frame%05d.png':
 Duration: 00:00:25.00, start: 0.000000, bitrate: N/A
 Stream #0:0, 80, 1/60: Video: png, rgba(pc), 1200x1857 [SAR 3937:3937 DAR 400:619], 60 fps, 60 tbr, 60 tbn
Successfully opened the file.
[Parsed_crop_0 @ 00000000005fdc40] Setting 'out_w' to value 'in_w'
[Parsed_crop_0 @ 00000000005fdc40] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_0 @ 00000000005fdc40] Setting 'x' to value '0'
[Parsed_crop_0 @ 00000000005fdc40] Setting 'y' to value '0'
[Parsed_crop_1 @ 0000000000607500] Setting 'out_w' to value 'in_w'
[Parsed_crop_1 @ 0000000000607500] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_1 @ 0000000000607500] Setting 'x' to value '0'
[Parsed_crop_1 @ 0000000000607500] Setting 'y' to value 'in_h*1/7'
[Parsed_crop_2 @ 0000000000607880] Setting 'out_w' to value 'in_w'
[Parsed_crop_2 @ 0000000000607880] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_2 @ 0000000000607880] Setting 'x' to value '0'
[Parsed_crop_2 @ 0000000000607880] Setting 'y' to value 'in_h*2/7'
[Parsed_crop_3 @ 0000000000607c00] Setting 'out_w' to value 'in_w'
[Parsed_crop_3 @ 0000000000607c00] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_3 @ 0000000000607c00] Setting 'x' to value '0'
[Parsed_crop_3 @ 0000000000607c00] Setting 'y' to value 'in_h*3/7'
[Parsed_crop_4 @ 00000000005fda00] Setting 'out_w' to value 'in_w'
[Parsed_crop_4 @ 00000000005fda00] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_4 @ 00000000005fda00] Setting 'x' to value '0'
[Parsed_crop_4 @ 00000000005fda00] Setting 'y' to value 'in_h*4/7'
[Parsed_crop_5 @ 0000000002b8f800] Setting 'out_w' to value 'in_w'
[Parsed_crop_5 @ 0000000002b8f800] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_5 @ 0000000002b8f800] Setting 'x' to value '0'
[Parsed_crop_5 @ 0000000002b8f800] Setting 'y' to value 'in_h*5/7'
[Parsed_crop_6 @ 0000000000607d00] Setting 'out_w' to value 'in_w'
[Parsed_crop_6 @ 0000000000607d00] Setting 'out_h' to value 'in_h/7'
[Parsed_crop_6 @ 0000000000607d00] Setting 'x' to value '0'
[Parsed_crop_6 @ 0000000000607d00] Setting 'y' to value 'in_h*6/7'
[Parsed_hstack_7 @ 0000000002b8fb80] Setting 'inputs' to value '7'
Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1



What is the problem here? Also, how does one interpret this cryptic error message at the end of the log output:


Cannot find a matching stream for unlabeled input pad 0 on filter Parsed_crop_1


E.g., what is input pad 0, why is it unlabeled, and why can't it find a matching stream?

(PS: Yes, I do have a habit of excessively documenting my code. ^^)
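
In ffmpeg filtergraph syntax, a comma chains filters one after another inside a single chain, while a semicolon separates independent chains, so feeding one decoded stream to several crop filters requires fanning it out first (for example with split). Below is a minimal sketch of the same seven-tile layout written that way; it reuses notebook_directory and frames_renamed_path from the snippet above and is an illustration, not a tested drop-in replacement.

import os
from subprocess import check_call

# Paths reused from the snippet above (illustrative).
ffmpeg_exe = os.path.join(notebook_directory, 'ffmpeg')
frames = os.path.join(frames_renamed_path, 'frame%05d.png')
output = os.path.join(frames_renamed_path, 'video.mp4')

# Fan the single decoded stream out into 7 copies with split, crop each copy,
# then stack the tiles. The individual chains are separated by ';', not ','.
crops = ''.join(f'[s{i}]crop=in_w:in_h/7:0:in_h*{i}/7[tile_{i}];' for i in range(7))
graph = (
    '[0:v]split=7' + ''.join(f'[s{i}]' for i in range(7)) + ';'
    + crops
    + ''.join(f'[tile_{i}]' for i in range(7))
    + 'hstack=inputs=7,pad=ceil(iw/2)*2:ceil(ih/2)*2[v]'  # pad to even dimensions
)

check_call([
    ffmpeg_exe, '-y',
    '-framerate', '60',       # input frame rate
    '-i', frames,
    '-filter_complex', graph,
    '-map', '[v]',            # use the labelled filtergraph output
    '-pix_fmt', 'yuv420p',    # widely supported output pixel format
    output,
])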


-
ffmpeg sws_scale crash when converting a video frame NV12 buffer to an I420 buffer
5 February 2017, by AnhTuan.Ng

I have a stream of 2048x896 (NV12 format) video frames, and I have to crop them to 1936x872 I420 frames, then encode them to a new 1936x872 MP4 file.
Crop frame from 2048x896 to 1936x872 image
I use OpenCV to crop the original NV12 frames and FFmpeg's sws_scale to convert NV12 to I420 format, but my app crashes in the sws_scale function.
I also tried some other crop sizes, and found that with a 1920x872 crop frame size, it works!
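
For orientation before the code, here is a small illustrative sketch (not the asker's code) of the two buffer layouts involved; both total width * height * 3/2 bytes, which is where the * 3 / 2 in the allocations below comes from.

#include <cstdint>
#include <cstddef>

// Plane offsets for tightly packed buffers (stride == width); width and height even.
struct Nv12Planes { const uint8_t *y; const uint8_t *uv; };
struct I420Planes { const uint8_t *y; const uint8_t *u; const uint8_t *v; };

Nv12Planes nv12_planes(const uint8_t *buf, int w, int h)
{
    // Y is w x h bytes, followed by one w x h/2 plane of interleaved U,V pairs.
    return { buf, buf + (size_t)w * h };
}

I420Planes i420_planes(const uint8_t *buf, int w, int h)
{
    // Y is w x h bytes, then U and V are each (w/2) x (h/2) bytes.
    size_t luma = (size_t)w * h;
    return { buf, buf + luma, buf + luma + luma / 4 };
}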
This is my code:

// FFmpeg library
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}
//Using openCV library:
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#ifdef _DEBUG
#pragma comment(lib, "opencv_core2411d.lib")
#pragma comment(lib, "opencv_highgui2411d.lib")
#pragma comment(lib, "opencv_imgproc2411d.lib")
#else
#pragma comment(lib, "opencv_core2411.lib")
#pragma comment(lib, "opencv_highgui2411.lib")
#pragma comment(lib, "opencv_imgproc2411.lib")
#endif
using namespace cv;
void RotateOpenCVMatrix(Mat& src, double angle, Mat& dst)
{
Mat tempMat;
transpose(src, tempMat);
flip(tempMat, dst, 1); //transpose+flip(1)=CW
}
void CropNV12ImageFrame(unsigned char *srcBuff, unsigned char *resultBuff,
int srcWidth, int srcHeight, int x, int y, int cropWidth, int cropHeight,
BOOL isRotate)
{
//create src_NV12 matrix:
Mat src_Y_img;
Mat src_UV_img;
Mat crop_Y_mat;
Mat crop_UV_mat;
src_Y_img.create(srcHeight, srcWidth, CV_8UC1);
src_UV_img.create(srcHeight / 2, srcWidth, CV_8UC1);
memcpy(src_Y_img.data, srcBuff, srcWidth * srcHeight);
memcpy(src_UV_img.data, srcBuff + srcWidth * srcHeight, srcWidth * srcHeight / 2);
//Create result Y plane matrix:
crop_Y_mat = src_Y_img(cv::Rect(x, y, cropWidth, cropHeight)).clone();
crop_UV_mat = src_UV_img(cv::Rect(x, y / 2, cropWidth, cropHeight / 2)).clone();
//Rotate by openCV:
if (isRotate)
{
Mat result_Y_mat;
Mat result_UV_mat;
RotateOpenCVMatrix(crop_Y_mat, 90, result_Y_mat);
RotateOpenCVMatrix(crop_UV_mat, 90, result_UV_mat);
//Mem copy to output data buffer:
memcpy(resultBuff, result_Y_mat.data, cropWidth * cropHeight);
memcpy(resultBuff + cropWidth * cropHeight, result_UV_mat.data, cropWidth * cropHeight / 2);
}
else
{
memcpy(resultBuff, crop_Y_mat.data, cropWidth * cropHeight);
memcpy(resultBuff + cropWidth * cropHeight, crop_UV_mat.data, cropWidth * cropHeight / 2);
}
}
int ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
{
int ret = 0;
//create the conversion context
struct SwsContext *swsContext = sws_getContext(
width, height, srcFormat, // source
width, height, targetFormat, // target
SWS_BILINEAR,
NULL, NULL, NULL
);
if (swsContext < 0) {
return -1;
}
// allocate frame
AVFrame *pSrcFrame = av_frame_alloc();
AVFrame *pTargetFrame = av_frame_alloc();
// source frame
avpicture_fill(
(AVPicture*)pSrcFrame,
(const uint8_t *)srcbuff,
srcFormat,
width,
height
);
// target frame
if (srcFormat != AV_PIX_FMT_RGBA)
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
width,
height
);
else
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
1936,
872
);
ret = sws_scale(
swsContext, // sws context
// source
pSrcFrame->data, // source
pSrcFrame->linesize, // source stride
0, // slice Y
height, // slice H
// target
pTargetFrame->data, // target
pTargetFrame->linesize // target stride
);
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return ret;
}
void CropVideoFrame(BYTE* nv12BufferInput, BYTE* i420BufferOutput, int inWidth, int inHeight, int outWidth, int outHeight)
{
    BYTE *cropNV12Image = new BYTE[outWidth * outHeight * 3 / 2 + 1];
    //Crop the NV12 input frame:
    CropNV12ImageFrame(nv12BufferInput, cropNV12Image, inWidth, inHeight, 0, 0, outWidth, outHeight, FALSE);
    //Convert from NV12 to I420:
    ConvertColor(cropNV12Image, i420BufferOutput, outWidth, outHeight, AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P);
    //Free memory:
    delete[] cropNV12Image;
}
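
For reference, here is a minimal, self-contained sketch of the NV12-to-I420 step written against av_image_fill_arrays (the non-deprecated counterpart of avpicture_fill), with the pointer and stride arrays kept explicit. It assumes tightly packed source and destination buffers and illustrates the API usage; it is not a verified fix for the crash.

extern "C" {
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
#include <cstdint>

// Convert a tightly packed NV12 buffer into a tightly packed I420 buffer.
// Returns 0 on success, a negative value on failure. Illustrative sketch only.
int ConvertNV12ToI420(uint8_t *src, uint8_t *dst, int width, int height)
{
    SwsContext *sws = sws_getContext(width, height, AV_PIX_FMT_NV12,
                                     width, height, AV_PIX_FMT_YUV420P,
                                     SWS_BILINEAR, NULL, NULL, NULL);
    if (!sws) // sws_getContext returns NULL on failure, not a negative value
        return -1;

    uint8_t *src_data[4], *dst_data[4];
    int src_linesize[4], dst_linesize[4];

    // align = 1 means no row padding: the linesizes describe tightly packed planes.
    av_image_fill_arrays(src_data, src_linesize, src, AV_PIX_FMT_NV12, width, height, 1);
    av_image_fill_arrays(dst_data, dst_linesize, dst, AV_PIX_FMT_YUV420P, width, height, 1);

    int ret = sws_scale(sws, src_data, src_linesize, 0, height, dst_data, dst_linesize);

    sws_freeContext(sws);
    return ret < 0 ? ret : 0;
}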
-
FFMPEG encoder error when playing back video with 1936 frame width
5 February 2017, by AnhTuan.Ng

I have a 4096x1760 YUV NV12 video frame stream, and I have to split it into 4 frames of resolution 1936x872, then encode them to 4 video files (like the picture below).
Crop frame
My algorithm is:
1. Use OpenCV to crop and split the (4096x1760) NV12 frame into 4 smaller (1936x872) frames.
2. Use FFmpeg's sws_scale() to convert RGBA to I420 buffers.
3. Use FFmpeg to encode the 4 (1936x872) frames to 4 video files.
But I have a problem: the 4 encoded videos are corrupted, showing just horizontal stripes on screen (such as zebra skin, so sad!).
Video error
I think the error is caused by the encoder, because along the way I dump the buffer data to BMP images to check, and everything looks good. And, what a surprise! When I change my crop frame size from 1936x872 to 1920x872, everything works and the output video plays smoothly!
So I think this error is because my FFmpeg encoder is set up wrong. Please tell me why and show me a way to fix it! Thank you!

This is my code:
My defines:
#define SIDE_FRAME_WIDTH 4096
#define SIDE_FRAME_HEIGHT 1760
#define SINGLE_SIDE_WIDTH 1936 // 1920 is working!
#define SINGLE_SIDE_HEIGHT 872 //872
#define SIDE_CROP_WIDTH 1936
#define SIDE_CROP_HEIGHT 872

My splitting thread:
void splittingThread(SplitingThreadParam &param)
{
Converter *pConverter = (Converter*)param.parentParamClass;
BYTE* cropRGBABuff[4];
for (int i = 0; i < 4; i++)
cropRGBABuff[i] = new BYTE[SINGLE_SIDE_WIDTH*SINGLE_SIDE_HEIGHT * 4];
//Split:
pConverter->OpenCVSplittingSideFrame(param.inputBuff, SIDE_FRAME_WIDTH, SIDE_FRAME_HEIGHT, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, cropRGBABuff[0], cropRGBABuff[1], cropRGBABuff[2], cropRGBABuff[3]);
//Convert to I420:
pConverter->ConvertColor(cropRGBABuff[0], param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[1], param.out2Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[2], param.out3Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[3], param.out4Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
//pConverter->DumpBufferData(param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, 1);
}

My splitting function:
void Converter::OpenCVSplittingSideFrame(BYTE *input, unsigned int srcWidth, unsigned int srcHeight,
unsigned int cropWidth, unsigned int cropHeight,
BYTE *out1, BYTE *out2, BYTE *out3, BYTE *out4)
{
Mat nv12Mat;
Mat rgbMat;
Mat rgbCropImg[4];
//Create YUV Matrix:
nv12Mat.create(srcHeight * 3 / 2, srcWidth, CV_8UC1);
memcpy(nv12Mat.data, input, srcWidth * srcHeight * 3 / 2);
//Create RGB matrix:
rgbMat.create(srcHeight, srcWidth, CV_8UC4);
//Convert YUV to RGB:
cvtColor(nv12Mat, rgbMat, COLOR_YUV2RGBA_NV21);
//Crop:
CropMatrix(rgbMat, rgbCropImg[0], 0, 0, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[1], 0, SIDE_CROP_HEIGHT, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[2], SIDE_CROP_WIDTH, 0, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[3], SIDE_CROP_WIDTH, SIDE_CROP_HEIGHT, cropWidth, cropHeight);
//Copy from matrix to output buffer:
memcpy(out1, rgbCropImg[0].data, cropWidth * cropHeight * 4);
memcpy(out2, rgbCropImg[1].data, cropWidth * cropHeight * 4);
memcpy(out3, rgbCropImg[2].data, cropWidth * cropHeight * 4);
memcpy(out4, rgbCropImg[3].data, cropWidth * cropHeight * 4);
}

Convert color from RGBA to I420 function:
int Converter::ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
{
int ret = 0;
//create the conversion context
struct SwsContext *swsContext = sws_getContext(
width, height, srcFormat, // AV_PIX_FMT_NV12, // source
width, height, targetFormat, // AV_PIX_FMT_YUV420P, // target: GL_RGBA
SWS_BILINEAR,
NULL, NULL, NULL
);
if (swsContext < 0) {
return -1;
}
// allocate frame
AVFrame *pSrcFrame = av_frame_alloc();
AVFrame *pTargetFrame = av_frame_alloc();
// source frame
avpicture_fill(
(AVPicture*)pSrcFrame,
(const uint8_t *)srcbuff,
srcFormat,
width,
height
);
// target frame
if (srcFormat != AV_PIX_FMT_RGBA)
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
width,
height
);
else
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
1936,
872
);
ret = sws_scale(
swsContext, // sws context
// source
pSrcFrame->data, // source
pSrcFrame->linesize, // source stride
0, // slice Y
height, // slice H
// target
pTargetFrame->data, // target
pTargetFrame->linesize // target stride
);
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return ret;
}

My encoder settings:
EncoderContext encoderCtx;
encoderCtx.vCodecId = VIDEO_CODEC_H264;
encoderCtx.aCodecId = AUDIO_CODEC_NONE;
encoderCtx.numAudioChannel = 0;
encoderCtx.eFormat = BEYOND_ENCODER_MP4;
encoderCtx.videoBitrate = 10000000;
encoderCtx.audioBitrate = 0;
encoderCtx.audioBitrate = 0;
encoderCtx.timeBaseDen = config->time_base_den;
encoderCtx.timeBaseNum = config->time_base_num;
encoderCtx.width = SINGLE_SIDE_WIDTH;
encoderCtx.height = SINGLE_SIDE_HEIGHT;
CString filePath(config->savePath.c_str());
CString filePath2;
filePath2.Format(_T("%s_%d.mp4"), filePath, i);
CT2A multibyte(filePath2, CP_UTF8);
encoderCtx.outFileName = multibyte;

Thanks all for your help!
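
One stride-related detail may be worth illustrating here (a hedged sketch, not the asker's encoder code and not a confirmed diagnosis): an AVFrame allocated by av_frame_get_buffer can have a linesize larger than the frame width, and copying a tightly packed I420 buffer into such a frame with a single memcpy per plane then produces shifted, stripe-like corruption. Widths like 1920 often happen to match the padded linesize, while 1936 may not. Copying row by row with the frame's own linesize avoids the mismatch.

extern "C" {
#include <libavutil/frame.h>
#include <libavutil/imgutils.h>
}
#include <cstdint>
#include <cstring>

// Copy a tightly packed I420 buffer (stride == width) into an AVFrame whose
// planes may be padded, copying each plane row by row with the frame's own
// linesize. Illustrative sketch only; returns 0 on success, AVERROR on failure.
int FillI420Frame(AVFrame *frame, const uint8_t *i420, int width, int height)
{
    frame->format = AV_PIX_FMT_YUV420P;
    frame->width  = width;
    frame->height = height;

    int ret = av_frame_get_buffer(frame, 32); // linesize may end up larger than width
    if (ret < 0)
        return ret;

    const uint8_t *src_y = i420;
    const uint8_t *src_u = src_y + (size_t)width * height;
    const uint8_t *src_v = src_u + (size_t)(width / 2) * (height / 2);

    for (int y = 0; y < height; y++) // luma plane
        memcpy(frame->data[0] + y * frame->linesize[0], src_y + (size_t)y * width, width);
    for (int y = 0; y < height / 2; y++) { // chroma planes
        memcpy(frame->data[1] + y * frame->linesize[1], src_u + (size_t)y * (width / 2), width / 2);
        memcpy(frame->data[2] + y * frame->linesize[2], src_v + (size_t)y * (width / 2), width / 2);
    }
    return 0;
}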