
Recherche avancée
Autres articles (71)
-
Websites made with MediaSPIP
2 mai 2011, par — This page lists some websites based on MediaSPIP.
-
Creating farms of unique websites
13 avril 2011, par — MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
This allows (among other things) : implementation costs to be shared between several different projects / individuals rapid deployment of multiple unique sites creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...) -
Ecrire une actualité
21 juin 2013, par — Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
Vous pouvez personnaliser le formulaire de création d’une actualité.
Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...)
Sur d’autres sites (15026)
-
ffmpeg sws_scale crash when convert video frame NV12 buffer to I420 buffer
5 février 2017, par AnhTuan.Ng — I have a stream of 2048x896 (NV12 format) video frames, and I have to crop them to 1936x872 I420 frames, then encode them to a new 1936x872 MP4 file.
Crop frame from 2048x896 to 1936x872 image
I use openCV to crop the original NV12 frames and use FFMPEG sws_scale to convert NV12 to I420 format. But My app crash on sws_scale function.
I also tried some other crop sizes, and found that with a 1920x872 crop frame size, it works!
This is my code :// FFmpeg library
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}
//Using openCV library:
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"
#ifdef _DEBUG
#pragma comment(lib, "opencv_core2411d.lib")
#pragma comment(lib, "opencv_highgui2411d.lib")
#pragma comment(lib, "opencv_imgproc2411d.lib")
#else
#pragma comment(lib, "opencv_core2411.lib")
#pragma comment(lib, "opencv_highgui2411.lib")
#pragma comment(lib, "opencv_imgproc2411.lib")
#endif
using namespace cv;
void RotateOpenCVMatrix(Mat& src, double angle, Mat& dst)
{
// Rotates `src` 90 degrees clockwise into `dst` via transpose + horizontal flip.
// NOTE(review): the `angle` parameter is ignored — only a fixed 90-degree
// clockwise rotation is implemented; confirm callers never pass other angles.
Mat transposed;
transpose(src, transposed);
flip(transposed, dst, 1); // flipCode=1 (horizontal): transpose + h-flip = 90° CW
}
void CropNV12ImageFrame(unsigned char *srcBuff, unsigned char *resultBuff,
int srcWidth, int srcHeight, int x, int y, int cropWidth, int cropHeight,
BOOL isRotate)
{
//create src_NV12 matrix:
Mat src_Y_img;
Mat src_UV_img;
Mat crop_Y_mat;
Mat crop_UV_mat;
src_Y_img.create(srcHeight, srcWidth, CV_8UC1);
src_UV_img.create(srcHeight / 2, srcWidth, CV_8UC1);
memcpy(src_Y_img.data, srcBuff, srcWidth * srcHeight);
memcpy(src_UV_img.data, srcBuff + srcWidth * srcHeight, srcWidth * srcHeight / 2);
//Create result Y plane matrix:
crop_Y_mat = src_Y_img(cv::Rect(x, y, cropWidth, cropHeight)).clone();
crop_UV_mat = src_UV_img(cv::Rect(x, y / 2, cropWidth, cropHeight / 2)).clone();
//Rotate by openCV:
if (isRotate)
{
Mat result_Y_mat;
Mat result_UV_mat;
RotateOpenCVMatrix(crop_Y_mat, 90, result_Y_mat);
RotateOpenCVMatrix(crop_UV_mat, 90, result_UV_mat);
//Mem copy to output data buffer:
memcpy(resultBuff, result_Y_mat.data, cropWidth * cropHeight);
memcpy(resultBuff + cropWidth * cropHeight, result_UV_mat.data, cropWidth * cropHeight / 2);
}
else
{
memcpy(resultBuff, crop_Y_mat.data, cropWidth * cropHeight);
memcpy(resultBuff + cropWidth * cropHeight, crop_UV_mat.data, cropWidth * cropHeight / 2);
}
}
int ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
{
// Converts one width x height frame from srcFormat (in srcbuff) to
// targetFormat (into targetBuff) using libswscale.
// Returns the row count reported by sws_scale(), or -1 on setup failure.
// Both buffers are caller-owned and must hold a full width x height frame
// of their respective formats, tightly packed (no row padding).
struct SwsContext *swsContext = sws_getContext(
width, height, srcFormat, // source
width, height, targetFormat, // target
SWS_BILINEAR,
NULL, NULL, NULL
);
// BUG FIX: sws_getContext() returns a pointer and signals failure with
// NULL — the original `swsContext < 0` comparison could never detect an
// error, so a bad format/size combination crashed later in sws_scale().
if (!swsContext) {
return -1;
}
// Allocate frame headers (used only for their data/linesize arrays).
AVFrame *pSrcFrame = av_frame_alloc();
AVFrame *pTargetFrame = av_frame_alloc();
if (!pSrcFrame || !pTargetFrame) {
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return -1;
}
// Describe the source buffer. avpicture_fill() computes tightly packed
// plane pointers/linesizes with no alignment padding.
avpicture_fill(
(AVPicture*)pSrcFrame,
(const uint8_t *)srcbuff,
srcFormat,
width,
height
);
// BUG FIX: the original hard-coded 1936x872 for the target whenever the
// source was RGBA, corrupting output for every other requested size and
// disagreeing with the sws context above. The target is always the same
// width x height the context was configured with.
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
width,
height
);
int ret = sws_scale(
swsContext, // sws context
pSrcFrame->data, // source planes
pSrcFrame->linesize, // source strides
0, // slice Y
height, // slice height (whole frame)
pTargetFrame->data, // target planes
pTargetFrame->linesize // target strides
);
// Release everything in all paths — frames first, then the context.
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return ret;
}
void CropVideoFrame(BYTE* nv12BufferInput, BYTE* i420BufferOutput, int inWidth, int inHeight, int outWidth, int outHeight)
{
// Crops the top-left outWidth x outHeight region out of an inWidth x
// inHeight NV12 frame, then converts it to planar I420 into the caller's
// output buffer. Temporary NV12 buffer is sized 3/2 * W * H (NV12 layout).
BYTE *cropNV12Image = new BYTE[outWidth* outHeight * 3 / 2 + 1];
// BUG FIX: the original called CropImageFrame(input, cropNV12Image[0], ...)
// — `input` is undefined in this scope (the parameter is nv12BufferInput),
// CropImageFrame does not exist (the function above is CropNV12ImageFrame),
// and cropNV12Image[0] is a single BYTE, not the buffer pointer.
CropNV12ImageFrame(nv12BufferInput, cropNV12Image, inWidth, inHeight, 0, 0, outWidth, outHeight, FALSE);
// Convert the cropped NV12 buffer to I420 (same fix: pass the pointer,
// not its first element).
ConvertColor(cropNV12Image, i420BufferOutput, outWidth, outHeight, AV_PIX_FMT_NV12, AV_PIX_FMT_YUV420P);
// Free the temporary buffer.
delete[] cropNV12Image;
} -
FFMPEG Encoder error when playback with 1936 width video size
5 février 2017, par AnhTuan.Ng — I have a 4096x1760 YUV-NV12 video frame stream and I have to split it into 4 frames with resolution 1936x872, then encode them to 4 video files (like the picture below)
Crop frame
My algorithm is :
1. Using openCV to crop and splitting (4096x1760) NV12 frame to 4 (1936x872) down size.
2. Using FFMPEG swscale() to convert RGBA to I420 buffer
3. Using FFMpeg to encode 4 (1936x872) frames to 4 video files.But i have a trouble, that 4 video encoded have error signals, Just horizontal stripes on screen (such as zebra skin, So sad !)
Video error
I think the error is caused by the encoder, because when I change the crop size to 1920x872, everything seems to be OK (as one check, I dump the buffer data to a bmp image, and everything looks good!) And, what a surprise! When I change my crop frame size from 1936x872 to 1920x872, everything works and the video output plays smoothly!
So I think this error because my FFMPEG encoder setting up wrong. Please tell me why and show me a way to fix it ! Thanks you !
this is my code :
-My define :
#define SIDE_FRAME_WIDTH 4096
#define SIDE_FRAME_HEIGHT 1760
#define SINGLE_SIDE_WIDTH 1936 //1920 is woking!
#define SINGLE_SIDE_HEIGHT 872 //872
#define SIDE_CROP_WIDTH 1936
#define SIDE_CROP_HEIGHT 872My Splitting thread :
void splittingThread(SplitingThreadParam &param)
{
// Splits one SIDE_FRAME_WIDTH x SIDE_FRAME_HEIGHT NV12 frame into four
// SINGLE_SIDE_WIDTH x SINGLE_SIDE_HEIGHT RGBA crops, then converts each
// crop to planar I420 into the caller-owned out1..out4 buffers.
Converter *pConverter = (Converter*)param.parentParamClass;
BYTE* cropRGBABuff[4];
for (int i = 0; i < 4; i++)
cropRGBABuff[i] = new BYTE[SINGLE_SIDE_WIDTH*SINGLE_SIDE_HEIGHT * 4]; // 4 bytes/pixel (RGBA)
//Split:
pConverter->OpenCVSplittingSideFrame(param.inputBuff, SIDE_FRAME_WIDTH, SIDE_FRAME_HEIGHT, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, cropRGBABuff[0], cropRGBABuff[1], cropRGBABuff[2], cropRGBABuff[3]);
//Convert to I420:
pConverter->ConvertColor(cropRGBABuff[0], param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[1], param.out2Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[2], param.out3Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
pConverter->ConvertColor(cropRGBABuff[3], param.out4Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, AV_PIX_FMT_RGBA, AV_PIX_FMT_YUV420P);
// BUG FIX: the four temporary crop buffers were never released — a leak of
// roughly 4 * 1936 * 872 * 4 bytes (~27 MB) per processed frame. Free them
// once the conversions into the caller's buffers are complete.
for (int i = 0; i < 4; i++)
delete[] cropRGBABuff[i];
//pConverter->DumpBufferData(param.out1Buff, SINGLE_SIDE_WIDTH, SINGLE_SIDE_HEIGHT, 1);
}My Splitting function :
// Splits one srcWidth x srcHeight NV12 frame into four cropWidth x cropHeight
// RGBA tiles (top-left, bottom-left, top-right, bottom-right quadrants, at
// offsets fixed by SIDE_CROP_WIDTH/SIDE_CROP_HEIGHT) and copies each tile into
// the corresponding caller-owned output buffer (cropWidth*cropHeight*4 bytes).
void Converter::OpenCVSplittingSideFrame(BYTE *input, unsigned int srcWidth, unsigned int srcHeight,
unsigned int cropWidth, unsigned int cropHeight,
BYTE *out1, BYTE *out2, BYTE *out3, BYTE *out4)
{
Mat nv12Mat;
Mat rgbMat;
Mat rgbCropImg[4];
// Wrap the whole NV12 buffer (Y plane + half-height UV plane = 3/2 * H rows)
// in a single one-channel matrix, as OpenCV's YUV conversions expect.
nv12Mat.create(srcHeight * 3 / 2, srcWidth, CV_8UC1);
memcpy(nv12Mat.data, input, srcWidth * srcHeight * 3 / 2);
// Destination RGBA matrix (4 channels).
rgbMat.create(srcHeight, srcWidth, CV_8UC4);
// NOTE(review): the surrounding text describes the input as NV12, but the
// conversion code used here is COLOR_YUV2RGBA_NV21 — NV21 has U/V swapped
// relative to NV12, which would swap red/blue chroma. Confirm the actual
// camera format before changing this.
cvtColor(nv12Mat, rgbMat, COLOR_YUV2RGBA_NV21);
// Crop the four quadrants; offsets are the compile-time tile size, so the
// crops can overlap or clip if cropWidth/cropHeight differ from them.
CropMatrix(rgbMat, rgbCropImg[0], 0, 0, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[1], 0, SIDE_CROP_HEIGHT, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[2], SIDE_CROP_WIDTH, 0, cropWidth, cropHeight);
CropMatrix(rgbMat, rgbCropImg[3], SIDE_CROP_WIDTH, SIDE_CROP_HEIGHT, cropWidth, cropHeight);
// Copy each cropped tile out as a packed RGBA buffer (4 bytes per pixel).
// NOTE(review): assumes CropMatrix returns contiguous matrices — verify,
// otherwise these memcpys would read stale/invalid row padding.
memcpy(out1, rgbCropImg[0].data, cropWidth * cropHeight * 4);
memcpy(out2, rgbCropImg[1].data, cropWidth * cropHeight * 4);
memcpy(out3, rgbCropImg[2].data, cropWidth * cropHeight * 4);
memcpy(out4, rgbCropImg[3].data, cropWidth * cropHeight * 4);
}Convert color range to RGBA to I420 function :
int Converter::ConvertColor(unsigned char *srcbuff, unsigned char *targetBuff, unsigned int width, unsigned int height, AVPixelFormat srcFormat, AVPixelFormat targetFormat)
{
// Converts one width x height frame from srcFormat (in srcbuff) to
// targetFormat (into targetBuff) using libswscale.
// Returns the row count reported by sws_scale(), or -1 on setup failure.
// Both buffers are caller-owned, tightly packed (no row padding), and must
// hold a full width x height frame of their respective formats.
struct SwsContext *swsContext = sws_getContext(
width, height, srcFormat, // source
width, height, targetFormat, // target
SWS_BILINEAR,
NULL, NULL, NULL
);
// BUG FIX: sws_getContext() returns a pointer and signals failure with
// NULL — the original `swsContext < 0` comparison could never detect an
// error, so an unsupported format/size combination crashed in sws_scale().
if (!swsContext) {
return -1;
}
// Allocate frame headers (used only for their data/linesize arrays).
AVFrame *pSrcFrame = av_frame_alloc();
AVFrame *pTargetFrame = av_frame_alloc();
if (!pSrcFrame || !pTargetFrame) {
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return -1;
}
// Describe the source buffer: avpicture_fill() computes tightly packed
// plane pointers/linesizes with no alignment padding.
avpicture_fill(
(AVPicture*)pSrcFrame,
(const uint8_t *)srcbuff,
srcFormat,
width,
height
);
// BUG FIX: the original hard-coded 1936x872 for the target whenever the
// source was RGBA, corrupting output for every other requested size and
// disagreeing with the sws context above. The target is always the same
// width x height the context was configured with.
avpicture_fill(
(AVPicture*)pTargetFrame,
(const uint8_t *)targetBuff,
targetFormat,
width,
height
);
int ret = sws_scale(
swsContext, // sws context
pSrcFrame->data, // source planes
pSrcFrame->linesize, // source strides
0, // slice Y
height, // slice height (whole frame)
pTargetFrame->data, // target planes
pTargetFrame->linesize // target strides
);
// Release everything in all paths — frames first, then the context.
av_frame_free(&pSrcFrame);
av_frame_free(&pTargetFrame);
sws_freeContext(swsContext);
return ret;
}My encoder setting :
// Configures one MP4/H.264 encoder instance for a single cropped sub-stream.
// NOTE(review): `config` and `i` are defined outside this snippet (presumably
// the app config and the loop index over the four sub-streams) — verify.
EncoderContext encoderCtx;
encoderCtx.vCodecId = VIDEO_CODEC_H264;
encoderCtx.aCodecId = AUDIO_CODEC_NONE; // video-only output
encoderCtx.numAudioChannel = 0;
encoderCtx.eFormat = BEYOND_ENCODER_MP4;
encoderCtx.videoBitrate = 10000000; // 10 Mbit/s
encoderCtx.audioBitrate = 0;
// NOTE(review): duplicated assignment — the line below repeats the one above
// and can be removed.
encoderCtx.audioBitrate = 0;
encoderCtx.timeBaseDen = config->time_base_den;
encoderCtx.timeBaseNum = config->time_base_num;
// Frame size fed to the encoder; must match the crop size produced upstream.
encoderCtx.width = SINGLE_SIDE_WIDTH;
encoderCtx.height = SINGLE_SIDE_HEIGHT;
// Output path: "<savePath>_<i>.mp4", converted to UTF-8 for the encoder API.
CString filePath(config->savePath.c_str());
CString filePath2;
filePath2.Format(_T("%s_%d.mp4"), filePath, i);
CT2A multibyte(filePath2, CP_UTF8);
encoderCtx.outFileName = multibyte;Thanks all for your help !
-
Using ffmpeg showwaves as circle. Is it possible ?
14 décembre 2020, par Tudor-Radu Barbu