
Other articles (75)
-
Request to create a channel
12 March 2010
Depending on how the platform is configured, users may have two different ways of requesting the creation of a channel: the first at the moment they register, the second after registration, by filling in a request form.
Both methods ask for the same information and work in much the same way: the prospective user fills in a series of form fields which, first of all, give the administrators information about (...)
-
Customise by adding your logo, banner or background image
5 September 2013
Some themes support three customisation elements: adding a logo; adding a banner; adding a background image.
-
Managing the farm
2 March 2010
The farm as a whole is managed by "super admins".
Certain settings can be adjusted to regulate the needs of the different channels.
To begin with, it uses the "Gestion de mutualisation" plugin.
On other sites (8254)
-
Encode image to video using ffmpeg (sws_scale)
26 September 2012, by bahar_p
I'm trying to encode an image to video using the ffmpeg library.
I have these global params:

//Global params
AVCodec *codec;
AVCodecContext *codecCtx;
uint8_t *output_buffer;
int output_buffer_size;

I divided the encoding into 3 methods:
Initialize the encoder:

jint Java_com_camera_simpledoublewebcams2_CameraPreview_initencoder(JNIEnv* env, jobject thiz){
    avcodec_register_all();
    avcodec_init();
    av_register_all();

    int fps = 30;

    /* find the H263 video encoder */
    codec = avcodec_find_encoder(CODEC_ID_H263);
    if (!codec) {
        LOGI("avcodec_find_encoder() run fail.");
        return -5;
    }

    //allocate context
    codecCtx = avcodec_alloc_context();

    /* put sample parameters */
    codecCtx->bit_rate = 400000;
    /* resolution must be a multiple of two */
    codecCtx->width = 176;
    codecCtx->height = 144;
    /* frames per second */
    codecCtx->time_base = (AVRational){1,fps};
    codecCtx->pix_fmt = PIX_FMT_YUV420P;
    codecCtx->codec_id = CODEC_ID_H263;
    codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;

    /* open it */
    if (avcodec_open(codecCtx, codec) < 0) {
        LOGI("avcodec_open() run fail.");
        return -10;
    }

    //init buffer
    output_buffer_size = 500000;
    output_buffer = malloc(output_buffer_size);

    return 0;
}
Encoding the image:

jint Java_com_camera_simpledoublewebcams2_CameraPreview_encodejpeg(JNIEnv* env, jobject thiz, jchar* cImage, jint imageSize){
    int out_size;
    AVFrame *picture;
    AVFrame *outpic;
    uint8_t *outbuffer;

    //allocate frame
    picture = avcodec_alloc_frame();
    outpic = avcodec_alloc_frame();

    int nbytes = avpicture_get_size(PIX_FMT_YUV420P, codecCtx->width, codecCtx->height);
    outbuffer = (uint8_t*)av_malloc(nbytes);
    outpic->pts = 0;

    //fill picture with image
    avpicture_fill((AVPicture*)picture, (uint8_t*)cImage, PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
    //fill outpic with empty image
    avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, codecCtx->width, codecCtx->height);

    //rescale the image
    struct SwsContext* fooContext = sws_getContext(codecCtx->width, codecCtx->height,
                                                   PIX_FMT_RGBA,
                                                   codecCtx->width, codecCtx->height,
                                                   PIX_FMT_YUV420P,
                                                   SWS_FAST_BILINEAR, NULL, NULL, NULL);
    sws_scale(fooContext, picture->data, picture->linesize, 0, codecCtx->height, outpic->data, outpic->linesize);

    //encode the image
    out_size = avcodec_encode_video(codecCtx, output_buffer, output_buffer_size, outpic);
    out_size += avcodec_encode_video(codecCtx, output_buffer, output_buffer_size, outpic);

    //release pictures
    av_free(outbuffer);
    av_free(picture);
    av_free(outpic);

    return out_size;
}
And closing the encoder:

void Java_com_camera_simpledoublewebcams2_CameraPreview_closeencoder(JNIEnv* env, jobject thiz){
    free(output_buffer);
    avcodec_close(codecCtx);
    av_free(codecCtx);
}
When I send the first image, I get a result from the encoder. When I try to send another image, the program crashes.
I tried calling init once, then sending the images, then calling close - that didn't work.
I tried calling init and close for every image - that didn't work either. Any suggestions?
Thanks!
EDIT: After further research I found that the problem is in the sws_scale call. Still don't know what is causing this issue...
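For what it's worth, two things in the code above commonly make sws_scale crash on the second frame: the SwsContext is recreated on every call and never freed with sws_freeContext, and the RGBA buffer wrapped by avpicture_fill must really hold codecCtx->width * codecCtx->height * 4 bytes. Below is a minimal, defensive sketch of the scaling step against the same 2012-era API; the function name and return codes are illustrative, not taken from the original project.

    // Created once (e.g. in initencoder) and freed once in closeencoder with sws_freeContext().
    static struct SwsContext *swsCtx = NULL;

    // Wrap an RGBA buffer and convert it into the YUV420P frame 'dst'.
    // 'dst' must already own a buffer of avpicture_get_size(PIX_FMT_YUV420P, w, h) bytes.
    int scale_rgba_to_yuv420p(uint8_t *rgba, AVFrame *src, AVFrame *dst)
    {
        if (!swsCtx) {
            swsCtx = sws_getContext(codecCtx->width, codecCtx->height, PIX_FMT_RGBA,
                                    codecCtx->width, codecCtx->height, PIX_FMT_YUV420P,
                                    SWS_FAST_BILINEAR, NULL, NULL, NULL);
            if (!swsCtx)
                return -1;                       // scaler could not be created
        }

        // src->data/linesize now describe the RGBA image; this is only valid if
        // 'rgba' really holds width*height*4 bytes.
        avpicture_fill((AVPicture*)src, rgba, PIX_FMT_RGBA,
                       codecCtx->width, codecCtx->height);

        return sws_scale(swsCtx, (const uint8_t* const*)src->data, src->linesize,
                         0, codecCtx->height, dst->data, dst->linesize);
    }

If the buffer handed over from Java is the camera preview size rather than 176x144, the source dimensions passed to sws_getContext have to be the real capture size, otherwise sws_scale reads past the end of the buffer.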
-
Screen recorder application: video output size issues [closed]
25 June 2022, by jNc
I have a working GitHub project:
https://github.com/abdullahfarwees/screen-recorder-ffmpeg-cpp


Everything works perfectly fine, but the output video resolution is not correct.
I need to change the output video resolution.


Using Ubuntu / gcc / OpenCV.


[...]
outAVCodecContext->bit_rate = 400000; // 2500000
outAVCodecContext->width = 1920; // <- change this one
outAVCodecContext->height = 1080; // <- and also this one
outAVCodecContext->gop_size = 3;
[...]
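Setting outAVCodecContext->width and height only declares the size the encoder expects; the frames grabbed from the screen also have to be rescaled from the capture resolution to that size before being encoded, typically with libswscale. Here is a minimal sketch of such a rescaling step, assuming the newer AVFrame-based API; the function and variable names are placeholders rather than code from the linked project.

    #include <libswscale/swscale.h>
    #include <libavutil/frame.h>

    // Convert/rescale one captured frame into dstFrame at the encoder's size.
    // Returns 0 on success, a negative value on failure.
    int rescale_frame(AVFrame *srcFrame, AVFrame *dstFrame,
                      int dstWidth, int dstHeight, enum AVPixelFormat dstPixFmt)
    {
        // In a real recorder the SwsContext should be created once and reused.
        struct SwsContext *sws = sws_getContext(srcFrame->width, srcFrame->height,
                                                (enum AVPixelFormat)srcFrame->format,
                                                dstWidth, dstHeight, dstPixFmt,
                                                SWS_BICUBIC, NULL, NULL, NULL);
        if (!sws)
            return -1;

        dstFrame->width  = dstWidth;
        dstFrame->height = dstHeight;
        dstFrame->format = dstPixFmt;
        if (av_frame_get_buffer(dstFrame, 0) < 0) {   // allocate destination planes
            sws_freeContext(sws);
            return -1;
        }

        sws_scale(sws, (const uint8_t* const*)srcFrame->data, srcFrame->linesize,
                  0, srcFrame->height, dstFrame->data, dstFrame->linesize);

        sws_freeContext(sws);
        return 0;
    }

The same dstWidth/dstHeight then go into outAVCodecContext->width/height, and both must stay even for yuv420p output.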



-
FFmpeg Autogen and Unity C# to generate video from screenshots (FFmpeg.Autogen)
1 June 2022, by cameron gibbs
I've taken the FFmpegHelper, VideoFrameConverter and H264VideoStreamEncoder classes straight from the FFmpeg.AutoGen.Example, rolled my own FFmpegBinariesHelper class and Size struct, and mangled the EncodeImagesToH264 method from Program.cs to look like the code below. I capture a bunch of frames into textures and feed them into Encoder.EncodeImagesToH264. It produces a file I'm calling outputFileName.h264 just fine, no errors. I've changed H264VideoStreamEncoder a little based on ffmpeg's own C++ examples because they had a few things the C# example seemed to be missing, but that hasn't made any difference.

The video is weird:

- it only plays in VLC; is there another AVPixelFormat I should be using for the destinationPixelFormat so that anything can play it?
- VLC is unable to detect the video length or show the current time
- it plays back weird, as if the first few seconds are all the same frame, then starts playing what appears to be some of the frames I'd expect

public static class Encoder
 {
 public static unsafe void EncodeImagesToH264(Texture2D[] images, int fps, string outputFileName)
 {
 FFmpegBinariesHelper.RegisterFFmpegBinaries();

 var fistFrameImage = images[0];
 outputFileName = Path.ChangeExtension(outputFileName, ".h264");
 var sourceSize = new Size(fistFrameImage.width, fistFrameImage.height);
 var sourcePixelFormat = AVPixelFormat.AV_PIX_FMT_RGB24;
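 // Note: AV_PIX_FMT_RGB24 only matches the texture data if the Texture2D was created
 // with TextureFormat.RGB24; GetRawTextureData on an RGBA32 texture returns 4 bytes
 // per pixel, which won't match the RGB24 converter and the frames will come out garbled.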
 var destinationSize = sourceSize;
 var destinationPixelFormat = AVPixelFormat.AV_PIX_FMT_YUV420P;

 try
 {
 using (var vfc = new VideoFrameConverter(
 sourceSize,
 sourcePixelFormat,
 destinationSize,
 destinationPixelFormat))
 {
 using var fs = File.Open(outputFileName, FileMode.Create);
 using var vse = new H264VideoStreamEncoder(fs, fps, destinationSize);
 var frameNumber = 0;
 foreach (var frameFile in images)
 {
 var bitmapData = GetBitmapData(frameFile);

 //var pBitmapData = (byte*)NativeArrayUnsafeUtility
 // .GetUnsafeBufferPointerWithoutChecks(bitmapData);

 fixed (byte* pBitmapData = bitmapData)
 {
 var data = new byte_ptrArray8 { [0] = pBitmapData };
 var linesize = new int_array8 { [0] = bitmapData.Length / sourceSize.Height };
 var frame = new AVFrame
 {
 data = data,
 linesize = linesize,
 height = sourceSize.Height
 };

 var convertedFrame = vfc.Convert(frame);
 convertedFrame.pts = frameNumber;

 vse.Encode(convertedFrame);

 Debug.Log($"frame: {frameNumber}");
 frameNumber++;
 }
 }
 byte[] endcode = { 0, 0, 1, 0xb7 };
 fs.Write(endcode, 0, endcode.Length);
 }
 Debug.Log(outputFileName);
 }
 catch (Exception ex)
 {
 Debug.LogException(ex);
 }
 }

 private static byte[] GetBitmapData(Texture2D frameBitmap)
 {
 return frameBitmap.GetRawTextureData();
 }
 }

 public sealed unsafe class H264VideoStreamEncoder : IDisposable
 {
 private readonly Size _frameSize;
 private readonly int _linesizeU;
 private readonly int _linesizeV;
 private readonly int _linesizeY;
 private readonly AVCodec* _pCodec;
 private readonly AVCodecContext* _pCodecContext;
 private readonly Stream _stream;
 private readonly int _uSize;
 private readonly int _ySize;

 public H264VideoStreamEncoder(Stream stream, int fps, Size frameSize)
 {
 _stream = stream;
 _frameSize = frameSize;

 var codecId = AVCodecID.AV_CODEC_ID_H264;
 _pCodec = ffmpeg.avcodec_find_encoder(codecId);
 if (_pCodec == null)
 throw new InvalidOperationException("Codec not found.");

 _pCodecContext = ffmpeg.avcodec_alloc_context3(_pCodec);
 _pCodecContext->bit_rate = 400000;
 _pCodecContext->width = frameSize.Width;
 _pCodecContext->height = frameSize.Height;
 _pCodecContext->time_base = new AVRational { num = 1, den = fps };
 _pCodecContext->gop_size = 10;
 _pCodecContext->max_b_frames = 1;
 _pCodecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;

 if (codecId == AVCodecID.AV_CODEC_ID_H264)
 ffmpeg.av_opt_set(_pCodecContext->priv_data, "preset", "veryslow", 0);

 ffmpeg.avcodec_open2(_pCodecContext, _pCodec, null).ThrowExceptionIfError();

 _linesizeY = frameSize.Width;
 _linesizeU = frameSize.Width / 2;
 _linesizeV = frameSize.Width / 2;

 _ySize = _linesizeY * frameSize.Height;
 _uSize = _linesizeU * frameSize.Height / 2;
 }

 public void Dispose()
 {
 ffmpeg.avcodec_close(_pCodecContext);
 ffmpeg.av_free(_pCodecContext);
 }

 public void Encode(AVFrame frame)
 {
 if (frame.format != (int)_pCodecContext->pix_fmt)
 throw new ArgumentException("Invalid pixel format.", nameof(frame));
 if (frame.width != _frameSize.Width)
 throw new ArgumentException("Invalid width.", nameof(frame));
 if (frame.height != _frameSize.Height)
 throw new ArgumentException("Invalid height.", nameof(frame));
 if (frame.linesize[0] < _linesizeY)
 throw new ArgumentException("Invalid Y linesize.", nameof(frame));
 if (frame.linesize[1] < _linesizeU)
 throw new ArgumentException("Invalid U linesize.", nameof(frame));
 if (frame.linesize[2] < _linesizeV)
 throw new ArgumentException("Invalid V linesize.", nameof(frame));
 if (frame.data[1] - frame.data[0] < _ySize)
 throw new ArgumentException("Invalid Y data size.", nameof(frame));
 if (frame.data[2] - frame.data[1] < _uSize)
 throw new ArgumentException("Invalid U data size.", nameof(frame));

 var pPacket = ffmpeg.av_packet_alloc();
 try
 {
 int error;
 do
 {
 ffmpeg.avcodec_send_frame(_pCodecContext, &frame).ThrowExceptionIfError();
 ffmpeg.av_packet_unref(pPacket);
 error = ffmpeg.avcodec_receive_packet(_pCodecContext, pPacket);
 } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN));

 error.ThrowExceptionIfError();

 using var packetStream = new UnmanagedMemoryStream(pPacket->data, pPacket->size);
 packetStream.CopyTo(_stream);
 }
 finally
 {
 ffmpeg.av_packet_free(&pPacket);
 }
 }
 }
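Two follow-up notes on the encoder above. First, a bare .h264 file is an elementary stream with no container, so players have no duration or seek index to show; VLC plays it anyway, many others refuse to open it. Muxing the packets into MP4/MKV, or remuxing the finished file (for example with ffmpeg -i outputFileName.h264 -c copy output.mp4), gives players the metadata they need. Second, the send/receive loop re-sends the same frame until a packet arrives, which can duplicate frames at the start of the stream; the usual pattern in the C API, which FFmpeg.AutoGen mirrors, is to send each frame once, drain every pending packet, and flush with a null frame at the end. A minimal sketch in C, with an illustrative helper name:

    // Send one frame (or NULL to flush), then drain all packets the encoder has ready.
    // Returns 0 on success or when more input is needed, a negative AVERROR on failure.
    static int encode_and_drain(AVCodecContext *ctx, AVFrame *frame, FILE *out)
    {
        int ret = avcodec_send_frame(ctx, frame);          // frame == NULL starts the flush
        if (ret < 0)
            return ret;

        AVPacket *pkt = av_packet_alloc();
        for (;;) {
            ret = avcodec_receive_packet(ctx, pkt);
            if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
                ret = 0;                                   // need more input / fully flushed
                break;
            }
            if (ret < 0)
                break;                                     // real encoding error
            fwrite(pkt->data, 1, pkt->size, out);          // consume the packet, then reuse it
            av_packet_unref(pkt);
        }
        av_packet_free(&pkt);
        return ret;
    }

    // Usage: call encode_and_drain(ctx, frame, out) once per frame with an increasing
    // frame->pts, then encode_and_drain(ctx, NULL, out) once before closing the file.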


