
Recherche avancée
Médias (1)
-
Somos millones 1
21 juillet 2014, par
Mis à jour : Juin 2015
Langue : français
Type : Video
Autres articles (47)
-
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 septembre 2013, par —
Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière ; l’ajout d’une image de fond ;
-
Ecrire une actualité
21 juin 2013, par —
Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
Vous pouvez personnaliser le formulaire de création d’une actualité.
Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...) -
Publier sur MédiaSpip
13 juin 2013 —
Puis-je poster des contenus à partir d’une tablette Ipad ?
Oui, si votre Médiaspip installé est à la version 0.2 ou supérieure. Contacter au besoin l’administrateur de votre MédiaSpip pour le savoir
Sur d’autres sites (6487)
-
Unity : Converting Texture2D to YUV420P and sending with UDP using FFmpeg
22 juin 2018, par potu1304 —
In my Unity game each frame is rendered into a texture and then put together into a video using FFmpeg. Now my question is if I am doing this right, because avcodec_send_frame throws an exception every time.
I am pretty sure that I am doing something wrong or in the wrong order or simply missing something.Here is the code for capturing the texture :
void Update() {
//StartCoroutine(CaptureFrame());
if (rt == null)
{
rect = new Rect(0, 0, captureWidth, captureHeight);
rt = new RenderTexture(captureWidth, captureHeight, 24);
frame = new Texture2D(captureWidth, captureHeight, TextureFormat.RGB24, false);
}
Camera camera = this.GetComponent<Camera>(); // NOTE: added because there was no reference to camera in original script; must add this script to Camera
camera.targetTexture = rt;
camera.Render();
RenderTexture.active = rt;
frame.ReadPixels(rect, 0, 0);
frame.Apply();
camera.targetTexture = null;
RenderTexture.active = null;
byte[] fileData = null;
fileData = frame.GetRawTextureData();
encoding(fileData, fileData.Length);
}
And here is the code for encoding and sending the byte data :
private unsafe void encoding(byte[] bytes, int size)
{
Debug.Log("Encoding...");
AVCodec* codec;
codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
int ret, got_output = 0;
AVCodecContext* codecContext = null;
codecContext = ffmpeg.avcodec_alloc_context3(codec);
codecContext->bit_rate = 400000;
codecContext->width = captureWidth;
codecContext->height = captureHeight;
//codecContext->time_base.den = 25;
//codecContext->time_base.num = 1;
AVRational timeBase = new AVRational();
timeBase.num = 1;
timeBase.den = 25;
codecContext->time_base = timeBase;
//AVStream* videoAVStream = null;
//videoAVStream->time_base = timeBase;
AVRational frameRate = new AVRational();
frameRate.num = 25;
frameRate.den = 1;
codecContext->framerate = frameRate;
codecContext->gop_size = 10;
codecContext->max_b_frames = 1;
codecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
AVFrame* inputFrame;
inputFrame = ffmpeg.av_frame_alloc();
inputFrame->format = (int)codecContext->pix_fmt;
inputFrame->width = captureWidth;
inputFrame->height = captureHeight;
inputFrame->linesize[0] = inputFrame->width;
AVPixelFormat dst_pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P, src_pix_fmt = AVPixelFormat.AV_PIX_FMT_RGBA;
int src_w = 1920, src_h = 1080, dst_w = 1920, dst_h = 1080;
SwsContext* sws_ctx;
GCHandle pinned = GCHandle.Alloc(bytes, GCHandleType.Pinned);
IntPtr address = pinned.AddrOfPinnedObject();
sbyte** inputData = (sbyte**)address;
sws_ctx = ffmpeg.sws_getContext(src_w, src_h, src_pix_fmt,
dst_w, dst_h, dst_pix_fmt,
0, null, null, null);
fixed (int* lineSize = new int[1])
{
lineSize[0] = 4 * captureHeight;
// Convert RGBA to YUV420P
ffmpeg.sws_scale(sws_ctx, inputData, lineSize, 0, codecContext->width, inputFrame->extended_data, inputFrame->linesize);
}
inputFrame->pts = counter++;
if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
throw new ApplicationException("Error sending a frame for encoding!");
AVPacket pkt;
pkt = new AVPacket();
//pkt.data = inData;
AVPacket* packet = &pkt;
ffmpeg.av_init_packet(packet);
Debug.Log("pkt.size " + pkt.size);
pinned.Free();
AVDictionary* options = null;
ffmpeg.av_dict_set(&options, "pkt_size", "1300", 0);
ffmpeg.av_dict_set(&options, "buffer_size", "65535", 0);
AVIOContext* server = null;
ffmpeg.avio_open2(&server, "udp://192.168.0.1:1111", ffmpeg.AVIO_FLAG_WRITE, null, &options);
Debug.Log("encoded");
ret = ffmpeg.avcodec_encode_video2(codecContext, &pkt, inputFrame, &got_output);
ffmpeg.avio_write(server, pkt.data, pkt.size);
ffmpeg.av_free_packet(&pkt);
pkt.data = null;
pkt.size = 0;
}
And every time I start the game
if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
throw new ApplicationException("Error sending a frame for encoding!");
throws the exception.
Any help in fixing the issue would be greatly appreciated :) -
ffmpeg memory leak in the avcodec_open2 method
21 août 2019, par unresolved_external —
I’ve developed an application which handles a live video stream. The problem is that it should run as a service, and over time I am noticing some memory increase. When I check the application with valgrind it does not find any leak-related issues.
So I’ve checked it with Google profile tools. This is the result (subtracting one of the first dumps from the latest) after an approximately 6 hour run :
30.0 35.7% 35.7% 30.0 35.7% av_malloc
28.9 34.4% 70.2% 28.9 34.4% av_reallocp
24.5 29.2% 99.4% 24.5 29.2% x264_malloc
When I check the memory on the graph I see that these allocations are related to avcodec_open2. The client code is :
g_EncoderMutex.lock();
ffmpeg_encoder_start(OutFileName.c_str(), AV_CODEC_ID_H264, m_FPS, width, height);
for (pts = 0; pts < VideoImages.size(); pts++) {
m_frame->pts = pts;
ffmpeg_encoder_encode_frame(VideoImages[pts].RGBimage[0]);
}
ffmpeg_encoder_finish();
g_EncoderMutex.unlock()
The ffmpeg_encoder_start method is :
void VideoEncoder::ffmpeg_encoder_start(const char *filename, int codec_id, int fps, int width, int height)
{
int ret;
m_FPS=fps;
AVOutputFormat * fmt = av_guess_format(filename, NULL, NULL);
m_oc = NULL;
avformat_alloc_output_context2(&m_oc, NULL, NULL, filename);
m_stream = avformat_new_stream(m_oc, 0);
AVCodec *codec=NULL;
codec = avcodec_find_encoder(codec_id);
if (!codec)
{
fprintf(stderr, "Codec not found\n");
return; //-1
}
m_c=m_stream->codec;
avcodec_get_context_defaults3(m_c, codec);
m_c->bit_rate = 400000;
m_c->width = width;
m_c->height = height;
m_c->time_base.num = 1;
m_c->time_base.den = m_FPS;
m_c->gop_size = 10;
m_c->max_b_frames = 1;
m_c->pix_fmt = AV_PIX_FMT_YUV420P;
if (codec_id == AV_CODEC_ID_H264)
av_opt_set(m_c->priv_data, "preset", "ultrafast", 0);
if (m_oc->oformat->flags & AVFMT_GLOBALHEADER)
m_c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
avcodec_open2( m_c, codec, NULL );
m_stream->time_base=(AVRational){1, m_FPS};
if (avio_open(&m_oc->pb, filename, AVIO_FLAG_WRITE) < 0)
{
printf( "Could not open '%s'\n", filename);
exit(1);
}
avformat_write_header(m_oc, NULL);
m_frame = av_frame_alloc();
if (!m_frame) {
printf( "Could not allocate video frame\n");
exit(1);
}
m_frame->format = m_c->pix_fmt;
m_frame->width = m_c->width;
m_frame->height = m_c->height;
ret = av_image_alloc(m_frame->data, m_frame->linesize, m_c->width, m_c->height, m_c->pix_fmt, 32);
if (ret < 0) {
printf("Could not allocate raw picture buffer\n");
exit(1);
}
}
The ffmpeg_encoder_encode_frame is :
void VideoEncoder::ffmpeg_encoder_encode_frame(uint8_t *rgb)
{
int ret, got_output;
ffmpeg_encoder_set_frame_yuv_from_rgb(rgb);
av_init_packet(&m_pkt);
m_pkt.data = NULL;
m_pkt.size = 0;
ret = avcodec_encode_video2(m_c, &m_pkt, m_frame, &got_output);
if (ret < 0) {
printf("Error encoding frame\n");
exit(1);
}
if (got_output)
{
av_packet_rescale_ts(&m_pkt,
(AVRational){1, m_FPS}, m_stream->time_base);
m_pkt.stream_index = m_stream->index;
int ret = av_interleaved_write_frame(m_oc, &m_pkt);
av_packet_unref(&m_pkt);
}
}
ffmpeg_encoder_finish code is :
void VideoEncoder::ffmpeg_encoder_finish(void)
{
int got_output, ret;
do {
ret = avcodec_encode_video2(m_c, &m_pkt, NULL, &got_output);
if (ret < 0) {
printf( "Error encoding frame\n");
exit(1);
}
if (got_output) {
av_packet_rescale_ts(&m_pkt,
(AVRational){1, m_FPS}, m_stream->time_base);
m_pkt.stream_index = m_stream->index;
int ret = av_interleaved_write_frame(m_oc, &m_pkt);
av_packet_unref(&m_pkt);
}
} while (got_output);
av_write_trailer(m_oc);
avio_closep(&m_oc->pb);
avformat_free_context(m_oc);
av_freep(&m_frame->data[0]);
av_frame_free(&m_frame);
av_packet_unref(&m_pkt);
sws_freeContext(m_sws_context);
}This code runs multiple times in the loop.
So my question is - what am I doing wrong ? maybe ffmpeg is using some kind of internal buffering ? If so, how to disable it ? Because such an increase in memory usage is unacceptable at all. -
how to stream live m3u8 file using ffmpeg to youtube rtmp
18 juillet 2018, par Dark webmaster —
I want to restream a live m3u8 file to YouTube. I used the following code :
ffmpeg -re -i http://mypanel.tv:8080/live/****/slyv0955k9/14131.m3u8
-c:v copy -c:a aac -ar 44100 -ab 128k -ac 2 -strict -2 -flags +global_header -bsf:a aac_adtstoasc -bufsize 3000k -f flv "rtmp://live-dfw.twitch.tv/app/{live_231566994_FS4BN0qoJMeXEuWklm6j0l1ODQj9u6}"
And in return I get this from my Linux server :
[root@server ~]# ffmpeg -re -i http://mypanel.tv:8080/live/****/slyv0955k9/14131.m3u8
-c:v copy -c:a aac -ar 44100 -ab 128k -ac 2 -strict -2 -flags +global_header -bsf:a aac_adtstoasc -bufsize 3000k -f flv "rtmp://live-dfw.twitch.tv/app/{live_23156556994_FS4BN0qoJMeXEuWklm6j0l1ODQj9u6}"
ffmpeg version 2.6.8 Copyright (c) 2000-2016 the FFmpeg developers
built with gcc 4.8.5 (GCC) 20150623 (Red Hat 4.8.5-4)
configuration: --prefix=/usr --bindir=/usr/bin --datadir=/usr/share/ffmpeg --incdir=/usr/include/ffmpeg --libdir=/usr/lib64 --mandir=/usr/share/man --arch=x86_64 --optflags='-O2 -g -pipe -Wall -Wp,-D_FORTIFY_SOURCE=2 -fexceptions -fstack-protector-strong --param=ssp-buffer-size=4 -grecord-gcc-switches -m64 -mtune=generic' --enable-bzlib --disable-crystalhd --enable-gnutls --enable-ladspa --enable-libass --enable-libcdio --enable-libdc1394 --enable-libfaac --enable-nonfree --enable-libfdk-aac --enable-nonfree --disable-indev=jack --enable-libfreetype --enable-libgsm --enable-libmp3lame --enable-openal --enable-libopenjpeg --enable-libopus --enable-libpulse --enable-libschroedinger --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libv4l2 --enable-libx264 --enable-libx265 --enable-libxvid --enable-x11grab --enable-avfilter --enable-avresample --enable-postproc --enable-pthreads --disable-static --enable-shared --enable-gpl --disable-debug --disable-stripping --shlibdir=/usr/lib64 --enable-runtime-cpudetect
libavutil 54. 20.100 / 54. 20.100
libavcodec 56. 26.100 / 56. 26.100
libavformat 56. 25.101 / 56. 25.101
libavdevice 56. 4.100 / 56. 4.100
libavfilter 5. 11.102 / 5. 11.102
libavresample 2. 1. 0 / 2. 1. 0
libswscale 3. 1.101 / 3. 1.101
libswresample 1. 1.100 / 1. 1.100
libpostproc 53. 3.100 / 53. 3.100
[h264 @ 0x1029ba0] non-existing SPS 0 referenced in buffering period
Last message repeated 1 times
[h264 @ 0x1073680] non-existing SPS 0 referenced in buffering period
Input #0, hls,applehttp, from 'http://mypanel.tv:8080/live/***/slyv0955k9/14131.m3u8':
Duration: N/A, start: 39062.400000, bitrate: N/A
Program 0
Metadata:
variant_bitrate : 0
Stream #0:0: Video: h264 (Main) ([27][0][0][0] / 0x001B), yuv420p, 1280x720 [SAR 1:1 DAR 16:9], 25 fps, 25 tbr, 90k tbn, 50 tbc
Stream #0:1: Audio: aac (LC) ([15][0][0][0] / 0x000F), 48000 Hz, stereo, fltp, 133 kb/s
At least one output file must be specified
So can anyone help me with this ? Please note I’m not an expert in Linux, so please give me specific commands to restream a live m3u8 file.