Recherche avancée
Médias (2)
-
Rennes Emotion Map 2010-11
19 octobre 2011, par
Mis à jour : Juillet 2013
Langue : français
Type : Texte
-
Carte de Schillerkiez
13 mai 2011, par
Mis à jour : Septembre 2011
Langue : English
Type : Texte
Autres articles (16)
-
Les formats acceptés
28 janvier 2010, par
Les commandes suivantes permettent d’avoir des informations sur les formats et codecs gérés par l’installation locale de ffmpeg :
ffmpeg -codecs ffmpeg -formats
Les formats vidéos acceptés en entrée
Cette liste est non exhaustive, elle met en exergue les principaux formats utilisés : h264 : H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 m4v : raw MPEG-4 video format flv : Flash Video (FLV) / Sorenson Spark / Sorenson H.263 Theora wmv :
Les formats vidéos de sortie possibles
Dans un premier temps on (...) -
Ajouter notes et légendes aux images
7 février 2011, par
Pour pouvoir ajouter notes et légendes aux images, la première étape est d’installer le plugin "Légendes".
Une fois le plugin activé, vous pouvez le configurer dans l’espace de configuration afin de modifier les droits de création / modification et de suppression des notes. Par défaut seuls les administrateurs du site peuvent ajouter des notes aux images.
Modification lors de l’ajout d’un média
Lors de l’ajout d’un média de type "image" un nouveau bouton apparait au dessus de la prévisualisation (...) -
Gestion générale des documents
13 mai 2011, par
MédiaSPIP ne modifie jamais le document original mis en ligne.
Pour chaque document mis en ligne il effectue deux opérations successives : la création d’une version supplémentaire qui peut être facilement consultée en ligne tout en laissant l’original téléchargeable dans le cas où le document original ne peut être lu dans un navigateur Internet ; la récupération des métadonnées du document original pour illustrer textuellement le fichier ;
Les tableaux ci-dessous expliquent ce que peut faire MédiaSPIP (...)
Sur d’autres sites (4942)
-
Encoding .png images with h264 to a file on disk
19 février 2021, par xyfix
Can somebody help me to find out why I end up with a file on disk that is only 24 kb and not readable by vlc or so, while I send valid YUV images to the codec. I have added the .h and .cpp file. Up till "avcodec_receive_packet" everything seems to be OK. The function call "avcodec_send_frame" returns 0, so that must be OK, but "avcodec_receive_packet" returns -11. If I flush the encoder (currently commented) then "avcodec_receive_packet" returns 0 and I can see encoded data if I store it on disk. Also the input image to the encoder is correct (currently commented) and checked. I'm aiming for intra-frame encoding, so I should get the encoded frame data back, but I don't get anything back even if I send 24 images to it.


.h file


#ifndef MOVIECODEC_H
#define MOVIECODEC_H

#include 

extern "C"
{
 #include "Codec/include/libavcodec/avcodec.h"
 #include "Codec/include/libavdevice/avdevice.h"
 #include "Codec/include/libavformat/avformat.h"
 #include "Codec/include/libavutil/avutil.h"
 #include "Codec/include/libavutil/imgutils.h"
 #include "Codec/include/libswscale/swscale.h"
}


// Encodes a sequence of cv::Mat images into a single H.264 movie file.
//
// Intended call sequence: construct with the output file name (this writes
// the container header), call encodeImage() once per picture, then close()
// — which the destructor also runs — to finalize the file.
class MovieCodec
{
public:

 // Opens `filename` and prepares the muxer and the H.264 encoder.
 MovieCodec(const char *filename);

 // Finalizes the file via close().
 ~MovieCodec();

 // Encodes one image as the next video frame.
 void encodeImage( const cv::Mat& image );

 // Flushes the encoder and closes the output file.
 void close();
 
private :

 // Creates the output video stream and configures the encoder context.
 void add_stream();

 // Opens the H.264 encoder (avcodec_open2).
 void openVideoCodec();

 // Encodes `image` and writes the resulting packet(s) to the muxer.
 void write_video_frame(const cv::Mat& image);

 // Converts `image` to a YUV420P AVFrame stored in m_frame.
 void createFrame( const cv::Mat& image );

private:

 // Number of frames written so far (shared by all instances).
 static int s_frameCount;

 // Presentation time of the last frame, in seconds (truncated to int).
 int m_timeVideo = 0;

 // Output file name.
 std::string m_filename;

 // NOTE(review): opened in the constructor but never written through;
 // avio_open() manages the actual output — confirm this handle is needed.
 FILE* m_file;

 AVCodec* m_encoder = NULL;              // H.264 encoder descriptor

 AVOutputFormat* m_outputFormat = NULL;  // container format (deduced from file name)

 AVFormatContext* m_formatCtx = NULL;    // muxer context

 AVCodecContext* m_codecCtx = NULL;      // encoder context

 AVStream* m_streamOut = NULL;           // the single output video stream

 AVFrame* m_frame = NULL;                // current YUV420P frame being encoded

 AVPacket* m_packet = NULL;              // unused — candidate for removal

};


.cpp file


#ifndef MOVIECODEC_CPP
#define MOVIECODEC_CPP

#include "moviecodec.h"


#define STREAM_DURATION 5.0
#define STREAM_FRAME_RATE 24
#define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */


static int sws_flags = SWS_BICUBIC;
int MovieCodec::s_frameCount = 0;

// Construct the codec wrapper: allocate the muxer context for `filename`,
// add an H.264 video stream, open the encoder, open the output file and
// write the container header.
//
// NOTE(review): errors only log and return, leaving the object half-built —
// callers should ideally learn about failure (exception or an is-open flag).
MovieCodec::MovieCodec( const char* filename ) :
    m_filename( filename ),
    m_encoder( avcodec_find_encoder( AV_CODEC_ID_H264 ))
{
    av_log_set_level(AV_LOG_VERBOSE);

    int ret(0);

    // Do NOT fopen() the output ourselves: avio_open() below owns the file.
    // The old fopen(..., "wb") held a second, truncating handle on the same
    // path, which can conflict with the muxer's own writes.
    m_file = NULL;

    // Allocate the output media context; the container format is deduced
    // from the file name extension.
    ret = avformat_alloc_output_context2( &m_formatCtx, NULL, NULL, m_filename.c_str());

    if (!m_formatCtx)
        return;

    m_outputFormat = m_formatCtx->oformat;

    // Add the video stream using the H264 codec.
    add_stream();

    // Open the video codec and allocate the necessary encode buffers.
    if (m_streamOut)
        openVideoCodec();

    av_dump_format( m_formatCtx, 0, m_filename.c_str(), 1);

    // Open the output media file, if the container actually needs one.
    if (!( m_outputFormat->flags & AVFMT_NOFILE))
    {
        ret = avio_open( &m_formatCtx->pb, m_filename.c_str(), AVIO_FLAG_WRITE);

        if (ret < 0)
        {
            char error[255];
            av_strerror( ret, error, 255);   // do not clobber `ret` with av_strerror's result
            fprintf(stderr, "Could not open '%s': %s\n", m_filename.c_str(), error);
            return;
        }
    }

    m_formatCtx->flush_packets = 1;

    // Write the container header. The previous code skipped this (and
    // everything after it) for AVFMT_NOFILE formats, which is wrong: the
    // header must be written for every muxer.
    ret = avformat_write_header( m_formatCtx, NULL );

    if (ret < 0)
    {
        char error[255];
        av_strerror(ret, error, 255);
        fprintf(stderr, "Error occurred when opening output file: %s\n", error);
        return;
    }

    // (The old `if (m_frame) m_frame->pts = 0;` was dead code: m_frame is
    // only allocated later, inside write_video_frame()/createFrame().)
}



// Finalizes and closes the output file by delegating to close().
MovieCodec::~MovieCodec()
{
 close();
}



// Encode one image as the next frame of the movie.
//
// \param image  source picture; converted to YUV420P inside write_video_frame().
void MovieCodec::encodeImage(const cv::Mat &image)
{
    // Stop if the stream was never created (construction failed).
    if (!m_streamOut)
        return;

    // Track the presentation time of the last written frame, in seconds.
    // m_frame only exists after the first write_video_frame() call, so it
    // must be checked before dereferencing. (The original line also had an
    // unbalanced parenthesis — `(double)m_frame->pts)` — i.e. it did not
    // even compile.)
    if (m_frame)
        m_timeVideo = (double)m_frame->pts * av_q2d(m_streamOut->time_base);

    // Encode and mux the frame.
    write_video_frame( image );

    // Advance pts by exactly one frame duration, expressed in the
    // stream's time base.
    if (m_frame)
        m_frame->pts += av_rescale_q(1, m_codecCtx->time_base, m_streamOut->time_base);
}


void MovieCodec::close()
{
 int ret( 0 );

 // Write media trailer
// if( m_formatCtx )
// ret = av_write_trailer( m_formatCtx );

 /* flush the encoder */
 ret = avcodec_send_frame(m_codecCtx, NULL);

 /* Close each codec. */
 if ( m_streamOut )
 {
 av_free( m_frame->data[0]);
 av_free( m_frame );
 }

 if (!( m_outputFormat->flags & AVFMT_NOFILE))
 /* Close the output file. */
 ret = avio_close( m_formatCtx->pb);


 /* free the stream */
 avformat_free_context( m_formatCtx );

 fflush( m_file );
}


// Convert `image` into a freshly allocated YUV420P AVFrame stored in m_frame.
//
// Assumes the source is packed 24-bit data, 3 bytes per pixel, fed to the
// scaler as AV_PIX_FMT_RGB24 — NOTE(review): if `image` is actually BGR
// (OpenCV's default), red and blue will be swapped; confirm with the producer.
void MovieCodec::createFrame( const cv::Mat& image )
{
    // Preserve the running pts: the previous code re-allocated the frame for
    // every image, silently resetting pts to 0 each time.
    int64_t pts = m_frame ? m_frame->pts : 0;

    // Release the previous frame and its av_image_alloc buffer — this was
    // leaked once per frame before.
    if ( m_frame )
    {
        av_free( m_frame->data[0] );
        av_free( m_frame );
        m_frame = NULL;
    }

    m_frame = av_frame_alloc();
    if ( !m_frame )
        return;

    m_frame->format = STREAM_PIX_FMT;
    m_frame->width = image.cols();
    m_frame->height = image.rows();
    m_frame->pict_type = AV_PICTURE_TYPE_I;   // intra-only stream requested
    m_frame->pts = pts;

    int ret = av_image_alloc(m_frame->data, m_frame->linesize, m_frame->width, m_frame->height, STREAM_PIX_FMT, 1);
    if (ret < 0)
        return;

    // RGB24 -> YUV420P colour conversion (use the file-wide scaler flags
    // instead of the hard-coded 0 that ignored them).
    struct SwsContext* sws_ctx = sws_getContext((int)image.cols(), (int)image.rows(), AV_PIX_FMT_RGB24,
        (int)image.cols(), (int)image.rows(), STREAM_PIX_FMT, sws_flags, NULL, NULL, NULL);
    if ( !sws_ctx )
        return;

    const uint8_t* rgbData[1] = { (uint8_t* )image.getData() };
    int rgbLineSize[1] = { 3 * image.cols() };

    sws_scale(sws_ctx, rgbData, rgbLineSize, 0, image.rows(), m_frame->data, m_frame->linesize);

    // The scaler context was leaked on every call before; free it here.
    // (A further improvement: cache it while the geometry is unchanged.)
    sws_freeContext( sws_ctx );
}


/* Add an output stream. */
void MovieCodec::add_stream()
{
 AVCodecID codecId = AV_CODEC_ID_H264;

 if (!( m_encoder ))
 {
 fprintf(stderr, "Could not find encoder for '%s'\n",
 avcodec_get_name(codecId));
 return;
 }

 // Get the stream for codec
 m_streamOut = avformat_new_stream(m_formatCtx, m_encoder);

 if (!m_streamOut) {
 fprintf(stderr, "Could not allocate stream\n");
 return;
 }

 m_streamOut->id = m_formatCtx->nb_streams - 1;

 m_codecCtx = avcodec_alloc_context3( m_encoder);

 m_streamOut->codecpar->codec_id = codecId;
 m_streamOut->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
 m_streamOut->codecpar->bit_rate = 400000;
 m_streamOut->codecpar->width = 800;
 m_streamOut->codecpar->height = 640;
 m_streamOut->codecpar->format = STREAM_PIX_FMT;
 m_streamOut->codecpar->codec_tag = 0x31637661;
 m_streamOut->codecpar->video_delay = 0;
 m_streamOut->time_base = { 1, STREAM_FRAME_RATE };


 avcodec_parameters_to_context( m_codecCtx, m_streamOut->codecpar);
 
 m_codecCtx->gop_size = 0; /* emit one intra frame every twelve frames at most */
 m_codecCtx->max_b_frames = 0;
 m_codecCtx->time_base = { 1, STREAM_FRAME_RATE };
 m_codecCtx->framerate = { STREAM_FRAME_RATE, 1 };
 m_codecCtx->pix_fmt = STREAM_PIX_FMT;



 if (m_streamOut->codecpar->codec_id == AV_CODEC_ID_H264)
 {
 av_opt_set( m_codecCtx, "preset", "ultrafast", 0 );
 av_opt_set( m_codecCtx, "vprofile", "baseline", 0 );
 av_opt_set( m_codecCtx, "tune", "zerolatency", 0 );
 }

// /* Some formats want stream headers to be separate. */
// if (m_formatCtx->oformat->flags & AVFMT_GLOBALHEADER)
// m_codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;


}


void MovieCodec::openVideoCodec()
{
 int ret;

 /* open the codec */
 ret = avcodec_open2(m_codecCtx, m_encoder, NULL);

 if (ret < 0)
 {
 char error[255];
 av_strerror(ret, error, 255);
 fprintf(stderr, "Could not open video codec: %s\n", error);
 }
}



// Encode `image` and write every packet the encoder produces to the muxer.
//
// The old "raw video" branch (`oformat->flags & 0x0020`, i.e. the deprecated
// AVFMT_RAWPICTURE, with `pkt.size = sizeof(AVPicture)`) was removed: this
// class always muxes an encoded H.264 stream, so that branch was dead and
// relied on deprecated/removed API.
void MovieCodec::write_video_frame( const cv::Mat& image )
{
    int ret;

    createFrame( image );

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;

    /* Feed the frame to the encoder. */
    ret = avcodec_send_frame(m_codecCtx, m_frame);

    if (ret < 0)
    {
        char error[255];
        av_strerror(ret, error, 255);
        fprintf(stderr, "Error encoding video frame: %s\n", error);
        return;
    }

    // Pull out every packet currently available. AVERROR(EAGAIN) is NOT an
    // error here: it simply means the encoder buffered the frame and needs
    // more input (or a flush at close()) before it can emit output — the
    // previous code treated this as a hard failure.
    while ((ret = avcodec_receive_packet(m_codecCtx, &pkt)) == 0)
    {
        pkt.stream_index = m_streamOut->index;

        // Timestamps come out in the codec time base; the muxer expects
        // the stream time base.
        av_packet_rescale_ts(&pkt, m_codecCtx->time_base, m_streamOut->time_base);

        /* Write the compressed frame to the media file. */
        ret = av_write_frame( m_formatCtx, &pkt );
        av_packet_unref(&pkt);

        if (ret < 0)
            break;
    }

    if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
    {
        char error[255];
        av_strerror(ret, error, 255);
        fprintf(stderr, "Error while writing video frame: %s\n", error);
        return;
    }

    s_frameCount++;
}


-
FFmpeg stream stops after a certain time
7 avril 2021, par Anarno
We have a little Nodejs app, which starts a stream process with a
child_process.spawn. On the client-side, we have an HTML5-canvas element, which records the video data (new MediaRecorder(canvas.captureStream(30), config)), then this client sends its data to our Nodejs server over a WebSocket connection. We use FFmpeg for video encoding and decoding, then we send the data to our 3rd-party service (MUX), which accepts the stream and broadcasts it. Sadly the process continuously loses fps, and after roughly 1 minute, stops with an interesting error code (when we save the video result locally instead of streaming via rtmps, it works perfectly).

*The whole system is in docker.


The error :


stderr: [tls @ 0x7f998e7bca40] Error in the pull function.
Our_app_logs: | av_interleaved_write_frame(): I/O error 
Our_app_logs: | [flv @ 0x7f998eeb1680] Failed to update header with correct duration.
Our_app_logs: | [flv @ 0x7f998eeb1680] Failed to update header with correct filesize.
Our_app_logs: | Error writing trailer of rtmps://global-live.mux.com/app/94e85197-78a3-f092-3437-03d93aba74e0: I/O error
Our_app_logs: | <buffer 5b="5b" 74="74" 6c="6c" 73="73" 20="20" 40="40" 30="30" 78="78" 37="37" 66="66" 39="39" 38="38" 65="65" 62="62" 63="63" 61="61" 34="34" 5d="5d" 45="45" 72="72" 6f="6f" 69="69" 6e="6e" 68="68" 70="70" 75="75" 2e="2e">
Our_app_logs: | stderr: frame= 1478 fps= 25 q=23.0 Lsize= 
 402kB time=00:01:02.89 bitrate= 52.4kbits/s speed=1.05x
Our_app_logs: | video:369kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 9.034639%
Our_app_logs: | <buffer 66="66" 72="72" 61="61" 6d="6d" 65="65" 3d="3d" 20="20" 31="31" 34="34" 37="37" 38="38" 70="70" 73="73" 32="32" 35="35" 71="71" 33="33" 2e="2e" 30="30" 4c="4c" 69="69" 7a="7a" 6b="6b" 42="42" 74="74">
Our_app_logs: | stderr: [tls @ 0x7f998e7bca40] <buffer 5b="5b" 6c="6c" 73="73" 20="20" 40="40" 30="30" 78="78" 37="37" 66="66" 39="39" 38="38" 65="65" 62="62" 63="63" 61="61" 34="34" 5d="5d"> 
Our_app_logs: | stderr: The specified session has been invalidated for some reason.
Our_app_logs: | <buffer 54="54" 68="68" 65="65" 20="20" 73="73" 70="70" 63="63" 69="69" 66="66" 64="64" 6f="6f" 6e="6e" 61="61" 62="62" 76="76" 6c="6c" 74="74" 72="72" 6d="6d">
Our_app_logs: | stderr: Last message repeated 1 times 
Our_app_logs: | <buffer 20="20" 4c="4c" 61="61" 73="73" 74="74" 6d="6d" 65="65" 67="67" 72="72" 70="70" 64="64" 31="31" 69="69" 0a="0a"> 
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: frame I:13 Avg QP: 5.39 size: 
2478
Our_app_logs: | <buffer 66="66" 72="72" 61="61" 6d="6d" 65="65" 20="20" 49="49" 3a="3a" 31="31" 33="33" 41="41" 76="76" 67="67" 51="51" 50="50" 35="35" 2e="2e" 39="39" 73="73" 69="69" 7a="7a" 32="32" 34="34" 37="37" 38="38" 0a="0a">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: frame P:1465 Avg QP:13.51 size: 
 235
Our_app_logs: | <buffer 66="66" 72="72" 61="61" 6d="6d" 65="65" 20="20" 50="50" 3a="3a" 31="31" 34="34" 36="36" 35="35" 41="41" 76="76" 67="67" 51="51" 33="33" 2e="2e" 73="73" 69="69" 7a="7a" 32="32" 0a="0a">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: mb I I16..4: 99.2% 0.1% 0.7% 
Our_app_logs: | <buffer 6d="6d" 62="62" 20="20" 49="49" 31="31" 36="36" 2e="2e" 34="34" 3a="3a" 39="39" 32="32" 25="25" 30="30" 37="37" 0a="0a"> 
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: mb P I16..4: 0.3% 0.0% 0.0% P16..4: 0.1% 0.0% 0.0% 0.0% 0.0% skip:99.6%
Our_app_logs: | <buffer 6d="6d" 62="62" 20="20" 50="50" 49="49" 31="31" 36="36" 2e="2e" 34="34" 3a="3a" 30="30" 33="33" 25="25">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: 8x8 transform intra:0.3% inter:17.3%
Our_app_logs: | <buffer 38="38" 78="78" 20="20" 74="74" 72="72" 61="61" 6e="6e" 73="73" 66="66" 6f="6f" 6d="6d" 69="69" 3a="3a" 30="30" 2e="2e" 33="33" 25="25" 65="65" 31="31" 37="37" 0a="0a">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: coded y,uvDC,uvAC intra: 1.4% 6.9% 
4.7% inter: 0.0% 0.0% 0.0%
Our_app_logs: | <buffer 63="63" 6f="6f" 64="64" 65="65" 20="20" 79="79" 2c="2c" 75="75" 76="76" 44="44" 43="43" 41="41" 69="69" 6e="6e" 74="74" 72="72" 61="61" 3a="3a" 31="31" 2e="2e" 34="34" 25="25" 36="36" 39="39" 37="37" 30="30">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: i16 v,h,dc,p: 90% 5% 5% 0%
Our_app_logs: | <buffer 69="69" 31="31" 36="36" 20="20" 76="76" 2c="2c" 68="68" 64="64" 63="63" 70="70" 3a="3a" 39="39" 30="30" 25="25" 35="35" 0a="0a">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: i8 v,h,dc,ddl,ddr,vr,hd,vl,hu: 23% 
18% 51% 6% 0% 0% 0% 0% 3%
Our_app_logs: | <buffer 69="69" 38="38" 20="20" 76="76" 2c="2c" 68="68" 64="64" 63="63" 6c="6c" 72="72" 75="75" 3a="3a" 32="32" 33="33" 25="25" 31="31" 35="35" 36="36" 30="30">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 33% 
25% 40% 0% 0% 0% 0% 0% 0%
Our_app_logs: | <buffer 69="69" 34="34" 20="20" 76="76" 2c="2c" 68="68" 64="64" 63="63" 6c="6c" 72="72" 75="75" 3a="3a" 33="33" 25="25" 32="32" 35="35" 30="30">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: i8c dc,h,v,p: 86% 7% 6% 0%
Our_app_logs: | <buffer 69="69" 38="38" 63="63" 20="20" 64="64" 2c="2c" 68="68" 76="76" 70="70" 3a="3a" 36="36" 25="25" 37="37" 30="30" 0a="0a">
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: Weighted P-Frames: Y:0.1% UV:0.1% 
Our_app_logs: | <buffer 57="57" 65="65" 69="69" 67="67" 68="68" 74="74" 64="64" 20="20" 50="50" 2d="2d" 46="46" 72="72" 61="61" 6d="6d" 73="73" 3a="3a" 59="59" 30="30" 2e="2e" 31="31" 25="25" 55="55" 56="56" 0a="0a"> 
Our_app_logs: | stderr: [libx264 @ 0x7f998e790080] <buffer 5b="5b" 6c="6c" 69="69" 62="62" 78="78" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 37="37" 66="66" 39="39" 38="38" 65="65" 5d="5d">
Our_app_logs: | stderr: kb/s:2041.23
Our_app_logs: | <buffer 6b="6b" 62="62" 2f="2f" 73="73" 3a="3a" 32="32" 30="30" 34="34" 31="31" 2e="2e" 33="33" 0a="0a">
Our_app_logs: | stderr: Conversion failed!
Our_app_logs: | <buffer 43="43" 6f="6f" 6e="6e" 76="76" 65="65" 72="72" 73="73" 69="69" 20="20" 66="66" 61="61" 6c="6c" 64="64" 21="21" 0a="0a">
Our_app_logs: | close, code: 1, signal: null
Our_app_logs: | from react application: 14203
Our_app_logs: | Status ok...
Our_app_logs: | Data ok...
Our_app_logs: | FFmpeg ok...
Our_app_logs: | Writeable ok... <buffer c4="c4" 81="81" 0e="0e" 11="11" 00="00" 01="01" 61="61" c7="c7" 80="80" 5b="5b" b6="b6" 72="72" 03="03" bc="bc" b7="b7" de="de" 59="59" 7f="7f" 3c="3c" 27="27" b3="b3" 87="87" b2="b2" e6="e6" 84="84" d0="d0" f0="f0" 02="02" 2d="2d" c0="c0" 08="08" 70="70"> undefined
Our_app_logs: | stderr: ffmpeg version 4.2.4 Copyright (c) 
2000-2020 the FFmpeg developers
Our_app_logs: | built with gcc 9.2.0 (Alpine 9.2.0)
Our_app_logs: | configuration: --prefix=/usr --enable-avresample --enable-avfilter --enable-gnutls --enable-gpl --enable-libass --enable-libmp3lame --enable-libvorbis --enable-libvpx --enable-libxvid --enable-libx264 --enable-libx265 --enable-libtheora --enable-libv4l2 --enable-postproc --enable-pic --enable-pthreads --enable-shared --enable-libxcb --disable-stripping --disable-static --disable-librtmp --enable-vaapi --enable-vdpau --enable-libopus --disable-debug
Our_app_logs: | <buffer 66="66" 6d="6d" 70="70" 65="65" 67="67" 20="20" 76="76" 72="72" 73="73" 69="69" 6f="6f" 6e="6e" 34="34" 2e="2e" 32="32" 43="43" 79="79" 68="68" 74="74" 28="28" 63="63" 29="29" 30="30" 2d="2d" 46="46">
Our_app_logs: | stderr: libavutil 56. 31.100 / 56. 31.100
Our_app_logs: | libavcodec 58. 54.100 / 58. 54.100 
Our_app_logs: | libavformat 58. 29.100 / 58. 29.100
Our_app_logs: | libavdevice 58. 8.100 / 58. 8.100
Our_app_logs: | libavfilter 7. 57.100 / 7. 57.100
Our_app_logs: | libavresample 4. 0. 0 / 4. 0. 0 
Our_app_logs: | libswscale 5. 5.100 / 5. 5.100 
Our_app_logs: | libswresample 3. 5.100 / 3. 5.100 
Our_app_logs: | libpostproc 55. 5.100 / 55. 5.100 
Our_app_logs: | <buffer 20="20" 6c="6c" 69="69" 62="62" 61="61" 76="76" 75="75" 74="74" 35="35" 36="36" 2e="2e" 33="33" 31="31" 30="30" 2f="2f" 0a="0a" 63="63" 6f="6f">
Our_app_logs: | stderr: [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | Last message repeated 4 times
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | <buffer 5b="5b" 68="68" 32="32" 36="36" 34="34" 20="20" 40="40" 30="30" 78="78" 37="37" 66="66" 39="39" 5d="5d" 6e="6e" 6f="6f" 2d="2d" 65="65" 69="69" 73="73" 74="74" 67="67" 50="50" 53="53" 31="31" 72="72">
Our_app_logs: | stderr: [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | Last message repeated 5 times
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_headerOur_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] non-existing PPS 14 referenced
Our_app_logs: | [h264 @ 0x7f299f44f600] decode_slice_header error
Our_app_logs: | [h264 @ 0x7f299f44f600] no frame!

</buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer></buffer>

The FFmpeg config :


// Argument vector handed to the ffmpeg child process: read the raw
// recording from stdin and push an FLV stream to the RTMP(S) url that the
// caller appends after these flags.
const FFMPEG_CONFIG = [
  // input: piped in on stdin
  '-i', '-',

  // video: x264 tuned for live, low-latency delivery
  '-c:v', 'libx264',
  '-preset', 'veryfast',
  '-tune', 'zerolatency',

  // audio: AAC, 44.1 kHz sampling, 64 kbit/s
  '-c:a', 'aac',
  '-ar', '44100',
  '-b:a', '64k',

  // overwrite the output without asking
  '-y',

  // wall-clock timestamps, used for audio sync
  '-use_wallclock_as_timestamps', '1',
  '-async', '1',

  // small mux buffer, FLV container for RTMP
  '-bufsize', '1000',
  '-f', 'flv',
];


The process :


// Spawn ffmpeg with the shared argument list, streaming to `url`.
// NOTE(review): naming this constant `process` shadows Node's global
// `process` object for the rest of this scope — consider renaming.
const process = child_process.spawn('ffmpeg', [
 ...FFMPEG_CONFIG,
 // 'local.bin',
 url,
 ]);
 
 // ffmpeg writes its progress/diagnostics to stderr; log it verbatim.
 process.stderr.on('data', data => {
 console.log(`stderr: ${data}`, data);
 });

 // Writing to stdin after ffmpeg exits raises EPIPE; log instead of crashing.
 process.stdin.on('error', e => {
 console.log('FFmpeg STDIN Error', e);
 });

 // Spawn-level failures (e.g. ffmpeg binary not found).
 process.on('error', err => console.log(err));

 // Exit notification — `code` 1 here is what the logs above show.
 process.on('close', (code, signal) => {
 console.log(`close, code: ${code}, signal: ${signal}`);
 });


The writing :


// Forward one binary chunk from the WebSocket to ffmpeg's stdin.
// Non-Buffer frames are ignored, and data is dropped once stdin is no
// longer writable (ffmpeg exited or its stdin errored).
if (!Buffer.isBuffer(data)) return;
if (!process.stdin.writable) return;
process.stdin.write(data);


-
FFMPEG C++ Non monotonically increasing dts to muxer
14 février 2021, par blacksmith
I have two inputs :


- 

- Stream with audio and video
- Stream with only video






I want to generate an ouput stream mixing Audio from the first stream and Video from the second stream.


But when i generate the output it says i am non monotonically increasing dts to muxer. I think im messing up with the dts.
Please help !!


Here is my code :


#include <iostream>

//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat></libavformat>avformat.h>
#include <libavutil></libavutil>mathematics.h>
#include <libavutil></libavutil>time.h>
#ifdef __cplusplus
}
#endif

using namespace std;
int main(int argc, char* argv[])
{
 AVOutputFormat* ofmt = NULL;
 //Input AVFormatContext and Output AVFormatContext
 AVFormatContext *audio_video_ifmt_ctx = NULL,
 * only_video_ifmt_ctx = NULL,
 * ofmt_ctx = NULL;

 AVPacket audio_video_pkt, only_video_pkt;
 const char *audio_video_in_filename, * only_video_in_filename, * out_filename;
 int ret, i;
 int audio_video_audioindex = -1;
 int audio_video_videoindex = -1;
 int only_video_videoindex = -1;
 int audio_video_dataindex = -1;
 int only_video_dataindex = -1;


 int audio_video_frame_index = 0, only_video_frame_index = 0;
 audio_video_in_filename = "rtmp://localhost/live/STREAM_NAME.flv";
 only_video_in_filename = "rtmp://localhost/live/STREAM_NAME2.flv";
 out_filename = "rtmp://localhost/live/STREAM_NAME3.flv";
 //Network
 avformat_network_init();

 //Input ONLY VIDEO
 if ((ret = avformat_open_input(&only_video_ifmt_ctx, only_video_in_filename, 0, 0)) < 0) {
 printf("Could not open audio and video input file.");
 return 0;
 }
 if ((ret = avformat_find_stream_info(only_video_ifmt_ctx, 0)) < 0) {
 printf("Failed to retrieve audio and video input stream information");
 return 0;
 }
 //Input AUDIO AND VIDEO
 if ((ret = avformat_open_input(&audio_video_ifmt_ctx, audio_video_in_filename, 0, 0)) < 0) {
 printf("Could not open only video input file.");
 return 0;
 }
 if ((ret = avformat_find_stream_info(audio_video_ifmt_ctx, 0)) < 0) {
 printf("Failed to retrieve only video input stream information");
 return 0;
 }
 //Search audio_video stream index
 for (i = 0; i < audio_video_ifmt_ctx->nb_streams; i++) {
 if (audio_video_ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
 audio_video_videoindex = i;
 }
 if (audio_video_ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
 audio_video_audioindex = i;
 }
 if (audio_video_ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_DATA) {
 audio_video_dataindex = i;
 }
 }

 //Search only_video stream index
 for (i = 0; i < only_video_ifmt_ctx->nb_streams; i++) {
 if (only_video_ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
 only_video_videoindex = i;
 }
 else if(audio_video_ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_DATA) {
 only_video_dataindex = i;
 }
 }

 av_dump_format(audio_video_ifmt_ctx, 0, audio_video_in_filename, 0);
 av_dump_format(only_video_ifmt_ctx, 0, only_video_in_filename, 0);

 //Allocate output stream context
 avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename); //RTMP
 if (!ofmt_ctx) {
 printf("Could not create output context\n");
 ret = AVERROR_UNKNOWN;
 return 0;
 }
 ofmt = ofmt_ctx->oformat;

 //Create Output Data and Audio Stream
 for (i = 0; i < audio_video_ifmt_ctx->nb_streams; i++) {
 AVStream* video_in_stream;
 //Create output AVStream according to input AVStream
 if (i == audio_video_dataindex) {
 video_in_stream = audio_video_ifmt_ctx->streams[audio_video_dataindex];
 }
 else if (i == audio_video_audioindex) {
 video_in_stream = audio_video_ifmt_ctx->streams[audio_video_audioindex];
 }
 else {
 video_in_stream = NULL;
 continue;
 }


 //AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
 AVCodec* codec = avcodec_find_decoder(video_in_stream->codecpar->codec_id);
 AVStream* out_stream = avformat_new_stream(ofmt_ctx, codec);

 if (!out_stream) {
 printf("Failed allocating output stream\n");
 ret = AVERROR_UNKNOWN;
 return 0;
 }

 AVCodecContext* p_codec_ctx = avcodec_alloc_context3(codec);
 ret = avcodec_parameters_to_context(p_codec_ctx, video_in_stream->codecpar);

 //Copy the settings of AVCodecContext
 if (ret < 0) {
 printf("Failed to copy context from input to output stream codec context\n");
 return 0;
 }
 p_codec_ctx->codec_tag = 0;
 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
 p_codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 ret = avcodec_parameters_from_context(out_stream->codecpar, p_codec_ctx);
 if (ret < 0) {
 av_log(NULL, AV_LOG_ERROR, "eno:[%d] error to paramters codec paramter \n", ret);
 }
 }
 
 //Create Output Video Stream
 for (i = 0; i < only_video_ifmt_ctx->nb_streams; i++) {
 AVStream* video_in_stream;
 //Create output AVStream according to input AVStream
 if (i == only_video_dataindex) {
 continue;
 video_in_stream = only_video_ifmt_ctx->streams[only_video_dataindex];
 }
 else if (i == only_video_videoindex) {
 video_in_stream = only_video_ifmt_ctx->streams[only_video_videoindex];
 }
 else {
 video_in_stream = NULL;
 continue;
 }


 //AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
 AVCodec* codec = avcodec_find_decoder(video_in_stream->codecpar->codec_id);
 AVStream* out_stream = avformat_new_stream(ofmt_ctx, codec);

 if (!out_stream) {
 printf("Failed allocating output stream\n");
 ret = AVERROR_UNKNOWN;
 return 0;
 }

 AVCodecContext* p_codec_ctx = avcodec_alloc_context3(codec);
 ret = avcodec_parameters_to_context(p_codec_ctx, video_in_stream->codecpar);

 //Copy the settings of AVCodecContext
 if (ret < 0) {
 printf("Failed to copy context from input to output stream codec context\n");
 return 0;
 }
 p_codec_ctx->codec_tag = 0;
 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
 p_codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 ret = avcodec_parameters_from_context(out_stream->codecpar, p_codec_ctx);
 if (ret < 0) {
 av_log(NULL, AV_LOG_ERROR, "eno:[%d] error to paramters codec paramter \n", ret);
 }
 }

 
 
 //Dump Format------------------
 av_dump_format(ofmt_ctx, 0, out_filename, 1);
 //Open output URL
 if (!(ofmt->flags & AVFMT_NOFILE)) {
 ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
 if (ret < 0) {
 printf("Could not open output URL '%s'", out_filename);
 goto end;
 }
 }
 //Write output file header
 ret = avformat_write_header(ofmt_ctx, NULL);
 if (ret < 0) {
 printf("Error occurred when opening output URL\n");
 goto end;
 }
 //Pull stream
 while (1)
 {

 AVStream *audio_video_in_stream, * only_video_in_stream, * out_stream;
 ret = av_read_frame(audio_video_ifmt_ctx, &audio_video_pkt);
 ret = av_read_frame(only_video_ifmt_ctx, &only_video_pkt);
 if (ret < 0)
 break;

 audio_video_in_stream = audio_video_ifmt_ctx->streams[audio_video_pkt.stream_index];
 only_video_in_stream = only_video_ifmt_ctx->streams[only_video_pkt.stream_index];
 
 out_stream = NULL;

 ///// TREAT AUDIO PACKETS /////

 if (audio_video_pkt.stream_index == audio_video_dataindex) {
 out_stream = ofmt_ctx->streams[0];
 }
 if (audio_video_pkt.stream_index == audio_video_audioindex) {
 out_stream = ofmt_ctx->streams[1];
 }
 
 
 //Convert PTS/DTS
 if (out_stream != NULL) {
 audio_video_pkt.pts = av_rescale_q_rnd(audio_video_pkt.pts, audio_video_in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
 audio_video_pkt.dts = av_rescale_q_rnd(audio_video_pkt.dts, audio_video_in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
 audio_video_pkt.duration = av_rescale_q(audio_video_pkt.duration, audio_video_in_stream->time_base, out_stream->time_base);
 audio_video_pkt.pos = -1;

 
 }
 //Writing audio stream
 if (out_stream != NULL) {
 ret = av_interleaved_write_frame(ofmt_ctx, &audio_video_pkt);
 }
 
 ///// TREAT VIDEO PACKETS /////

 if (only_video_pkt.stream_index == only_video_videoindex) {
 out_stream = ofmt_ctx->streams[2];
 }

 //Convert PTS/DTS
 if (out_stream != NULL) {
 only_video_pkt.pts = av_rescale_q_rnd(only_video_pkt.pts, only_video_in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
 only_video_pkt.dts = av_rescale_q_rnd(only_video_pkt.dts, only_video_in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
 only_video_pkt.duration = av_rescale_q(only_video_pkt.duration, only_video_in_stream->time_base, out_stream->time_base);
 only_video_pkt.pos = -1;
 }
 //Writing video
 if (out_stream != NULL) {
 ret = av_interleaved_write_frame(ofmt_ctx, &only_video_pkt);
 }
 //Desreferenciamos el paquete
 av_packet_unref(&audio_video_pkt);
 av_packet_unref(&only_video_pkt);

 if (ret < 0) {
 printf("Error muxing packet\n");
 break;
 }
 }

 //Write file trailer
 av_write_trailer(ofmt_ctx);
end:
 avformat_close_input(&audio_video_ifmt_ctx);
 avformat_close_input(&only_video_ifmt_ctx);

 /* close output */
 if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
 avio_close(ofmt_ctx->pb);
 avformat_free_context(ofmt_ctx);
 if (ret < 0 && ret != AVERROR_EOF) {
 printf("Error occurred.\n");
 return -1;
 }
 return 0;
}
</iostream>

Here is the output :


Input #0, flv, from 'rtmp://localhost/live/STREAM_NAME.flv':
 Metadata:
 fileSize : 0
 audiochannels : 2
 2.1 : false
 3.1 : false
 4.0 : false
 4.1 : false
 5.1 : false
 7.1 : false
 encoder : obs-output module (libobs version 25.0.4)
 Duration: 00:00:00.00, start: 2910.332000, bitrate: N/A
 Stream #0:0: Data: none
 Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp, 163 kb/s
 Stream #0:2: Video: h264 (High), yuv420p(progressive), 1920x1080, 2560 kb/s, 23.98 fps, 23.98 tbr, 1k tbn, 47.95 tbc
Input #0, flv, from 'rtmp://localhost/live/STREAM_NAME2.flv':
 Metadata:
 encoder : Lavf58.65.101
 Duration: 00:00:00.00, start: 2726.365000, bitrate: N/A
 Stream #0:0: Data: none
 Stream #0:1: Video: h264 (High), yuv420p(progressive), 1920x1080, 52 fps, 52 tbr, 1k tbn, 104 tbc
Output #0, flv, to 'rtmp://localhost/live/STREAM_NAME3.flv':
 Stream #0:0: Data: none
 Stream #0:1: Audio: aac (LC), 44100 Hz, stereo, fltp, 163 kb/s
 Stream #0:2: Video: h264 (High), yuv420p(progressive), 1920x1080, q=2-31
[flv @ 0000014F2CDB9500] Application provided invalid, non monotonically increasing dts to muxer in stream 1: 2910332 >= 2726346
Error muxing packet
[flv @ 0000014F2CDB9500] Failed to update header with correct duration.
[flv @ 0000014F2CDB9500] Failed to update header with correct filesize.
Error occurred.