
Other articles (27)
-
The SPIPmotion queue
28 November 2010
A queue stored in the database
When it is installed, SPIPmotion creates a new table in the database named spip_spipmotion_attentes.
This new table is made up of the following fields: id_spipmotion_attente, the unique numeric identifier of the task to be processed; id_document, the numeric identifier of the original document to encode; id_objet, the unique identifier of the object to which the encoded document should be attached automatically; objet, the type of object to which (...) -
Customising categories
21 June 2013
Category creation form
For those who know SPIP well, a category can be thought of as a section (rubrique).
For a document of type category, the fields offered by default are: Texte
This form can be modified under:
Administration > Configuration des masques de formulaire.
For a document of type media, the fields not displayed by default are: Descriptif rapide
It is also in this configuration area that you can specify the (...) -
Contribute to documentation
13 April 2011
Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation by users as well as developers, including: critique of existing features and functions; articles contributed by developers, administrators, content producers and editors; screenshots to illustrate the above; translations of existing documentation into other languages.
To contribute, register to the project users’ mailing (...)
On other sites (5687)
-
Writing image to RTP with ffmpeg
22 September 2017, by Gaulois94
I am trying to send real-time images over the network efficiently. For this, I thought the RTP protocol used for video streaming could be a good way to achieve it.
I tried this:
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavutil/opt.h>
#include <libavutil/channel_layout.h>
#include <libavutil/common.h>
#include <libavutil/imgutils.h>
#include <libavutil/mathematics.h>
#include <libavutil/samplefmt.h>
}
#include <iostream>
#include <cstdio>   // printf, fopen (header name stripped by the page; assumed)
#include <unistd.h> // usleep (header name stripped by the page; assumed)
//Mainly based on https://stackoverflow.com/questions/40825300/ffmpeg-create-rtp-stream
int main()
{
//Init ffmpeg
avcodec_register_all();
av_register_all();
avformat_network_init();
//Init the codec used to encode our given image
AVCodecID codecID = AV_CODEC_ID_MPEG4;
AVCodec* codec;
AVCodecContext* codecCtx;
codec = avcodec_find_encoder(codecID);
codecCtx = avcodec_alloc_context3(codec);
//codecCtx->bit_rate = 400000;
codecCtx->width = 352;
codecCtx->height = 288;
codecCtx->time_base.num = 1;
codecCtx->time_base.den = 25;
codecCtx->gop_size = 25;
codecCtx->max_b_frames = 1;
codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
if (codecID == AV_CODEC_ID_H264)
{
av_opt_set(codecCtx->priv_data, "preset", "ultrafast", 0);
av_opt_set(codecCtx->priv_data, "tune", "zerolatency", 0);
}
avcodec_open2(codecCtx, codec, NULL);
//Init the Frame containing our raw data
AVFrame* frame;
frame = av_frame_alloc();
frame->format = codecCtx->pix_fmt;
frame->width = codecCtx->width;
frame->height = codecCtx->height;
av_image_alloc(frame->data, frame->linesize, frame->width, frame->height, codecCtx->pix_fmt, 32);
//Init the format context
AVFormatContext* fmtCtx = avformat_alloc_context();
AVOutputFormat* format = av_guess_format("rtp", NULL, NULL);
avformat_alloc_output_context2(&fmtCtx, format, format->name, "rtp://127.0.0.1:49990");
avio_open(&fmtCtx->pb, fmtCtx->filename, AVIO_FLAG_WRITE);
//Configure the AVStream for the output format context
struct AVStream* stream = avformat_new_stream(fmtCtx, codec);
avcodec_parameters_from_context(stream->codecpar, codecCtx);
stream->time_base.num = 1;
stream->time_base.den = 25;
/* Rewrite the header */
avformat_write_header(fmtCtx, NULL);
/* Write a file for VLC */
char buf[200000];
AVFormatContext *ac[] = { fmtCtx };
av_sdp_create(ac, 1, buf, 20000);
printf("sdp:\n%s\n", buf);
FILE* fsdp = fopen("test.sdp", "w");
fprintf(fsdp, "%s", buf);
fclose(fsdp);
AVPacket pkt;
int j = 0;
for(int i = 0; i < 10000; i++)
{
fflush(stdout);
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
int R, G, B;
R = G = B = i % 255;
int Y = 0.257 * R + 0.504 * G + 0.098 * B + 16;
int U = -0.148 * R - 0.291 * G + 0.439 * B + 128;
int V = 0.439 * R - 0.368 * G - 0.071 * B + 128;
/* prepare a dummy image */
/* Y */
for (int y = 0; y < codecCtx->height; y++)
for (int x = 0; x < codecCtx->width; x++)
frame->data[0][y * codecCtx->width + x] = Y;
for (int y = 0; y < codecCtx->height/2; y++)
for (int x=0; x < codecCtx->width / 2; x++)
{
frame->data[1][y * frame->linesize[1] + x] = U;
frame->data[2][y * frame->linesize[2] + x] = V;
}
/* Which frame is it ? */
frame->pts = i;
/* Send the frame to the codec */
avcodec_send_frame(codecCtx, frame);
/* Use the data in the codec to the AVPacket */
switch(avcodec_receive_packet(codecCtx, &pkt))
{
case AVERROR_EOF:
printf("Stream EOF\n");
break;
case AVERROR(EAGAIN):
printf("Stream EAGAIN\n");
break;
default:
printf("Write frame %3d (size=%5d)\n", j++, pkt.size);
/* Write the data on the packet to the output format */
av_interleaved_write_frame(fmtCtx, &pkt);
/* Reset the packet */
av_packet_unref(&pkt);
break;
}
usleep(1e6/25);
}
// end
avcodec_send_frame(codecCtx, NULL);
//Free everything
av_free(codecCtx);
av_free(fmtCtx);
return 0;
}
And with VLC I can see one image, but not a video (I have to reload the stream to see another image in grayscale).
Does anyone know why VLC doesn't play my video properly? Thank you!
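A likely culprit here is the packet time base: the encoder produces pts/dts in codecCtx->time_base (1/25), while the RTP muxer sets the stream time base to 1/90000 at avformat_write_header(), so every packet lands at almost the same timestamp. Below is a minimal sketch of the write step with the timestamps rescaled, reusing codecCtx, stream, fmtCtx and pkt from the snippet above; it is a sketch of the idea, not a tested fix.

/* Sketch: rescale packet timestamps from the encoder time base (1/25)
 * to the stream time base chosen by the RTP muxer before writing. */
while (avcodec_receive_packet(codecCtx, &pkt) == 0)
{
    av_packet_rescale_ts(&pkt, codecCtx->time_base, stream->time_base);
    pkt.stream_index = stream->index;
    av_interleaved_write_frame(fmtCtx, &pkt);
    av_packet_unref(&pkt);
}

Looping on avcodec_receive_packet() also drains the encoder, which may buffer a few frames and return EAGAIN at the start.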
-
C++ FFmpeg create mp4 file
1 February 2021, by DDovzhenko
I'm trying to create an mp4 video file with FFmpeg and C++, but the resulting file is broken (the Windows player shows "Can't play ... 0xc00d36c4"). If I create an .h264 file instead, it can be played with 'ffplay' and successfully converted to mp4 on the command line.



My code:



int main() {
 char *filename = "tmp.mp4";
 AVOutputFormat *fmt;
 AVFormatContext *fctx;
 AVCodecContext *cctx;
 AVStream *st;

 av_register_all();
 avcodec_register_all();

 //auto detect the output format from the name
 fmt = av_guess_format(NULL, filename, NULL);
 if (!fmt) {
 cout << "Error av_guess_format()" << endl; system("pause"); exit(1);
 }

 if (avformat_alloc_output_context2(&fctx, fmt, NULL, filename) < 0) {
 cout << "Error avformat_alloc_output_context2()" << endl; system("pause"); exit(1);
 }


 //stream creation + parameters
 st = avformat_new_stream(fctx, 0);
 if (!st) {
 cout << "Error avformat_new_stream()" << endl; system("pause"); exit(1);
 }

 st->codecpar->codec_id = fmt->video_codec;
 st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
 st->codecpar->width = 352;
 st->codecpar->height = 288;
 st->time_base.num = 1;
 st->time_base.den = 25;

 AVCodec *pCodec = avcodec_find_encoder(st->codecpar->codec_id);
 if (!pCodec) {
 cout << "Error avcodec_find_encoder()" << endl; system("pause"); exit(1);
 }

 cctx = avcodec_alloc_context3(pCodec);
 if (!cctx) {
 cout << "Error avcodec_alloc_context3()" << endl; system("pause"); exit(1);
 }

 avcodec_parameters_to_context(cctx, st->codecpar);
 cctx->bit_rate = 400000;
 cctx->width = 352;
 cctx->height = 288;
 cctx->time_base.num = 1;
 cctx->time_base.den = 25;
 cctx->gop_size = 12;
 cctx->pix_fmt = AV_PIX_FMT_YUV420P;
 if (st->codecpar->codec_id == AV_CODEC_ID_H264) {
 av_opt_set(cctx->priv_data, "preset", "ultrafast", 0);
 }
 if (fctx->oformat->flags & AVFMT_GLOBALHEADER) {
 cctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 }
 avcodec_parameters_from_context(st->codecpar, cctx);

 av_dump_format(fctx, 0, filename, 1);

 //OPEN FILE + WRITE HEADER
 if (avcodec_open2(cctx, pCodec, NULL) < 0) {
 cout << "Error avcodec_open2()" << endl; system("pause"); exit(1);
 }
 if (!(fmt->flags & AVFMT_NOFILE)) {
 if (avio_open(&fctx->pb, filename, AVIO_FLAG_WRITE) < 0) {
 cout << "Error avio_open()" << endl; system("pause"); exit(1);
 }
 }
 if (avformat_write_header(fctx, NULL) < 0) {
 cout << "Error avformat_write_header()" << endl; system("pause"); exit(1);
 }


 //CREATE DUMMY VIDEO
 AVFrame *frame = av_frame_alloc();
 frame->format = cctx->pix_fmt;
 frame->width = cctx->width;
 frame->height = cctx->height;
 av_image_alloc(frame->data, frame->linesize, cctx->width, cctx->height, cctx->pix_fmt, 32);

 AVPacket pkt;
 double video_pts = 0;
 for (int i = 0; i < 50; i++) {
 video_pts = (double)cctx->time_base.num / cctx->time_base.den * 90 * i;

 for (int y = 0; y < cctx->height; y++) {
 for (int x = 0; x < cctx->width; x++) {
 frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
 if (y < cctx->height / 2 && x < cctx->width / 2) {
 /* Cb and Cr */
 frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
 frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
 }
 }
 }

 av_init_packet(&pkt);
 pkt.flags |= AV_PKT_FLAG_KEY;
 pkt.pts = frame->pts = video_pts;
 pkt.data = NULL;
 pkt.size = 0;
 pkt.stream_index = st->index;

 if (avcodec_send_frame(cctx, frame) < 0) {
 cout << "Error avcodec_send_frame()" << endl; system("pause"); exit(1);
 }
 if (avcodec_receive_packet(cctx, &pkt) == 0) {
 //cout << "Write frame " << to_string((int) pkt.pts) << endl;
 av_interleaved_write_frame(fctx, &pkt);
 av_packet_unref(&pkt);
 }
 }

 //DELAYED FRAMES
 for (;;) {
 avcodec_send_frame(cctx, NULL);
 if (avcodec_receive_packet(cctx, &pkt) == 0) {
 //cout << "-Write frame " << to_string((int)pkt.pts) << endl;
 av_interleaved_write_frame(fctx, &pkt);
 av_packet_unref(&pkt);
 }
 else {
 break;
 }
 }

 //FINISH
 av_write_trailer(fctx);
 if (!(fmt->flags & AVFMT_NOFILE)) {
 if (avio_close(fctx->pb) < 0) {
 cout << "Error avio_close()" << endl; system("pause"); exit(1);
 }
 }
 av_frame_free(&frame);
 avcodec_free_context(&cctx);
 avformat_free_context(fctx);

 system("pause");
 return 0;
}




Output of the program:



Output #0, mp4, to 'tmp.mp4':
 Stream #0:0: Video: h264, yuv420p, 352x288, q=2-31, 400 kb/s, 25 tbn
[libx264 @ 0000021c4a995ba0] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX FMA3 BMI2 AVX2
[libx264 @ 0000021c4a995ba0] profile Constrained Baseline, level 2.0
[libx264 @ 0000021c4a995ba0] 264 - core 152 r2851 ba24899 - H.264/MPEG-4 AVC codec - Copyleft 2003-2017 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=0:0:0 analyse=0:0 me=dia subme=0 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=6 lookahead_threads=1 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=12 keyint_min=1 scenecut=0 intra_refresh=0 rc=abr mbtree=0 bitrate=400 ratetol=1.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=0
[libx264 @ 0000021c4a995ba0] frame I:5 Avg QP: 7.03 size: 9318
[libx264 @ 0000021c4a995ba0] frame P:45 Avg QP: 4.53 size: 4258
[libx264 @ 0000021c4a995ba0] mb I I16..4: 100.0% 0.0% 0.0%
[libx264 @ 0000021c4a995ba0] mb P I16..4: 0.0% 0.0% 0.0% P16..4: 100.0% 0.0% 0.0% 0.0% 0.0% skip: 0.0%
[libx264 @ 0000021c4a995ba0] final ratefactor: 9.11
[libx264 @ 0000021c4a995ba0] coded y,uvDC,uvAC intra: 18.9% 21.8% 14.5% inter: 7.8% 100.0% 15.5%
[libx264 @ 0000021c4a995ba0] i16 v,h,dc,p: 4% 5% 5% 86%
[libx264 @ 0000021c4a995ba0] i8c dc,h,v,p: 2% 9% 6% 82%
[libx264 @ 0000021c4a995ba0] kb/s:264.68




If I try to play the mp4 file with 'ffplay', it prints:



[mov,mp4,m4a,3gp,3g2,mj2 @ 00000000026bf900] Could not find codec parameters for stream 0 (Video: h264 (avc1 / 0x31637661), none, 352x288, 138953 kb/s): unspecified pixel format
[h264 @ 00000000006c6ae0] non-existing PPS 0 referenced
[h264 @ 00000000006c6ae0] decode_slice_header error
[h264 @ 00000000006c6ae0] no frame!




I've spent a lot of time without finding the issue. What could be the reason for it?



Thanks for the help!
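For reference, ffplay's "unspecified pixel format" and "non-existing PPS 0" messages usually mean the avc1 sample entry in the mp4 carries no SPS/PPS extradata. In the code above, avcodec_parameters_from_context() runs before avcodec_open2(), so the global header produced by the encoder never reaches st->codecpar. Here is a minimal sketch of that part of the setup with the calls reordered, reusing cctx, st, pCodec, fctx and filename from the question; this is a guess at the cause, not a verified fix.

 /* Sketch: open the encoder first, then copy its parameters (including the
  * H.264 global header / extradata) into the stream's codecpar. */
 if (fctx->oformat->flags & AVFMT_GLOBALHEADER) {
  cctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
 }
 if (avcodec_open2(cctx, pCodec, NULL) < 0) {
  cout << "Error avcodec_open2()" << endl; exit(1);
 }
 if (avcodec_parameters_from_context(st->codecpar, cctx) < 0) {
  cout << "Error avcodec_parameters_from_context()" << endl; exit(1);
 }
 av_dump_format(fctx, 0, filename, 1);

With the extradata in place, the muxer can write a valid avc1 configuration box and players no longer have to guess the pixel format.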


-
Assigning of dts values to encoded packets
24 March, by Alex
I have a dump of H.264-encoded data which I need to put in an mp4 container. I verified the validity of the encoded data by running the mp4box utility against it; the mp4 file created by mp4box contained a proper 17-second video. Interestingly, if I try ffmpeg to achieve the same, the resulting video is 34 seconds long and rather crappy (probably ffmpeg tries to decode the video and then re-encode it, which results in a loss of quality?). Anyway, for my project I can't use the command-line approach and need to come up with a programmatic way to embed the data in the mp4 container.



Below is the code I use (I removed error checking for brevity; during execution all the calls succeed):



AVFormatContext* pInputFormatContext = avformat_alloc_context();
avformat_open_input(&pInputFormatContext, "Data.264", NULL, NULL);
avformat_find_stream_info(pInputFormatContext, NULL);
AVRational* pTime_base = &pInputFormatContext->streams[0]->time_base;

int nFrameRate = pInputFormatContext->streams[0]->r_frame_rate.num / pFormatCtx->streams[0]->r_frame_rate.den;
int nWidth = pInputFormatContext->streams[0]->codecpar->width;
int nHeight = pInputFormatContext->streams[0]->codecpar->height;
// nWidth = 1920, nHeight = 1080, nFrameRate = 25

// Create output objects
AVFormatContext* pOutputFormatContext = NULL;
avformat_alloc_output_context2(&pOutputFormatContext, NULL, NULL, "Destination.mp4");

AVCodec* pVideoCodec = avcodec_find_encoder(pOutputFormatContext->oformat->video_codec/*AV_CODEC_ID_264*/);
AVStream* pOutputStream = avformat_new_stream(pOutputFormatContext, NULL);
pOutputStream->id = pOutputFormatContext->nb_streams - 1;
AVCodecContext* pCodecContext = avcodec_alloc_context3(pVideoCodec);

switch (pVideoCodec->type) {
case AVMEDIA_TYPE_VIDEO:
 pCodecContext->codec_id = codec_id;
 pCodecContext->bit_rate = 400000;
 /* Resolution must be a multiple of two. */
 pCodecContext->width = nFrameWidth;
 pCodecContext->height = nFrameHeight;
 /* timebase: This is the fundamental unit of time (in seconds) in terms
 * of which frame timestamps are represented. For fixed-fps content,
 * timebase should be 1/framerate and timestamp increments should be
 * identical to 1. */
 pOutputStream->time_base.num = 1;
 pOutputStream->time_base.den = nFrameRate;
 pCodecContext->time_base = pOutputStream->time_base;
 pCodecContext->gop_size = 12; /* emit one intra frame every twelve frames at most */
 pCodecContext->pix_fmt = STREAM_PIX_FMT;
 break;
default:
 break;
}

/* copy the stream parameters to the muxer */
avcodec_parameters_from_context(pOutputStream->codecpar, pCodecContext);

avio_open(&pOutputFormatContext->pb, "Destination.mp4", AVIO_FLAG_WRITE);

// Start writing
AVDictionary* pDict = NULL;
avformat_write_header(pOutputFormatContext, &pDict);

// Process packets
AVPacket packet;
int64_t nCurrentDts = 0;
int64_t nDuration = 0;
int nReadResult = 0;

while (nReadResult == 0)
{
 nReadResult = av_read_frame(m_pInputFormatContext, &packet);
// At this point, packet.dts == AV_NOPTS_VALUE. 
// The duration field of the packet contains valid data

 packet.flags |= AV_PKT_FLAG_KEY;
 nDuration = packet.duration;
 packet.dts = nCurrentDts;
 packet.dts = av_rescale_q(nCurrentDts, pOutputFormatContext->streams[0]->codec->time_base, pOutputFormatContext->streams[0]->time_base);
 av_interleaved_write_frame(pOutputFormatContext, &packet);
 nCurrentDts += nDuration;
 nDuration += packet.duration;
 av_free_packet(&packet);
}

av_write_trailer(pOutputFormatContext);




The properties of the Destination.mp4 file I get indicate it is about 1 hour long with a frame rate of 0. I am sure the culprit is the way I calculate dts values for each packet and use av_rescale_q(), but I do not have sufficient understanding of the avformat library to figure out the proper way to do it. Any help will be appreciated!
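One approach that often works for raw elementary streams, where av_read_frame() returns packets with AV_NOPTS_VALUE, is to stamp each packet by its frame index in a 1/framerate time base and rescale that into the output stream's time base, rather than rescaling from the encoder's codec time base. Below is a minimal sketch of the write loop along those lines, reusing pInputFormatContext, pOutputFormatContext and nFrameRate from the code above and assuming the stream has no B-frames (so dts can equal pts); it is a sketch of the technique, not a drop-in replacement.

/* Sketch: one packet per frame; frame N starts at N/framerate seconds. */
AVRational frameTimeBase = { 1, nFrameRate };
AVStream* pOutStream = pOutputFormatContext->streams[0];
AVPacket packet;
int64_t nFrameIndex = 0;

while (av_read_frame(pInputFormatContext, &packet) == 0)
{
    packet.pts = av_rescale_q(nFrameIndex, frameTimeBase, pOutStream->time_base);
    packet.dts = packet.pts; /* no B-frames assumed */
    packet.duration = av_rescale_q(1, frameTimeBase, pOutStream->time_base);
    packet.stream_index = pOutStream->index;
    av_interleaved_write_frame(pOutputFormatContext, &packet);
    av_packet_unref(&packet);
    ++nFrameIndex;
}

With the output stream's time_base set to 1/nFrameRate before avformat_write_header() (the mp4 muxer may still adjust it, which is why the rescale remains necessary), the resulting duration should match the frame count instead of an accumulated, wrongly scaled dts.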