
Media (1)
- Richard Stallman and free software
19 October 2011, by
Updated: May 2013
Language: French
Type: Text
Other articles (111)
- MediaSPIP version 0.1 Beta
16 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
The zip file provided here contains only the MediaSPIP sources in the standalone version.
To get a working installation, all of the software dependencies must be installed manually on the server.
If you want to use this archive for a farm-mode installation, you will also need to make further modifications (...)
- From upload to the final video [standalone version]
31 January 2010, by
The path of an audio or video document through SPIPMotion is divided into three distinct steps.
Upload and retrieval of information about the source video
First, a SPIP article must be created and the "source" video document attached to it.
When this document is attached to the article, two actions are executed in addition to the normal behaviour: retrieval of the technical information of the file's audio and video streams; generation of a thumbnail: extraction of a (...)
- MediaSPIP v0.2
21 June 2013, by
MediaSPIP 0.2 is the first stable version of MediaSPIP.
Its official release date is 21 June 2013, as announced here.
The zip file provided here contains only the MediaSPIP sources in the standalone version.
As with the previous version, all of the software dependencies must be installed manually on the server.
If you want to use this archive for a farm-mode installation, you will also need to make further modifications (...)
On other sites (8809)
- Decoding then re-encoding frames in reverse order with ffmpeg
21 August 2014, by user3875919
I am pretty new to ffmpeg, and now I want to decode the frames and encode them back in reverse order. I have used this tutorial for decoding the video and this example to encode it back. I can decode the frames properly, but when I re-encode them I get a dummy image rather than a clear video. How can I get the actual reversed video?
static void video_encode_example(const char *filename, int codec_id)
{
AVCodec *codec;
AVCodecContext *c= NULL;
int i, ret, x, y, got_output;
FILE *f;
struct SwsContext *sws_ctx = NULL;
AVFrame *frame;
AVFrame *frameRGB = NULL;
AVPacket pkt;
uint8_t endcode[] = { 0, 0, 1, 0xb7 };
printf("Encode video file %s\n", filename);
/* find the mpeg1 video encoder */
codec = avcodec_find_encoder(codec_id);
if (!codec) {
fprintf(stderr, "Codec not found\n");
exit(1);
}
c = avcodec_alloc_context3(codec);
if (!c) {
fprintf(stderr, "Could not allocate video codec context\n");
exit(1);
}
/* put sample parameters */
c->bit_rate = 400000;
/* resolution must be a multiple of two */
c->width = 352;
c->height = 288;
/* frames per second */
c->time_base = (AVRational){1,25};
/* emit one intra frame every ten frames
* check frame pict_type before passing frame
* to encoder, if frame->pict_type is AV_PICTURE_TYPE_I
* then gop_size is ignored and the output of encoder
* will always be I frame irrespective to gop_size
*/
c->gop_size = 10;
c->max_b_frames = 1;
c->pix_fmt = AV_PIX_FMT_YUV420P;
if (codec_id == AV_CODEC_ID_H264)
av_opt_set(c->priv_data, "preset", "slow", 0);
/* open it */
if (avcodec_open2(c, codec, NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
f = fopen(filename, "wb");
if (!f) {
fprintf(stderr, "Could not open %s\n", filename);
exit(1);
}
frame = av_frame_alloc();
if (!frame) {
fprintf(stderr, "Could not allocate video frame\n");
exit(1);
}
frame->format = c->pix_fmt;
frame->width = c->width;
frame->height = c->height;
/* the image can be allocated by any means and av_image_alloc() is
* just the most convenient way if av_malloc() is to be used */
ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
c->pix_fmt, 32);
if (ret < 0) {
fprintf(stderr, "Could not allocate raw picture buffer\n");
exit(1);
}
/* encode 1 second of video */
for (i = 0; i < 25; i++) {
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
fflush(stdout);
/* prepare a dummy image */
/* Y */
for (y = 0; y < c->height; y++) {
for (x = 0; x < c->width; x++) {
frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
}
}
/* Cb and Cr */
for (y = 0; y < c->height/2; y++) {
for (x = 0; x < c->width/2; x++) {
frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
}
}
frame->pts = i;
/* encode the image */
ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
if (ret < 0) {
fprintf(stderr, "Error encoding frame\n");
exit(1);
}
if (got_output) {
// Convert the image from its native format to RGB
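// (note: sws_ctx and frameRGB are declared above but never allocated, so they are still NULL at this point)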
sws_scale
(
sws_ctx,
(uint8_t const * const *)frame->data,
frame->linesize,
0,
c->height,
frameRGB->data,
frameRGB->linesize
);
printf("Write frame %3d (size=%5d)\n", i, pkt.size);
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
/* get the delayed frames */
for (got_output = 1; got_output; i++) {
fflush(stdout);
ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
if (ret < 0) {
fprintf(stderr, "Error encoding frame\n");
exit(1);
}
if (got_output) {
printf("Write frame %3d (size=%5d)\n", i, pkt.size);
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
/* add sequence end code to have a real mpeg file */
fwrite(endcode, 1, sizeof(endcode), f);
fclose(f);
avcodec_close(c);
av_free(c);
av_freep(&frame->data[0]);
av_frame_free(&frame);
printf("\n");
}
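As posted, the encode loop above still synthesizes the gradient test pattern from the stock example, so nothing from the decoded video ever reaches the encoder, and the sws_scale() call uses a context and a destination frame that are never allocated. A minimal sketch of the missing piece, assuming the decode loop from the linked tutorial has already collected the decoded frames into an array (for instance one av_frame_clone() per frame), would feed them to the same encoder in reverse order with re-stamped timestamps; the name encode_reversed and the frames/nb_frames parameters are illustrative only, not part of the original code.

#include <libavcodec/avcodec.h>
#include <stdio.h>
#include <stdlib.h>

/* Illustrative sketch: "frames" is assumed to hold the frames gathered by the
 * decode loop (e.g. one av_frame_clone() per decoded frame) and "c" is an
 * encoder context opened with the same width/height/pix_fmt as those frames. */
static void encode_reversed(AVCodecContext *c, AVFrame **frames, int nb_frames, FILE *f)
{
    int64_t next_pts = 0;
    for (int idx = nb_frames - 1; idx >= 0; idx--) {
        AVPacket pkt;
        int got_output;
        av_init_packet(&pkt);
        pkt.data = NULL; /* packet data will be allocated by the encoder */
        pkt.size = 0;
        /* timestamps must still increase, so the original pts are discarded */
        frames[idx]->pts = next_pts++;
        if (avcodec_encode_video2(c, &pkt, frames[idx], &got_output) < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }
        if (got_output) {
            fwrite(pkt.data, 1, pkt.size, f);
            av_free_packet(&pkt);
        }
    }
    /* the delayed frames still have to be flushed with NULL input, as above */
}

For long clips the decoded frames will not all fit in memory, in which case the usual approach is to process the video in chunks from the end; the sketch only covers the in-memory case.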
- FFMPEG avformat_write_header changing my stream time_base
24 April 2015, by cesarpachon
I am muxing video using ffmpeg 2.1.3 with libavcodec 55.39.101 (forced to that version because it is the one available in the google-chrome PNaCl port project). All my frames seem to have the wrong time: they all try to be rendered at once at the beginning of the video when playing it.
I am setting the stream time base to 1/25, but just after calling avformat_write_header it has the value -18082736/1.
For each frame, when I print the stream time_base it says 1/12800, while the time_base of the codec is always fine (1/25). Console log before and after avformat_write_header:
before avformat_write_header stream time_base: 1/25
after avformat_write_header ret 0 stream time_base: -18082736/1
The code (abbreviated to keep the post short; all calls in the original version had error checking):
AVCodecContext *codecContext;
AVCodec * codec = avcodec_find_encoder(codec_id);
myOutputStream->stream = avformat_new_stream(outputFormatContext, *codec);
myOutputStream->stream->id = outputFormatContext->nb_streams-1;
codecContext = myOutputStream->stream->codec;
codecContext->codec_id = codec_id;
codecContext->bit_rate = 400000;
codecContext->width = width;
codecContext->height = height;
myOutputStream->stream->time_base = (AVRational){ 1, 25 };
codecContext->time_base = myOutputStream->stream->time_base;
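/* note: avformat_write_header() lets the muxer overwrite stream->time_base
   with the value it will actually use, so the 1/25 set above is only a hint */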
codecContext->gop_size = 12;
codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
AVDictionary *opt = NULL;
av_dict_copy(&opt, opt_arg, 0);
ret = avcodec_open2(codecContext, codec, &opt);
av_dict_free(&opt);
myOutputStream->frame = alloc_picture(codecContext->pix_fmt, codecContext->width, codecContext->height);
myOutputStream->tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);
//before: printing g_outputContext->stream time_base here
ret = avformat_write_header(g_outputContext, &opt);
//after: printing g_outputContext->stream time_base here

If I run ffmpeg -i on the final video, I get this (why is the duration zero?):
Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'test4.mp4':
Metadata:
major_brand : isom
minor_version : 512
compatible_brands: isomiso2mp41
encoder : Lavf55.19.104
Duration: 00:00:00.05, start: 0.000000, bitrate: 99549 kb/s
Stream #0:0(und): Video: mpeg4 (Simple Profile) (mp4v / 0x7634706D), yuv420p, 800x600 [SAR 1:1 DAR 4:3], 463106 kb/s, 12800 fps, 12800 tbr, 12800 tbn, 25 tbc (default)
Metadata:
handler_name : VideoHandler
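For reference, the 1/12800 time base is not corruption in itself: avformat_write_header() lets the muxer overwrite the stream time_base with the value it will actually use, and the mp4 muxer picks its own tick rate. If packet timestamps are then written as-is in codec time-base units (1/25), they get interpreted as 1/12800 s ticks, which collapses the clip to a fraction of a second, much like the 0.05 s duration and 12800 fps/tbn reported above. Below is a hedged sketch of the rescaling step the standard muxing flow performs before writing each packet; write_video_packet is an illustrative helper name, not code from the question.

#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>

/* Illustrative helper: convert a packet's timestamps from the codec time base
 * (1/25 in the question) to whatever time base the muxer actually chose for
 * the stream after avformat_write_header(), then hand it to the muxer. */
static int write_video_packet(AVFormatContext *fmt_ctx, AVStream *st,
                              AVCodecContext *codec_ctx, AVPacket *pkt)
{
    if (pkt->pts != AV_NOPTS_VALUE)
        pkt->pts = av_rescale_q(pkt->pts, codec_ctx->time_base, st->time_base);
    if (pkt->dts != AV_NOPTS_VALUE)
        pkt->dts = av_rescale_q(pkt->dts, codec_ctx->time_base, st->time_base);
    if (pkt->duration > 0)
        pkt->duration = av_rescale_q(pkt->duration, codec_ctx->time_base, st->time_base);
    pkt->stream_index = st->index;
    return av_interleaved_write_frame(fmt_ctx, pkt);
}

Later FFmpeg releases wrap these conversions in av_packet_rescale_ts(), but that helper appears to be newer than libavcodec 55.39, hence the explicit av_rescale_q() calls in the sketch.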
- Use ffmpeg to encode a video in Android
18 August 2014, by user2098010
I followed the link below for using ffmpeg on Android:
http://www.roman10.net/how-to-build-ffmpeg-with-ndk-r9/
and the ndk-build is done!
I need to encode a video captured by the phone camera so that it plays in slow motion.
I have been using the sample file in ffmpeg/sample...
But I can't get the video encoded (slowed down): the output video has 1 sec of playtime and only a few colors are displayed! Please help me,
I want to sleep well...
AVCodec *codec;
AVCodecContext *c= NULL;
int i, ret, x, y, got_output;
FILE *f;
AVFrame *frame;
AVPacket pkt;
uint8_t endcode[] = { 0, 0, 1, 0xb7 };
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "%s",filename);
/* find the mpeg1 video encoder */
codec = avcodec_find_encoder(CODEC_ID_H263);
if (!codec) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Codec not found");
exit(1);
}
c = avcodec_alloc_context3(codec);
if (!c) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video codec context");
exit(1);
}
/* put sample parameters */
c->bit_rate = 400000;
/* resolution must be a multiple of two */
c->width = 352;
c->height = 288;
/* frames per second */
c->time_base= (AVRational){1,25};
c->gop_size = 10; /* emit one intra frame every ten frames */
c->pix_fmt = AV_PIX_FMT_YUV420P;
// if(codec == AV_CODEC_ID_H264)
// av_opt_set(c->priv_data, "preset", "slow", 0);
/* open it */
if (avcodec_open2(c, codec, NULL) < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open codec");
exit(1);
}
f = fopen(filename, "wb");
if (f == NULL) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open");
exit(1);
}
frame = avcodec_alloc_frame();
if (!frame) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video frame");
exit(1);
}
frame->format = c->pix_fmt;
frame->width = c->width;
frame->height = c->height;
/* the image can be allocated by any means and av_image_alloc() is
* just the most convenient way if av_malloc() is to be used */
ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
c->pix_fmt, 32);
if (ret < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate raw picture buffer");
exit(1);
}
/* encode 1 second of video */
for(i=0;i<250;i++) {
av_init_packet(&pkt);
pkt.data = NULL; // packet data will be allocated by the encoder
pkt.size = 0;
fflush(stdout);
/* prepare a dummy image */
/* Y */
for(y=0;y<c->height;y++) {
for(x=0;x<c->width;x++) {
frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
}
}
/* Cb and Cr */
for(y=0;y<c->height/2;y++) {
for(x=0;x<c->width/2;x++) {
frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
}
}
frame->pts = i;
/* encode the image */
ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
if (ret < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
exit(1);
}
if (got_output) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "encode the image Write frame pktsize %d", pkt.size);
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
/* get the delayed frames */
for (got_output = 1; got_output; i++) {
fflush(stdout);
ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
if (ret < 0) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
fprintf(stderr, "Error encoding frame\n");
exit(1);
}
if (got_output) {
__android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "get the delayed frames Write frame pktsize %d", pkt.size);
fwrite(pkt.data, 1, pkt.size, f);
av_free_packet(&pkt);
}
}
/* add sequence end code to have a real mpeg file */
fwrite(endcode, 1, sizeof(endcode), f);
fclose(f);
avcodec_close(c);
av_free(c);
av_freep(&frame->data[0]);
avcodec_free_frame(&frame);
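As with the previous question, the loop above encodes 250 synthetic gradient frames at a fixed 1/25 time base, so neither the camera footage nor any slow-motion effect is involved yet. Purely as an illustration, one simple way to slow a clip down once real decoded frames are available is to encode each source frame several times, so the output lasts proportionally longer at the same frame rate; SLOW_FACTOR and encode_frame_slow are made-up names for this sketch.

#include <libavcodec/avcodec.h>
#include <stdio.h>
#include <stdlib.h>

#define SLOW_FACTOR 4 /* made-up value: write each source frame 4 times */

/* Illustrative sketch: "src" is one decoded camera frame already in the
 * encoder's pixel format, "c" is the opened encoder context, and "next_pts"
 * keeps the output timestamps strictly increasing across calls. */
static void encode_frame_slow(AVCodecContext *c, AVFrame *src, FILE *f, int64_t *next_pts)
{
    for (int rep = 0; rep < SLOW_FACTOR; rep++) {
        AVPacket pkt;
        int got_output;
        av_init_packet(&pkt);
        pkt.data = NULL; /* the encoder allocates the packet data */
        pkt.size = 0;
        /* every duplicated frame still needs its own, increasing pts */
        src->pts = (*next_pts)++;
        if (avcodec_encode_video2(c, &pkt, src, &got_output) < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }
        if (got_output) {
            fwrite(pkt.data, 1, pkt.size, f);
            av_free_packet(&pkt);
        }
    }
}

The delayed frames would still have to be flushed with a NULL frame afterwards, exactly as in the loop above; stretching the pts against a finer time_base would be the cleaner alternative once the output goes through a real muxer rather than a raw bitstream file.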