Advanced search

Media (91)

Other articles (61)

  • Managing creation and editing rights for objects

    8 February 2011, by

    By default, many features are restricted to administrators, but each can be configured independently to change the minimum status required to use it, notably: writing content on the site, adjustable in the form template management; adding notes to articles; adding captions and annotations to images;

  • Supporting all media types

    13 April 2011, by

    Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)

  • Uploading media and themes via FTP

    31 May 2013, by

    The MédiaSPIP tool also processes media transferred via FTP. If you prefer to upload this way, retrieve the access credentials for your MédiaSPIP site and use your favourite FTP client.
    From the start, you will find the following folders in your FTP space: config/: the site's configuration folder; IMG/: media already processed and online on the site; local/: the website's cache directory; themes/: custom themes or stylesheets; tmp/: working folder (...)

On other sites (8863)

  • getting frame using ffmpeg in android

    5 August 2014, by user2098010

    I want to get frames from an .mp4 (recorded by the phone camera) using ffmpeg on Android.
    I did compile and run ndk-build successfully.
    My questions are:

    1. For getting frames from the .mp4, what should I do?
       Encoding? Decoding?

    2. The encoded mp4 video file could not be played with MediaPlayer.

    Please help me...


     #include <stdio.h>
     #include <stdlib.h>
     #include <stdint.h>
     #include <android/log.h>
     #include <libavcodec/avcodec.h>
     #include <libavformat/avformat.h>
     #include <libswscale/swscale.h>
     #include <libavutil/opt.h>
     #include <libavutil/channel_layout.h>
     #include <libavutil/common.h>
     #include <libavutil/imgutils.h>
     #include <libavutil/mathematics.h>
     #include <libavutil/samplefmt.h>
     #include "player.h"

    void video_encode_example(const char *filename, int codec_id)
    {
       AVCodec *codec;
       AVCodecContext *c= NULL;
       int i, ret, x, y, got_output;
       FILE *f;
       AVFrame *frame;
       AVPacket pkt;
       uint8_t endcode[] = { 0, 0, 1, 0xb7 };

           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "%s",filename);

       /* find the H.263 video encoder */
       codec = avcodec_find_encoder(CODEC_ID_H263);
       if (!codec) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Codec not found");
           exit(1);
       }

       c = avcodec_alloc_context3(codec);
       if (!c) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video codec context");
           exit(1);
       }

       /* put sample parameters */
       c->bit_rate = 400000;
       /* resolution must be a multiple of two */
       c->width = 352;
       c->height = 288;
       /* frames per second */
       c->time_base= (AVRational){1,25};
       c->gop_size = 10; /* emit one intra frame every ten frames */
       c->max_b_frames = 1;
       c->pix_fmt = AV_PIX_FMT_YUV420P;


      // if(codec == AV_CODEC_ID_H264)
         // av_opt_set(c->priv_data, "preset", "slow", 0);

       /* open it */
       if (avcodec_open2(c, codec, NULL) < 0) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open codec");
           exit(1);
       }

       f = fopen(filename, "wb");
       if (f == NULL) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open");
           exit(1);
       }

       frame = avcodec_alloc_frame();
       if (!frame) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video frame");
           exit(1);
       }
       frame->format = c->pix_fmt;
       frame->width  = c->width;
       frame->height = c->height;

       /* the image can be allocated by any means and av_image_alloc() is
        * just the most convenient way if av_malloc() is to be used */
       ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
                            c->pix_fmt, 32);
       if (ret < 0) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate raw picture buffer");
           exit(1);
       }

       /* encode 1 second of video */
       for (i = 0; i < 250; i++) {
           av_init_packet(&pkt);
           pkt.data = NULL;    // packet data will be allocated by the encoder
           pkt.size = 0;

           fflush(stdout);
           /* prepare a dummy image */
           /* Y */
           for (y = 0; y < c->height; y++) {
               for (x = 0; x < c->width; x++) {
                   frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
               }
           }

           /* Cb and Cr */
           for (y = 0; y < c->height / 2; y++) {
               for (x = 0; x < c->width / 2; x++) {
                   frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
                   frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
               }
           }

           frame->pts = i;

           /* encode the image */
           ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
           if (ret < 0) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
               exit(1);
           }

           if (got_output) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "encode the image Write frame pktsize  %d", pkt.size);
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }

       /* get the delayed frames */
       for (got_output = 1; got_output; i++) {
           fflush(stdout);

           ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
           if (ret < 0) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
               fprintf(stderr, "Error encoding frame\n");
               exit(1);
           }

           if (got_output) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "get the delayed frames Write frame pktsize  %d", pkt.size);
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }

       /* add sequence end code to have a real mpeg file */
       fwrite(endcode, 1, sizeof(endcode), f);
       fclose(f);

       avcodec_close(c);
       av_free(c);
       av_freep(&frame->data[0]);
       avcodec_free_frame(&frame);
    }
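
    To the first question: getting frames out of an .mp4 is a demuxing + decoding job, not an encoding one (the function above only encodes a synthetic test pattern). Below is an editor's sketch, not the poster's code, using the same-era API as the snippet above (avformat_open_input, av_read_frame, avcodec_decode_video2) and assuming the same includes; error handling is omitted and the handling of each decoded frame is left as a stub.

     static void video_decode_example(const char *filename)
     {
        AVFormatContext *fmt = NULL;
        AVCodecContext *dec_ctx;
        AVFrame *frm;
        AVPacket pkt;
        int vstream, got_frame;

        av_register_all();

        /* demux: open the container and locate the video stream */
        avformat_open_input(&fmt, filename, NULL, NULL);
        avformat_find_stream_info(fmt, NULL);
        vstream = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);

        /* decode: open a decoder matching that stream's codec */
        dec_ctx = fmt->streams[vstream]->codec;
        avcodec_open2(dec_ctx, avcodec_find_decoder(dec_ctx->codec_id), NULL);

        frm = avcodec_alloc_frame();
        while (av_read_frame(fmt, &pkt) >= 0) {
            if (pkt.stream_index == vstream) {
                avcodec_decode_video2(dec_ctx, frm, &got_frame, &pkt);
                if (got_frame) {
                    /* frm->data / frm->linesize now hold one raw decoded
                     * frame (typically YUV420P); convert it with sws_scale
                     * or save it here */
                }
            }
            av_free_packet(&pkt);
        }

        avcodec_close(dec_ctx);
        avformat_close_input(&fmt);
        avcodec_free_frame(&frm);
     }
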
  • use ffmpeg encode a video in android

    18 August 2014, by user2098010

    I followed the link below to use ffmpeg on Android:
    http://www.roman10.net/how-to-build-ffmpeg-with-ndk-r9/

    and ran ndk-build successfully.

    I need to encode a video captured by the phone camera so that it plays in slow motion.

    I've been using the sample file in ffmpeg/sample...
    but I can't get the video encoded (slowed down).

    The output video has a playtime of only 1 second,
    and only a few colors are displayed!

    Please help me...

    I want to sleep well...

     AVCodec *codec;
       AVCodecContext *c= NULL;
       int i, ret, x, y, got_output;
       FILE *f;
       AVFrame *frame;
       AVPacket pkt;
       uint8_t endcode[] = { 0, 0, 1, 0xb7 };

           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "%s",filename);

       /* find the H.263 video encoder */
       codec = avcodec_find_encoder(CODEC_ID_H263);
       if (!codec) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Codec not found");
           exit(1);
       }

       c = avcodec_alloc_context3(codec);
       if (!c) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video codec context");
           exit(1);
       }

       /* put sample parameters */
       c->bit_rate = 400000;
       /* resolution must be a multiple of two */
       c->width = 352;
       c->height = 288;
       /* frames per second */
       c->time_base= (AVRational){1,25};
       c->gop_size = 10; /* emit one intra frame every ten frames */
       c->pix_fmt = AV_PIX_FMT_YUV420P;


      // if(codec == AV_CODEC_ID_H264)
         // av_opt_set(c->priv_data, "preset", "slow", 0);

       /* open it */
       if (avcodec_open2(c, codec, NULL) < 0) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open codec");
           exit(1);
       }

       f = fopen(filename, "wb");
       if (f == NULL) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not open");
           exit(1);
       }

       frame = avcodec_alloc_frame();
       if (!frame) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate video frame");
           exit(1);
       }
       frame->format = c->pix_fmt;
       frame->width  = c->width;
       frame->height = c->height;

       /* the image can be allocated by any means and av_image_alloc() is
        * just the most convenient way if av_malloc() is to be used */
       ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height,
                            c->pix_fmt, 32);
       if (ret < 0) {
           __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Could not allocate raw picture buffer");
           exit(1);
       }

       /* encode 1 second of video */
       for (i = 0; i < 250; i++) {
           av_init_packet(&pkt);
           pkt.data = NULL;    // packet data will be allocated by the encoder
           pkt.size = 0;

           fflush(stdout);
           /* prepare a dummy image */
           /* Y */
           for (y = 0; y < c->height; y++) {
               for (x = 0; x < c->width; x++) {
                   frame->data[0][y * frame->linesize[0] + x] = x + y + i * 3;
               }
           }

           /* Cb and Cr */
           for (y = 0; y < c->height / 2; y++) {
               for (x = 0; x < c->width / 2; x++) {
                   frame->data[1][y * frame->linesize[1] + x] = 128 + y + i * 2;
                   frame->data[2][y * frame->linesize[2] + x] = 64 + x + i * 5;
               }
           }

           frame->pts = i;

           /* encode the image */
           ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
           if (ret < 0) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
               exit(1);
           }

           if (got_output) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "encode the image Write frame pktsize  %d", pkt.size);
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }

       /* get the delayed frames */
       for (got_output = 1; got_output; i++) {
           fflush(stdout);

           ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
           if (ret < 0) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "Error encoding frame");
               fprintf(stderr, "Error encoding frame\n");
               exit(1);
           }

           if (got_output) {
               __android_log_print(ANDROID_LOG_DEBUG, "BASEBALL", "get the delayed frames Write frame pktsize  %d", pkt.size);
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }

       /* add sequence end code to have a real mpeg file */
       fwrite(endcode, 1, sizeof(endcode), f);
       fclose(f);

       avcodec_close(c);
       av_free(c);
       av_freep(&frame->data[0]);
       avcodec_free_frame(&frame);
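
    A note on the slow-motion goal (an editor's aside, not part of the post): a frame's presentation time is frame->pts multiplied by the codec time_base, so slow motion comes from stretching the timestamps, and that timing only takes effect when the packets are written through a muxer into a real container; the raw elementary stream fwrite()'d above carries no per-frame timing, so players have to guess a frame rate. A minimal sketch against the encode loop above:

           /* Sketch (assumption): with c->time_base = (AVRational){1, 25},
            * frame i is displayed at pts * 1/25 seconds, so doubling the
            * pts step doubles the playback duration (2x slow motion) once
            * the packets go into a container such as mp4. */
           frame->pts = i * 2;   /* was: frame->pts = i; */
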
  • FFMPEG avformat_write_header changing my stream time_base

    24 April 2015, by cesarpachon

    I am muxing video using ffmpeg 2.1.3 with libavcodec 55.39.101 (forced to that version because it is the one available in the google-chrome PNACL port project). All my frames seem to have bad timestamps: they all try to be rendered at once at the beginning of the video when playing it.

    I am setting the stream time base to 1/25, but just after calling avformat_write_header it has the value -18082736/1.
    For each frame, when I print the stream time_base it says 1/12800, while the codec time_base is always fine (1/25).

    Console log before and after avformat_write_header:

    before avformat_write_header stream time_base: 1/25
    after avformat_write_header ret 0 stream time_base: -18082736/1

    The code (abbreviated to keep the post short; all calls in the original version had error checking):

    AVCodecContext *codecContext;
    AVCodec * codec = avcodec_find_encoder(codec_id);  
    myOutputStream->stream = avformat_new_stream(outputFormatContext, *codec);
    myOutputStream->stream->id = outputFormatContext->nb_streams-1;
    codecContext = myOutputStream->stream->codec;
    codecContext->codec_id = codec_id;
    codecContext->bit_rate = 400000;
    codecContext->width    = width;
    codecContext->height   = height;
    myOutputStream->stream->time_base = (AVRational){ 1, 25 };
    codecContext->time_base       = myOutputStream->stream->time_base;
    codecContext->gop_size      = 12;
    codecContext->pix_fmt       = AV_PIX_FMT_YUV420P;
    AVDictionary *opt = NULL;
     av_dict_copy(&opt, opt_arg, 0);
     ret = avcodec_open2(codecContext, codec, &opt);
     av_dict_free(&opt);
    myOutputStream->frame = alloc_picture(codecContext->pix_fmt, codecContext->width, codecContext->height);
     myOutputStream->tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, codecContext->width, codecContext->height);

    //before: printing g_outputContext->stream time_base here
     ret = avformat_write_header(g_outputContext, &opt);
    //after: printing g_outputContext->stream time_base here
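
     This behaviour is expected: the muxer may pick its own stream time_base in avformat_write_header (mp4 typically ends up with 1/12800), so packet timestamps produced in the codec time_base (1/25) must be rescaled to the stream time_base before each write, otherwise every frame lands near t = 0 and the reported duration collapses. A possible write path, sketched by the editor with the variable names from the snippet above (pkt, got_packet and the encode call are assumptions, not code from the post):

     AVPacket pkt = { 0 };
     int got_packet = 0;
     av_init_packet(&pkt);

     /* frame->pts must be set in codecContext->time_base units (0, 1, 2, ...) */
     ret = avcodec_encode_video2(codecContext, &pkt, myOutputStream->frame, &got_packet);
     if (ret >= 0 && got_packet) {
         /* rescale from codec ticks (1/25) to whatever the muxer chose */
         if (pkt.pts != AV_NOPTS_VALUE)
             pkt.pts = av_rescale_q(pkt.pts, codecContext->time_base,
                                    myOutputStream->stream->time_base);
         if (pkt.dts != AV_NOPTS_VALUE)
             pkt.dts = av_rescale_q(pkt.dts, codecContext->time_base,
                                    myOutputStream->stream->time_base);
         pkt.duration = av_rescale_q(pkt.duration, codecContext->time_base,
                                     myOutputStream->stream->time_base);
         pkt.stream_index = myOutputStream->stream->index;
         ret = av_interleaved_write_frame(g_outputContext, &pkt);
     }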

    If I run ffmpeg -i on the final video, I get this (why is the duration zero?):

    Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'test4.mp4':
     Metadata:
       major_brand     : isom
       minor_version   : 512
       compatible_brands: isomiso2mp41
       encoder         : Lavf55.19.104
     Duration: 00:00:00.05, start: 0.000000, bitrate: 99549 kb/s
       Stream #0:0(und): Video: mpeg4 (Simple Profile) (mp4v / 0x7634706D), yuv420p, 800x600 [SAR 1:1 DAR 4:3], 463106 kb/s, 12800 fps, 12800 tbr, 12800 tbn, 25 tbc (default)
       Metadata:
         handler_name    : VideoHandler