
Media (21)

Tag: Nine Inch Nails

Other articles (61)

  • Adding notes and captions to images

    7 February 2011, by

    To be able to add notes and captions to images, the first step is to install the "Légendes" plugin.
    Once the plugin is activated, you can configure it in the configuration area in order to change the rights for creating, modifying and deleting notes. By default, only the site administrators can add notes to images.
    Changes when adding a media item
    When adding a media item of type "image", a new button appears above the preview (...)

  • Submit bugs and patches

    13 April 2011

    Unfortunately, software is never perfect.
    If you think you have found a bug, report it using our ticket system. Please help us fix it by providing the following information: the browser you are using, including the exact version; as precise an explanation of the problem as possible; if possible, the steps taken that led to the problem; and a link to the site / page in question.
    If you think you have solved the bug, fill in a ticket and attach a corrective patch to it.
    You may also (...)

  • MediaSPIP 0.1 Beta version

    25 April 2011, by

    MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to carry out other manual (...)

On other websites (9203)

  • Setting ffmpeg properly in ubuntu 16.04

    22 August 2017, by pro neon

    I am following this website for an ffmpeg tutorial: http://dranger.com
    I tried to compile the programs after setting up ffmpeg in Ubuntu by following some online videos, but none of them worked. Sometimes GCC gives me an undefined reference error and sometimes a header not found error. I looked at some of the answers on SO that said we need to make some changes to the code because the new API is not backwards compatible, but GCC still gives me an undefined reference error.
    Here is the code that I am trying to compile:

     // tutorial01.c
    // Code based on a tutorial by Martin Bohme (boehme@inb.uni-luebeckREMOVETHIS.de)
    // Tested on Gentoo, CVS version 5/01/07 compiled with GCC 4.1.1
    // With updates from https://github.com/chelyaev/ffmpeg-tutorial
    // Updates tested on:
    // LAVC 54.59.100, LAVF 54.29.104, LSWS 2.1.101
    // on GCC 4.7.2 in Debian February 2015

    // A small sample program that shows how to use libavformat and libavcodec to
    // read video from a file.
    //
    // Use
    //
    // gcc -o tutorial01 tutorial01.c -lavformat -lavcodec -lswscale -lz
    //
    // to build (assuming libavformat and libavcodec are correctly installed
    // your system).
    //
    // Run using
    //
    // tutorial01 myvideofile.mpg
    //
    // to write the first five frames from "myvideofile.mpg" to disk in PPM
    // format.

     #include <libavcodec/avcodec.h>
     #include <libavformat/avformat.h>
     #include <libswscale/swscale.h>

     #include <stdio.h>

     // compatibility with newer API
     #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55,28,1)
    #define av_frame_alloc avcodec_alloc_frame
    #define av_frame_free avcodec_free_frame
    #endif

    void SaveFrame(AVFrame *pFrame, int width, int height, int iFrame) {
     FILE *pFile;
     char szFilename[32];
     int  y;

     // Open file
     sprintf(szFilename, "frame%d.ppm", iFrame);
     pFile=fopen(szFilename, "wb");
     if(pFile==NULL)
       return;

     // Write header
     fprintf(pFile, "P6\n%d %d\n255\n", width, height);

     // Write pixel data
      for(y=0; y<height; y++)
        fwrite(pFrame->data[0]+y*pFrame->linesize[0], 1, width*3, pFile);

     // Close file
     fclose(pFile);
    }

    int main(int argc, char *argv[]) {
     // Initalizing these to NULL prevents segfaults!
     AVFormatContext   *pFormatCtx = NULL;
     int               i, videoStream;
     AVCodecContext    *pCodecCtxOrig = NULL;
     AVCodecContext    *pCodecCtx = NULL;
     AVCodec           *pCodec = NULL;
     AVFrame           *pFrame = NULL;
     AVFrame           *pFrameRGB = NULL;
     AVPacket          packet;
     int               frameFinished;
     int               numBytes;
     uint8_t           *buffer = NULL;
     struct SwsContext *sws_ctx = NULL;

      if(argc < 2) {
       printf("Please provide a movie file\n");
       return -1;
     }
     // Register all formats and codecs
     av_register_all();

     // Open video file
      if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL)!=0)
       return -1; // Couldn't open file

     // Retrieve stream information
      if(avformat_find_stream_info(pFormatCtx, NULL)<0)
       return -1; // Couldn't find stream information

     // Dump information about file onto standard error
     av_dump_format(pFormatCtx, 0, argv[1], 0);

     // Find the first video stream
     videoStream=-1;
      for(i=0; i<pFormatCtx->nb_streams; i++)
       if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
         videoStream=i;
         break;
       }
     if(videoStream==-1)
       return -1; // Didn't find a video stream

     // Get a pointer to the codec context for the video stream
     pCodecCtxOrig=pFormatCtx->streams[videoStream]->codec;
     // Find the decoder for the video stream
     pCodec=avcodec_find_decoder(pCodecCtxOrig->codec_id);
     if(pCodec==NULL) {
       fprintf(stderr, "Unsupported codec!\n");
       return -1; // Codec not found
     }
     // Copy context
     pCodecCtx = avcodec_alloc_context3(pCodec);
     if(avcodec_copy_context(pCodecCtx, pCodecCtxOrig) != 0) {
       fprintf(stderr, "Couldn't copy codec context");
       return -1; // Error copying codec context
     }

     // Open codec
      if(avcodec_open2(pCodecCtx, pCodec, NULL)<0)
       return -1; // Could not open codec

     // Allocate video frame
     pFrame=av_frame_alloc();

     // Allocate an AVFrame structure
     pFrameRGB=av_frame_alloc();
     if(pFrameRGB==NULL)
       return -1;

     // Determine required buffer size and allocate buffer
     numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
                     pCodecCtx->height);
     buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

     // Assign appropriate parts of buffer to image planes in pFrameRGB
     // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
     // of AVPicture
     avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
            pCodecCtx->width, pCodecCtx->height);

     // initialize SWS context for software scaling
     sws_ctx = sws_getContext(pCodecCtx->width,
                  pCodecCtx->height,
                  pCodecCtx->pix_fmt,
                  pCodecCtx->width,
                  pCodecCtx->height,
                  PIX_FMT_RGB24,
                  SWS_BILINEAR,
                  NULL,
                  NULL,
                  NULL
                  );

     // Read frames and save first five frames to disk
     i=0;
      while(av_read_frame(pFormatCtx, &packet)>=0) {
       // Is this a packet from the video stream?
       if(packet.stream_index==videoStream) {
         // Decode video frame
          avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

         // Did we get a video frame?
         if(frameFinished) {
       // Convert the image from its native format to RGB
       sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
             pFrame->linesize, 0, pCodecCtx->height,
             pFrameRGB->data, pFrameRGB->linesize);

       // Save the frame to disk
        if(++i<=5)
         SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height,
               i);
         }
       }

       // Free the packet that was allocated by av_read_frame
        av_free_packet(&packet);
     }

     // Free the RGB image
     av_free(buffer);
      av_frame_free(&pFrameRGB);

     // Free the YUV frame
      av_frame_free(&pFrame);

     // Close the codecs
     avcodec_close(pCodecCtx);
     avcodec_close(pCodecCtxOrig);

     // Close the video file
      avformat_close_input(&pFormatCtx);

     return 0;
    }

    This is the command I use to compile:

    gcc -o tutorial01 tutorial01.c -lavformat -lavcodec -lswscale -lz
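
     One hedged note on the build line above: an explicit -l list like this leaves out libavutil (which provides av_malloc, av_frame_alloc and av_free), which on newer toolchains can show up as undefined references, and "header not found" errors usually mean the corresponding libav*-dev packages are not installed. Assuming the Ubuntu packages libavformat-dev, libavcodec-dev, libswscale-dev and libavutil-dev are present, a sketch of a build command that lets pkg-config supply the include paths and the full set of link flags:

     gcc -o tutorial01 tutorial01.c $(pkg-config --cflags --libs libavformat libavcodec libswscale libavutil)
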
  • Streaming RTP with ffmpeg and node.js to voip phone

    5 July 2023, by Nik Hendricks

     I am trying to implement SIP in node.js. Here is the library I am working on

     Upon receiving an invite request such as

     Received INVITE
     INVITE sip:201@192.168.1.2:5060 SIP/2.0
     Via: SIP/2.0/UDP 192.168.1.39:5062;branch=z9hG4bK1534941205
     From: "Nik" <sip:nik@192.168.1.2>;tag=564148403
     To: <sip:201@192.168.1.2>
     Call-ID: 2068254636@192.168.1.39
     CSeq: 2 INVITE
     Contact: <sip:nik@192.168.1.39:5062>
     Authorization: Digest username="Nik", realm="NRegistrar", nonce="1234abcd", uri="sip:201@192.168.1.2:5060", response="7fba16dafe3d60c270b774bd5bba524c", algorithm=MD5
     Content-Type: application/sdp
     Allow: INVITE, INFO, PRACK, ACK, BYE, CANCEL, OPTIONS, NOTIFY, REGISTER, SUBSCRIBE, REFER, PUBLISH, UPDATE, MESSAGE
     Max-Forwards: 70
     User-Agent: Yealink SIP-T42G 29.71.0.120
     Supported: replaces
     Allow-Events: talk,hold,conference,refer,check-sync
     Content-Length: 306

     v=0
     o=- 20083 20083 IN IP4 192.168.1.39
     s=SDP data
     c=IN IP4 192.168.1.39
     t=0 0
     m=audio 11782 RTP/AVP 0 8 18 9 101
     a=rtpmap:0 PCMU/8000
     a=rtpmap:8 PCMA/8000
     a=rtpmap:18 G729/8000
     a=fmtp:18 annexb=no
     a=rtpmap:9 G722/8000
     a=fmtp:101 0-15
     a=rtpmap:101 telephone-event/8000
     a=ptime:20
     a=sendrecv

     I can then parse the SDP into an object like this

     {
         "session":{
             "version":"0",
             "origin":"- 20084 20084 IN IP4 192.168.1.39",
             "sessionName":"SDP data"
         },
         "media":[
             {
                 "media":"audio",
                 "port":11784,
                 "protocol":"RTP/AVP",
                 "format":"0",
                 "attributes":[
                     "rtpmap:0 PCMU/8000",
                     "rtpmap:8 PCMA/8000",
                     "rtpmap:18 G729/8000",
                     "fmtp:18 annexb=no",
                     "rtpmap:9 G722/8000",
                     "fmtp:101 0-15",
                     "rtpmap:101 telephone-event/8000",
                     "ptime:20",
                     "sendrecv"
                 ]
             }
         ]
     }

     After sending the 100 and 180 responses with my library, I attempt to start an RTP stream with ffmpeg

     var port = SDPParser.parse(res.message.body).media[0].port
     var s = new STREAMER('output.wav', '192.168.1.39', port)

     with the following STREAMER class

     class Streamer{
         constructor(inputFilePath, rtpAddress, rtpPort){
             this.inputFilePath = 'output.wav';
             this.rtpAddress = rtpAddress;
             this.rtpPort = rtpPort;
         }

         start(){
             return new Promise((resolve) => {
                 const ffmpegCommand = `ffmpeg -re -i ${this.inputFilePath} -ar 8000 -f mulaw -f rtp rtp://${this.rtpAddress}:${this.rtpPort}`;
                 const ffmpegProcess = spawn(ffmpegCommand, { shell: true });

                 ffmpegProcess.stdout.on('data', (data) => {
                     data = data.toString()
                     //replace all instances of 127.0.0.1 with our local ip address
                     data = data.replace(new RegExp('127.0.0.1', 'g'), '192.168.1.3');

                     resolve(data.toString())
                 });

                 ffmpegProcess.stderr.on('data', (data) => {
                     // Handle stderr data if required
                     console.log(data.toString())
                 });

                 ffmpegProcess.on('close', (code) => {
                     // Handle process close event if required
                     console.log('close')
                     console.log(code.toString())
                 });

                 ffmpegProcess.on('error', (error) => {
                     // Handle process error event if required
                     console.log(error.toString())
                 });
             })
         }
     }

     the start() function resolves with the SDP that ffmpeg generates. I am starting to think that ffmpeg can't generate proper SDP for VoIP calls.
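
     One hedged observation (not something the post itself concludes): in the command built by start(), the later -f rtp overrides the earlier -f mulaw and no audio codec or channel count is forced, which is consistent with the generated SDP further down advertising PCMU at a dynamic payload type (97) with two channels rather than the static PCMU/8000 payload type 0 the phone offered. A sketch of a command that forces mono 8 kHz mu-law and payload type 0, using the caller's address and the m=audio port from the INVITE above (in the code these would still come from the parsed SDP):

     ffmpeg -re -i output.wav -acodec pcm_mulaw -ar 8000 -ac 1 -payload_type 0 -f rtp rtp://192.168.1.39:11782
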

     So when I create a 200 response with the following SDP

     v=0
     o=- 0 0 IN IP4 192.168.1.3
     s=Impact Moderato
     c=IN IP4 192.168.1.39
     t=0 0
     a=tool:libavformat 58.29.100
     m=audio 12123 RTP/AVP 97
     b=AS:128
     a=rtpmap:97 PCMU/8000/2

     the other line never picks up. From my understanding, the first INVITE from the caller provides SDP that tells me where to send the RTP stream to, along with the correct codecs and everything. I know that my wav file is currently PCMU and I can listen to it with ffplay and the provided SDP. What is required to make the other line pick up, specifically a Yealink T42G?
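
     For reference, a minimal sketch of an SDP answer that mirrors the caller's offer: it keeps one codec from the offer (PCMU, static payload type 0) plus telephone-event, and the c= line carries the answering side's own address, i.e. where it wants to receive RTP (192.168.1.3 and port 12123 are only placeholders borrowed from the ffmpeg-generated SDP above):

     v=0
     o=- 0 0 IN IP4 192.168.1.3
     s=-
     c=IN IP4 192.168.1.3
     t=0 0
     m=audio 12123 RTP/AVP 0 101
     a=rtpmap:0 PCMU/8000
     a=rtpmap:101 telephone-event/8000
     a=fmtp:101 0-15
     a=ptime:20
     a=sendrecv
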

     My full attempt looks like this

     Client.on('INVITE', (res) => {
         console.log("Received INVITE")
         var d = Client.Dialog(res).then(dialog => {
             dialog.send(res.CreateResponse(100))
             dialog.send(res.CreateResponse(180))
             var port = SDPParser.parse(res.message.body).media[0].port

             var s = new STREAMER('output.wav', '192.168.1.39', port)
             s.start().then(sdp => {
                 console.log(sdp.split('SDP:')[1])
                 var ok = res.CreateResponse(200)
                 ok.body = sdp.split('SDP:')[1]
                 dialog.send(ok)
             })

             dialog.on('BYE', (res) => {
                 console.log("BYE")
                 dialog.send(res.CreateResponse(200))
                 dialog.kill()
             })
         })
     })

     I have provided a link to my library at the top of this message. My current problem is in the examples/Client folder.

     I'm not sure what could be going wrong here. Maybe I'm not using the right format or codec for the VoIP phone, but I don't see what's wrong with the SDP, especially since I can listen to the SDP generated by ffmpeg when I stream RTP back to the same computer I run ffplay on. Any help is greatly appreciated.

     Update

     As a test I decided to send the caller back SDP that was generated by a Yealink phone like itself, but with some modifications

     v=0
     o=- ${this.output_port} ${this.output_port} IN IP4 192.168.1.39
     s=SDP data
     c=IN IP4 192.168.1.39
     t=0 0
     m=audio ${this.output_port} RTP/AVP 0 8 18 9 101
     a=rtpmap:0 PCMU/8000
     a=rtpmap:8 PCMA/8000
     a=rtpmap:18 G729/8000
     a=fmtp:18 annexb=no
     a=rtpmap:9 G722/8000
     a=fmtp:101 0-15
     a=rtpmap:101 telephone-event/8000
     a=ptime:20
     a=sendrecv

     Finally, the phone that makes the call in the first place will fully answer, but there is still no audio stream. I notice that if I change the IP address or port to something wrong, the other phone will hear its own audio instead of just silence, so this leads me to believe I am headed in the right direction, and maybe the problem lies in not sending the right audio format for what I'm describing.

     Additionally, whenever I use ffmpeg to stream my audio over RTP, I notice that it sees the file format as pcm_alaw, 8000 Hz, mono, s16, 64 kb/s. My new SDP describes using both ulaw and alaw, but I'm not sure which one it says it prefers.
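
     For context (background, not something stated in the post): in the RTP/AVP numbers used in these SDP bodies, payload type 0 is PCMU (mu-law) and 8 is PCMA (A-law), both 8 kHz mono, and their order on the m=audio line expresses preference. Since ffmpeg reports the source file as pcm_alaw, one option is to either advertise PCMA (type 8) first in the answer or convert the file to mu-law so it matches the PCMU entry; a hedged sketch of the latter (output_ulaw.wav is just a placeholder name):

     ffmpeg -i output.wav -ar 8000 -ac 1 -acodec pcm_mulaw output_ulaw.wav
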

     v=0
     o=- ${this.output_port} ${this.output_port} IN IP4 192.168.1.39
     s=SDP data
     c=IN IP4 192.168.1.39
     t=0 0
     m=audio ${this.output_port} RTP/AVP 0 101
     a=rtpmap:0 PCMU/8000
     a=fmtp:101 0-15
     a=rtpmap:101 telephone-event/8000
     a=ptime:0
     a=sendrecv

     I have been able to simplify the SDP down to this. This will let the other phone actually pick up and not hear its own audio; it's just a completely dead-air stream.

  • H264 Encoding - Could not play video using VLC Player

    31 March 2016, by bot1131357

     I am having trouble encoding an H264 video correctly using FFmpeg libav. I could not play the encoded video in VLC media player, and although I could play the video in MPC-HC, the time shows 00:00/00:00. Clearly I'm missing something.

    The Media info from MPC-HC shows this:

    General
    Format : AVC
    Format/Info : Advanced Video Codec
    File size : 110 KiB
    Duration : 2s 400ms
    Overall bit rate : 375 Kbps
    Writing library : x264 core 148 r2665 a01e339
    Encoding settings : cabac=0 / ref=3 / deblock=1:0:0 / analyse=0x1:0x111 / me=hex / subme=7 / psy=1 / psy_rd=1.00:0.00 / mixed_ref=1 / me_range=16 / chroma_me=1 / trellis=1 / 8x8dct=0 / cqm=0 / deadzone=21,11 / fast_pskip=1 / chroma_qp_offset=-2 / threads=7 / lookahead_threads=1 / sliced_threads=0 / nr=0 / decimate=1 / interlaced=0 / bluray_compat=0 / constrained_intra=0 / bframes=0 / weightp=0 / keyint=12 / keyint_min=1 / scenecut=40 / intra_refresh=0 / rc_lookahead=12 / rc=abr / mbtree=1 / bitrate=2000 / ratetol=1.0 / qcomp=0.60 / qpmin=0 / qpmax=69 / qpstep=4 / ip_ratio=1.40 / aq=1:1.00

    Video
    Format : AVC
    Format/Info : Advanced Video Codec
    Format profile : Baseline@L2.1
    Format settings, CABAC : No
    Format settings, ReFrames : 3 frames
    Format settings, GOP : M=1, N=12
    Duration : 2s 400ms
    Bit rate : 2 000 Kbps
    Width : 320 pixels
    Height : 240 pixels
    Display aspect ratio : 4:3
    Frame rate mode : Variable
    Frame rate : 20.833 fps
    Color space : YUV
    Chroma subsampling : 4:2:0
    Bit depth : 8 bits
    Scan type : Progressive
    Bits/(Pixel*Frame) : 1.250
    Stream size : 586 KiB
    Writing library : x264 core 148 r2665 a01e339
    Encoding settings : cabac=0 / ref=3 / deblock=1:0:0 / analyse=0x1:0x111 / me=hex / subme=7 / psy=1 / psy_rd=1.00:0.00 / mixed_ref=1 / me_range=16 / chroma_me=1 / trellis=1 / 8x8dct=0 / cqm=0 / deadzone=21,11 / fast_pskip=1 / chroma_qp_offset=-2 / threads=7 / lookahead_threads=1 / sliced_threads=0 / nr=0 / decimate=1 / interlaced=0 / bluray_compat=0 / constrained_intra=0 / bframes=0 / weightp=0 / keyint=12 / keyint_min=1 / scenecut=40 / intra_refresh=0 / rc_lookahead=12 / rc=abr / mbtree=1 / bitrate=2000 / ratetol=1.0 / qcomp=0.60 / qpmin=0 / qpmax=69 / qpstep=4 / ip_ratio=1.40 / aq=1:1.00

    I noticed something odd in the above info:
    - The frame rate is 20.833 fps, instead of the specified 10 fps.
    - Duration of 2s 400ms did not seem right either, since the video played for more than 4s.

    Also, (AVFrame* picture)->pict_type is always set to AV_PICTURE_TYPE_NONE. I don’t think this is normal.

     The library that I'm using is ffmpeg-20160219-git-98a0053-win32-dev. I would really appreciate it if you could help me out of this confusion.

    /*
    * Video encoding example
    */
    char filename[] = "test.mp4";
    int main(int argc, char** argv)
    {
       AVCodec *codec = NULL;
       AVCodecContext *codecCtx= NULL;
       AVFormatContext *pFormatCtx = NULL;
       AVStream * pVideoStream = NULL;
       AVFrame *picture = NULL;

       int i, x, y,            //
           ret,                // Return value
           got_packet_ptr;     // Data encoded into packet

       printf("Video encoding\n");

       // Register all formats and codecs
       av_register_all();

       // allocate context
       pFormatCtx = avformat_alloc_context();
       memcpy(pFormatCtx->filename,filename,
           min(strlen(filename), sizeof(pFormatCtx->filename)));

       // guess format
       pFormatCtx->oformat = av_guess_format("h264", NULL, NULL);
       if (NULL==pFormatCtx->oformat)
       {
            cerr << "Could not guess output format" << endl;
           return -1;
       }  

       // Find the codec.
       codec = avcodec_find_encoder(pFormatCtx->oformat->video_codec);
       if (codec == NULL) {
           fprintf(stderr, "Codec not found\n");
           return -1;
       }

       // Set context
       int framerate = 10;
       codecCtx = avcodec_alloc_context3(codec);
       avcodec_get_context_defaults3(codecCtx, codec);
       codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
       codecCtx->profile = FF_PROFILE_H264_BASELINE;
       // Resolution must be a multiple of two.
       codecCtx->width  = 320;
       codecCtx->height = 240;

       codecCtx->bit_rate = 2000000;
       codecCtx->time_base.den = framerate;
       codecCtx->time_base.num = 1;
       codecCtx->gop_size = 12; // emit one intra frame every twelve frames at most

       // Open the codec.  
       if (avcodec_open2(codecCtx, codec, NULL) < 0)
       {
           printf("Cannot open video codec\n");
           return -1;
       }

       // Add stream to pFormatCtx
       pVideoStream = avformat_new_stream(pFormatCtx, codec);
       if (!pVideoStream)
       {
           printf("Cannot add new video stream\n");
           return -1;
       }
       pVideoStream->codec = codecCtx;
       pVideoStream->time_base.den = framerate;
       pVideoStream->time_base.num = 1;

       if (avio_open2(&pFormatCtx->pb, filename, AVIO_FLAG_WRITE, NULL, NULL) < 0)
       {
           printf("Cannot open file\n");
           return -1;
       }

       // Write file header.
       avformat_write_header(pFormatCtx, NULL);

       // Create frame
       picture= av_frame_alloc();
       picture->format = codecCtx->pix_fmt;
       picture->width  = codecCtx->width;
       picture->height = codecCtx->height;

       int bufferImgSize = av_image_get_buffer_size(codecCtx->pix_fmt, codecCtx->width,
                       codecCtx->height,1);    
       av_image_alloc(picture->data, picture->linesize, codecCtx->width, codecCtx->height,                 codecCtx->pix_fmt, 32);

       AVPacket avpkt;

       /* encode 1 second of video */
       for(i=0;i<50;i++)
       {
           /* prepare a dummy image */
           /* Y */
           for(y=0;y<codecCtx->height;y++)
           {
               for(x=0;x<codecCtx->width;x++)
               {
                   picture->data[0][y * picture->linesize[0] + x] = x + y + i * 3;
               }
           }
           /* Cb and Cr */
           for(y=0;y<codecCtx->height/2;y++)
           {
               for(x=0;x<codecCtx->width/2;x++)
               {
                   picture->data[1][y * picture->linesize[1] + x] = 128 + y + i * 2;
                   picture->data[2][y * picture->linesize[2] + x] = 64 + x + i * 5;
               }
           }

           // Get timestamp
           picture->pts = (float) i * (1000.0/(float)(codecCtx->time_base.den)) * 90;

           // Encode frame to packet
           av_init_packet(&avpkt);
           got_packet_ptr = 0;
           int error = avcodec_encode_video2(codecCtx, &avpkt, picture, &got_packet_ptr);
           if (!error && got_packet_ptr > 0)
           {
               // Write packet with frame.
               ret = (av_interleaved_write_frame(pFormatCtx, &avpkt) == 0);
           }
           av_packet_unref(&avpkt);
       }

       // Flush remaining encoded data
       while(1)
       {
           av_init_packet(&avpkt);
           got_packet_ptr = 0;
           // Encode frame to packet.
           int error = avcodec_encode_video2(codecCtx, &avpkt, NULL, &got_packet_ptr);
           if (!error && got_packet_ptr > 0)
           {
               // Write packet with frame.
               ret = (av_interleaved_write_frame(pFormatCtx, &avpkt) == 0);
           }
           else
           {
               break;
           }
           av_packet_unref(&avpkt);
       }
       av_write_trailer(pFormatCtx);

       av_packet_unref(&avpkt);
       av_frame_free(&picture);

       avcodec_close(codecCtx);
       av_free(codecCtx);

       cin.get();
    }
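
     A few hedged observations on the symptoms above rather than a confirmed fix: 50 encoded frames reported as 2 s 400 ms works out to exactly 20.833 fps, so the player is deriving a rate instead of reading the intended 10 fps; the pts formula writes 90 kHz-style values into a stream whose time_base was set to 1/10; and av_guess_format("h264", ...) selects the raw Annex B elementary-stream muxer, which stores no container timestamps or duration even though the file is named test.mp4. A minimal sketch of the usual pattern, reusing the variable names from the code above (not the original author's code): pick the container from the file name, count pts in codec time-base ticks, and rescale each packet to the stream time base before writing.

     // Sketch only: assumes the same declarations (pFormatCtx, codecCtx,
     // pVideoStream, picture, avpkt, got_packet_ptr, i) as the code above.
     avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, filename); // picks the mp4 muxer for "test.mp4"

     // ... codec, stream and avio setup as above ...

     picture->pts = i;                      // frame index, in codecCtx->time_base units (1/10 s)

     av_init_packet(&avpkt);
     avpkt.data = NULL;                     // let the encoder allocate the packet buffer
     avpkt.size = 0;
     got_packet_ptr = 0;
     if (avcodec_encode_video2(codecCtx, &avpkt, picture, &got_packet_ptr) == 0 && got_packet_ptr) {
         // Convert pts/dts/duration from the codec time base to the stream time base.
         av_packet_rescale_ts(&avpkt, codecCtx->time_base, pVideoStream->time_base);
         avpkt.stream_index = pVideoStream->index;
         av_interleaved_write_frame(pFormatCtx, &avpkt);
     }
     av_packet_unref(&avpkt);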