Advanced search

Media (0)

Word: - Tags -/presse-papier

No media matching your criteria is available on the site.

Other articles (17)

  • Supporting all media types

    13 April 2011, by

    Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)

  • Organize by category

    17 May 2013, by

    In MediaSPIP, a section has two names: category and section (rubrique).
    The various documents stored in MediaSPIP can be filed into different categories. You can create a category by clicking on "publish a category" in the publish menu at the top right (after logging in). A category can itself be placed inside another category, so you can build a tree of categories.
    The next time you publish a document, the newly created category will be offered (...)

  • Automated installation script of MediaSPIP

    25 April 2011, by

    To overcome the difficulties, which are mainly due to installing server-side software dependencies, an "all-in-one" installation script written in bash was created to make this step easier on a server running a compatible Linux distribution.
    To use it, you must have SSH access to your server and a root account, which the script needs in order to install the dependencies. Contact your provider if you do not have these.
    Documentation on using this installation script is available here.
    The code of this (...)
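
    For orientation, a typical invocation would look something like the lines below. The file name and URL are purely illustrative (the article truncates before giving the real ones; see the linked documentation):

    wget https://example.org/mediaspip_install.sh   # hypothetical location
    sudo bash mediaspip_install.sh                  # must run as root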

On other sites (5954)

  • FFmpeg: muxing video with libavformat/avcodec, but the output can’t be played

    10 August 2017, by tqn

    I’m trying to write an app that takes an input video and crops it to a square video, ignoring the audio stream. Because of the poor performance of invoking the command line, I’m trying to use libavcodec and libavformat directly. But the output isn’t playable by any video player and its duration is 0, although I wrote all the frames. Here is my code.

    void convert_video(char* input) {
       AVFormatContext *pFormatCtx = NULL;
       int             i, videoStreamIndex;
       AVCodecContext  *pCodecCtx = NULL;
       AVCodec         *pCodec = NULL;
       AVFrame         *pFrame = NULL;
       AVFrame         *pFrameSquare = NULL;
       AVPacket        packet, outPacket;
       int             frameFinished;
       int             numBytes;
       uint8_t         *buffer = NULL;
       AVCodec         *pEncodec = NULL;
       AVFormatContext *poFormatCxt = NULL;
       MuxOutputStream    videoStream = {0}, audioStream = {0};
       int tar_w, tar_h;

       const enum AVPixelFormat pic_format = AV_PIX_FMT_YUV420P;
       const enum AVCodecID codec_id = AV_CODEC_ID_H264;
       AVDictionary    *optionsDict = NULL;
       char output[50];
       sprintf(output, "%soutput.mp4", ANDROID_SDCARD);

       // Register all formats and codecs
       av_register_all();

       // Open video file
       if(avformat_open_input(&pFormatCtx, input, NULL, NULL)!=0)
           return; // Couldn't open file
       avformat_alloc_output_context2(&poFormatCxt, NULL, NULL, output);

       // Retrieve stream information
       if(avformat_find_stream_info(pFormatCtx, NULL)<0)
           return; // Couldn't find stream information

       // Find the first video stream
       videoStreamIndex=-1;
       for(i=0; i<pFormatCtx->nb_streams; i++)
           if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
               videoStreamIndex=i;
               break;
           }
       if(videoStreamIndex==-1)
           return; // Didn't find a video stream

       // Get a pointer to the codec context for the video stream
       pCodecCtx = pFormatCtx->streams[videoStreamIndex]->codec;
       tar_w = pCodecCtx->width > pCodecCtx->height ? pCodecCtx->height : pCodecCtx->width;
       tar_h = tar_w;

       // Find the decoder for the video stream
       pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
       pEncodec = avcodec_find_encoder(codec_id);

       add_stream_mux(&videoStream, poFormatCxt, &pEncodec, codec_id, tar_w, tar_h);
       videoStream.st[0].time_base = pFormatCtx->streams[videoStreamIndex]->time_base;
       videoStream.st[0].codec->time_base = videoStream.st[0].time_base;
       videoStream.st[0].codec->time_base.den *= videoStream.st[0].codec->ticks_per_frame;
    //    add_stream(&audioStream, poFormatCxt, &)
       open_video(poFormatCxt, pEncodec, &videoStream, optionsDict);
       int ret = avio_open(&poFormatCxt->pb, output, AVIO_FLAG_WRITE);

       // Open codec
       if(avcodec_open2(pCodecCtx, pCodec, &optionsDict) < 0)
           return; // Could not open codec

       ret = avformat_write_header(poFormatCxt, &optionsDict);
       if (ret != 0) {
           ANDROID_LOG("Died");
       }

       // Allocate video frame
       pFrame=av_frame_alloc();
       pFrame->format = videoStream.st->codec->pix_fmt;
       pFrame->width = pCodecCtx->width;
       pFrame->height = pCodecCtx->height;
       av_frame_get_buffer(pFrame, 32);

       // Allocate an AVFrame structure
       pFrameSquare=av_frame_alloc();
       if(pFrameSquare==NULL)
           return;

       // Determine required buffer size and allocate buffer
       numBytes=avpicture_get_size(pic_format, tar_w,
                                   tar_h);
       buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

       // Assign appropriate parts of buffer to image planes in pFrameSquare
       // Note that pFrameSquare is an AVFrame, but AVFrame is a superset
       // of AVPicture
       ret = avpicture_fill((AVPicture *)pFrameSquare, buffer, pic_format,
                      tar_w, tar_h);
       if (ret < 0) {
           ANDROID_LOG("Can't fill picture");
           return;
       }

       // Read frames and save first five frames to disk
       i=0;
       ret = av_read_frame(pFormatCtx, &packet);
       while(ret >= 0) {
           // Is this a packet from the video stream?
           if(packet.stream_index == videoStreamIndex) {
               // Decode video frame
    //            av_packet_rescale_ts(&packet, videoStream.st->time_base, videoStream.st->codec->time_base);
               avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
                                     &packet);
    //            while (!frameFinished) {
    //                avcodec_decode_video2(videoStream.st->codec, pFrame, &frameFinished, NULL);
    //            }
               ANDROID_LOG("Trying to decode frame %d with result %d", i, frameFinished);
               ret = av_picture_crop((AVPicture*) pFrameSquare, (AVPicture*) pFrame, pic_format, 0, 0);
               if (ret < 0) {
                   ANDROID_LOG("Can't crop image");
               }
    //            av_frame_get_best_effort_timestamp(pFrame);
    //            av_rescale_q()

               if(frameFinished) {

                   // Save the frame to disk
                   av_init_packet(&outPacket);
    //                av_packet_rescale_ts(&outPacket, videoStream.st->codec->time_base, videoStream.st->time_base);
                   pFrameSquare->width = tar_w;
                   pFrameSquare->height = tar_h;
                   pFrameSquare->format = pic_format;
                   pFrameSquare->pts = ++videoStream.next_pts;
                   ret = avcodec_encode_video2(videoStream.st->codec, &outPacket, pFrameSquare, &frameFinished);

    //                int count = 0;
    //                while (!frameFinished && count++ < 6) {
    //                    ret = avcodec_encode_video2(videoStream.st->codec, &outPacket, NULL, &frameFinished);
    //                }
                   if (frameFinished) {
                       ANDROID_LOG("Writing frame %d", i);
                       outPacket.stream_index = videoStreamIndex;
                       av_interleaved_write_frame(poFormatCxt, &outPacket);
                   }
                   av_free_packet(&outPacket);
               }
           }

       // Free the packet that was allocated by av_read_frame
       av_free_packet(&packet);
           ret = av_read_frame(pFormatCtx, &packet);
       }

       ret = av_write_trailer(poFormatCxt);
       if (ret < 0) {
           ANDROID_LOG("Couldn't write trailer");
       } else {
           ANDROID_LOG("Video convert finished");
       }

       // Free the RGB image
       av_free(buffer);
       av_free(pFrameSquare);

       // Free the YUV frame
       av_free(pFrame);

       // Close the codec
       avcodec_close(pCodecCtx);
    //    avcodec_close(pEncodecCtx);

       // Close the video file
       avformat_close_input(&pFormatCtx);

       return;
    }
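
    One detail worth checking in the write path above: the muxer expects each packet’s timestamps in the output stream’s time_base, and the packet’s stream_index must be the index of the output stream (videoStream.st), not the input’s videoStreamIndex. A minimal sketch of what the writing step might look like, reusing the question’s variable names (untested against this code, so treat it as an assumption):

    if (frameFinished) {
        // Convert timestamps from the encoder time_base to the muxer time_base
        av_packet_rescale_ts(&outPacket,
                             videoStream.st->codec->time_base,
                             videoStream.st->time_base);
        // Use the output stream's index, which may differ from the input's
        outPacket.stream_index = videoStream.st->index;
        av_interleaved_write_frame(poFormatCxt, &outPacket);
    }

    Unscaled timestamps are a common cause of a container reporting a duration of 0, since the pts values the muxer sees are then far too small.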

    Helper

    #define STREAM_DURATION   10.0
    #define STREAM_FRAME_RATE 25 /* 25 images/s */
    #define STREAM_PIX_FMT    AV_PIX_FMT_YUV420P /* default pix_fmt */

    /* Add an output stream. */
    void add_stream_mux(MuxOutputStream *ost, AVFormatContext *oc,
                          AVCodec **codec,
                      enum AVCodecID codec_id, int width, int height)
    {
       AVCodecContext *codecCtx;
       int i;
       /* find the encoder */
       *codec = avcodec_find_encoder(codec_id);
       if (!(*codec)) {
           fprintf(stderr, "Could not find encoder for '%s'\n",
                   avcodec_get_name(codec_id));
           exit(1);
       }
       ost->st = avformat_new_stream(oc, *codec);
       if (!ost->st) {
           fprintf(stderr, "Could not allocate stream\n");
           exit(1);
       }
       ost->st->id = oc->nb_streams-1;
       codecCtx = ost->st->codec;
       switch ((*codec)->type) {
           case AVMEDIA_TYPE_AUDIO:
               codecCtx->sample_fmt  = (*codec)->sample_fmts ?
                                (*codec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
               codecCtx->bit_rate    = 64000;
               codecCtx->sample_rate = 44100;
               if ((*codec)->supported_samplerates) {
                   codecCtx->sample_rate = (*codec)->supported_samplerates[0];
                   for (i = 0; (*codec)->supported_samplerates[i]; i++) {
                       if ((*codec)->supported_samplerates[i] == 44100)
                           codecCtx->sample_rate = 44100;
                   }
               }
               codecCtx->channels        = av_get_channel_layout_nb_channels(codecCtx->channel_layout);
               codecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
               if ((*codec)->channel_layouts) {
                   codecCtx->channel_layout = (*codec)->channel_layouts[0];
                   for (i = 0; (*codec)->channel_layouts[i]; i++) {
                       if ((*codec)->channel_layouts[i] == AV_CH_LAYOUT_STEREO)
                           codecCtx->channel_layout = AV_CH_LAYOUT_STEREO;
                   }
               }
               codecCtx->channels        = av_get_channel_layout_nb_channels(codecCtx->channel_layout);
               ost->st->time_base = (AVRational){ 1, codecCtx->sample_rate };
               break;
           case AVMEDIA_TYPE_VIDEO:
               codecCtx->codec_id = codec_id;
               codecCtx->bit_rate = 400000;
               /* Resolution must be a multiple of two. */
               codecCtx->width    = width;
           codecCtx->height   = height;
               /* timebase: This is the fundamental unit of time (in seconds) in terms
                * of which frame timestamps are represented. For fixed-fps content,
                * timebase should be 1/framerate and timestamp increments should be
                * identical to 1. */
               ost->st->time_base = (AVRational){ 1, STREAM_FRAME_RATE };
               codecCtx->time_base       = ost->st->time_base;
               codecCtx->gop_size      = 12; /* emit one intra frame every twelve frames at most */
               codecCtx->pix_fmt       = STREAM_PIX_FMT;
               if (codecCtx->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
                   /* just for testing, we also add B frames */
                   codecCtx->max_b_frames = 2;
               }
               if (codecCtx->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
                   /* Needed to avoid using macroblocks in which some coeffs overflow.
                    * This does not happen with normal video, it just happens here as
                    * the motion of the chroma plane does not match the luma plane. */
                   codecCtx->mb_decision = 2;
               }
               break;
           default:
               break;
       }
       /* Some formats want stream headers to be separate. */
       if (oc->oformat->flags & AVFMT_GLOBALHEADER)
           codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    static void open_video(AVFormatContext *oc, AVCodec *codec, MuxOutputStream *ost, AVDictionary *opt_arg)
    {
       int ret;
       AVCodecContext *c = ost->st->codec;
       AVDictionary *opt = NULL;
       av_dict_copy(&opt, opt_arg, 0);
       /* open the codec */
       ret = avcodec_open2(c, codec, &opt);
       av_dict_free(&opt);
       if (ret < 0) {
           fprintf(stderr, "Could not open video codec: %s\n", av_err2str(ret));
           exit(1);
       }
       /* allocate and init a re-usable frame */
       ost->frame = alloc_picture(c->pix_fmt, c->width, c->height);
       if (!ost->frame) {
           fprintf(stderr, "Could not allocate video frame\n");
           exit(1);
       }
       /* If the output format is not YUV420P, then a temporary YUV420P
        * picture is needed too. It is then converted to the required
        * output format. */
       ost->tmp_frame = NULL;
       if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
           ost->tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, c->width, c->height);
           if (!ost->tmp_frame) {
               fprintf(stderr, "Could not allocate temporary picture\n");
               exit(1);
           }
       }
    }

    I’m afraid I’m setting the wrong pts or time_base on the frames. I also see that some of the first frames are lost when decoding and encoding: frameFinished stays 0. I saw a post saying I have to flush the decoder with avcodec_decode_video2(videoStream.st->codec, pFrame, &frameFinished, NULL), but after trying a few times frameFinished is still 0, and avcodec_encode_video2(videoStream.st->codec, &outPacket, NULL, &frameFinished) makes the next encode call throw an error. So how can I recover all the lost frames? I’m using FFmpeg version 3.0.1.
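
    For reference, the usual drain pattern with the 3.0-era API feeds the decoder an empty packet (data = NULL, size = 0) until it stops returning frames, then flushes the encoder with a NULL frame. Note that the decoder flush has to go to the decoding context (pCodecCtx above), not to videoStream.st->codec, which is the encoder; that mix-up alone could explain the behaviour described. A sketch under those assumptions, not tested against the code above:

    AVPacket flushPkt;
    av_init_packet(&flushPkt);
    flushPkt.data = NULL;
    flushPkt.size = 0;

    // Drain the decoder: each call may return one buffered frame
    do {
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &flushPkt);
        // crop, set pts, encode and write pFrame here while frameFinished != 0
    } while (frameFinished);

    // Drain the encoder: a NULL frame asks it to emit its remaining packets
    do {
        avcodec_encode_video2(videoStream.st->codec, &outPacket, NULL, &frameFinished);
        // write outPacket here while frameFinished != 0
    } while (frameFinished);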

  • FFmpeg: ignore a stream if it doesn’t exist

    14 July 2017, by Martin Dawson

    I’m using ffmpeg to convert files to .mp3 and extract cover images from the metadata.

    This works fine for files that have cover images; files that don’t have one throw the error:

    Output #1 does not contain any stream

    ffmpeg -i kalimba.mp3 -y test.mp3 -acodec copy test.jpg

    How can I tell ffmpeg to just ignore streams that don’t exist, and continue converting to .mp3 when no cover image exists in the metadata?
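
    As far as I know, ffmpeg has no per-output "skip if empty" switch, so a common workaround is to probe the file first and only request the cover image when a video (attached-picture) stream is actually present, along these lines:

    ffprobe -v error -select_streams v -show_entries stream=codec_type -of csv=p=0 kalimba.mp3

    If that prints anything, run the original command with the test.jpg output; otherwise convert to .mp3 only.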

  • C# Process Multitasking and Limitations

    30 June 2017, by Komak57

    To give you an intro: some colleagues and I suddenly needed a dedicated encoding machine to receive dynamic requests from a server. I wrote a TCP server and put it on a Pi to listen for requests. I also wrote a TCP client that connects to the Pi and tells it how many cores it has, how many encoders it can run, what kind of latency is involved, and other relevant system information. When the server receives an instruction, it sends a request to a valid TCP client, which then starts a Process that uses FFmpeg to encode based on some parameters. I’ve been having a weird issue where, when stress-testing an 8-core system, it starts 8 encoders but only 3 of them actually encode. The remaining 5 sit at 0%, idle, waiting for a slot to open up.

    tl;dr - I’ve determined that Process.StartInfo.UseShellExecute == false makes the output handling run on a thread-based system, and due to my system’s current limitations only 3 of those threads run simultaneously. This setting is required for Process.BeginOutputReadLine() and Process.BeginErrorReadLine() to get useful information about the encoding process.
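
    If the ceiling really is thread-based, it may be worth noting that the OutputDataReceived/ErrorDataReceived callbacks are dispatched on ThreadPool threads, so raising the pool’s minimum size before starting the encoders is one speculative thing to try (a sketch, not a confirmed fix for this setup):

    using System;
    using System.Threading;

    // Raise the ThreadPool floor so the output callbacks of all encoders
    // can run concurrently instead of waiting for the pool to grow on demand.
    ThreadPool.GetMinThreads(out int workers, out int io);
    ThreadPool.SetMinThreads(Math.Max(workers, Environment.ProcessorCount * 2), io);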

    Program.cs

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Linq;
    using System.Text;
    using System.Text.RegularExpressions;
    using System.Threading;
    using System.Threading.Tasks;

    namespace EncodeMinimum
    {
       class Program
       {
           private static int threadCount = 0;
           /// <summary>
           /// Returns the number of available CPU Threads
           /// </summary>
           public static int ThreadCount
           {
               set
               {
                   threadCount = value;
                   CoreStatus = new bool[threadCount];
               }
               get
               {
                   return threadCount;
               }
           }
           private static int lastCore = -1;
           /// <summary>
           /// Increment to next available core and return. Causes IndexOutOfRangeException if there are no available cores.
           /// </summary>
           private static int nextCore
           {
               get
               {
                   int start = lastCore;
                   lastCore++;
                   if (lastCore >= ThreadCount)
                       lastCore = 0;
                   while (CoreStatus[lastCore] && lastCore != start)
                   {
                       lastCore++;
                       if (lastCore >= ThreadCount)
                           lastCore = 0;
                   }
                   if (lastCore == start && CoreStatus[lastCore])
                   {
                       throw new IndexOutOfRangeException("No valid cores available.");
                   }
                   return lastCore;
               }
           }
           private static bool[] CoreStatus = new bool[0];
           private static Dictionary<int, Encoder> tasks = new Dictionary<int, Encoder>();

           /// <summary>
           /// IntPtr representing the affinity of a single core. Max of 1 of 16 cores.
           /// </summary>
           public static IntPtr[] SingleCore = new IntPtr[]{ (IntPtr)0x0001, (IntPtr)0x0002, (IntPtr)0x0004, (IntPtr)0x0008,
                                                       (IntPtr)0x0010,(IntPtr)0x0020,(IntPtr)0x0040,(IntPtr)0x0080,
                                                       (IntPtr)0x0100,(IntPtr)0x0200,(IntPtr)0x0400,(IntPtr)0x0800,
                                                       (IntPtr)0x1000,(IntPtr)0x2000,(IntPtr)0x4000,(IntPtr)0x8000};

           /// <summary>
           /// Allows for compatibility between Linux and Windows operating systems, sorta.
           /// </summary>
           public static bool IsLinux
           {
               get
               {
                   PlatformID p = Environment.OSVersion.Platform;
                   return (p == PlatformID.Unix);
               }
           }

           static void Main(string[] args)
           {
               Console.WriteLine("[" + DateTime.Now.ToString("h:mm:ss") + "] Program Start");
               ThreadCount = Environment.ProcessorCount;
               bool HandleText = true;
               for (int id = 0; id < ThreadCount; id++)
               {
                   Console.WriteLine("[" + DateTime.Now.ToString("h:mm:ss") + "] Creating Encoding process for ID " + id);
                   tasks.Add(id, new Encoder(id, new Process(), nextCore));
                   CoreStatus[tasks[id].Core] = true;

                   if (Program.IsLinux)
                       tasks[id].Process.StartInfo.FileName = "/usr/bin/ffmpeg";
                   else
                       tasks[id].Process.StartInfo.FileName = @"C:\FFMEPG\ffmpeg.exe";
                   tasks[id].Process.StartInfo.Arguments = String.Format("-y -i {0} -threads 1 -c:v libx264 -preset ultrafast -s {1} -r {2} -b:v {3}k -c:a aac -b:a 41k -f flv {4}", "./original.mp4", "1280x720", "60", "7500", "./out-"+id+".flv");
                   tasks[id].Process.StartInfo.Verb = "runas";
                   if (HandleText)
                   {
                       tasks[id].Process.StartInfo.CreateNoWindow = true; // true
                       tasks[id].Process.StartInfo.UseShellExecute = false; //false
                       tasks[id].Process.StartInfo.StandardOutputEncoding = Encoding.UTF8;
                       tasks[id].Process.OutputDataReceived += new DataReceivedEventHandler(ScannerHandler);
                       tasks[id].Process.ErrorDataReceived += new DataReceivedEventHandler(ScannerHandler);
                       tasks[id].Process.StartInfo.RedirectStandardOutput = true; // true
                       tasks[id].Process.StartInfo.RedirectStandardError = true; // true
                   } else
                   {
                       tasks[id].Process.StartInfo.CreateNoWindow = false;
                       tasks[id].Process.StartInfo.UseShellExecute = true;
                   }

                   try
                   {
                       Console.WriteLine("[" + DateTime.Now.ToString("h:mm:ss") + "] Starting Encoder for stream " + id);
                       tasks[id].Process.Start();
                       if (HandleText)
                       {
                           tasks[id].Process.BeginOutputReadLine();
                           tasks[id].Process.BeginErrorReadLine();
                       }
                       tasks[id].Process.ProcessorAffinity = Program.SingleCore[tasks[id].Core];
                       // Used in multithreaded operations to immediately notify when a process has closed
                       /*tasks[id].Process.WaitForExit();
                       CoreStatus[tasks[id].Core] = false;
                       tasks.Remove(id);*/
                   }
                   catch (Exception e)
                   {
                       Console.WriteLine("[" + DateTime.Now.ToString("h:mm:ss") + "] Failed to start Encoder: " + e.ToString());
                       CoreStatus[tasks[id].Core] = false;
                       tasks.Remove(id);
                   }
               }

               // Asynchronously look for Escape Key
               Thread keyscan = new Thread(new ThreadStart(CheckKeys));
               keyscan.Start();

               // Sleep until Escape Key found
               while (keyscan.IsAlive)
                   Thread.Sleep(100);

               for(int i = 0; i < tasks.Count; i++)
               {
                   tasks[i].Process.Kill();
               }
           }

           /// <summary>
           /// Scans for Escape key 10 time a second
           /// </summary>
           static void CheckKeys()
           {
               // run thread until key is pressed
               while (Console.ReadKey(true).Key != ConsoleKey.Escape)
                   Thread.Sleep(100);
               return;
           }

           /// <summary>
           /// Prints output from processes directly to console.
           /// </summary>
           private static void ScannerHandler(object sendingProcess,
               DataReceivedEventArgs errLine)
           {
               if (!String.IsNullOrEmpty(errLine.Data))
               {
                   Process p = sendingProcess as Process;
                   string msg = errLine.Data.ToString().Trim();
                   Console.WriteLine("[" + DateTime.Now.ToString("h:mm:ss") + "] &lt;" + p.Id + ">: " + msg);
               }
           }
       }
    }

    Encoder.cs

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Linq;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;

    namespace EncodeMinimum
    {
       public class Encoder
       {
           public Process Process;
           public Thread Task;
           public int ID;
           public int Core;
           public Encoder(int ID, Process Task, int Core)
           {
               this.ID = ID;
               this.Process = Task;
               this.Core = Core;
           }
       }
    }

    I need a way to handle the output from each created process in a live-update manner, while being able to handle any number of simultaneous processes. I might be able to tell each process to write its output to a text file and read changes from it, but because there’s so little documentation on doing so, I’m not sure how to begin. Are there any alternatives for reading a process’s output without running into this threading issue?
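
    On the write-to-a-text-file idea above, one possible direction (a rough, untested sketch with placeholder paths and arguments; a Linux branch would go through /bin/sh -c instead of cmd.exe) is to let the shell redirect ffmpeg’s stderr, where it writes its progress, to a per-process log file and poll that file instead of using the async readers:

    using System;
    using System.Diagnostics;
    using System.IO;
    using System.Threading;

    var psi = new ProcessStartInfo
    {
        FileName = "cmd.exe",
        // "2>" sends ffmpeg's stderr to a log file instead of a pipe
        Arguments = "/C ffmpeg.exe -y -i original.mp4 -c:v libx264 out-0.flv 2> encoder-0.log",
        UseShellExecute = false,
        CreateNoWindow = true
    };
    var proc = Process.Start(psi);

    // FileShare.ReadWrite lets us read while ffmpeg is still writing
    using var log = new FileStream("encoder-0.log", FileMode.OpenOrCreate,
                                   FileAccess.Read, FileShare.ReadWrite);
    using var reader = new StreamReader(log);
    while (!proc.HasExited)
    {
        var line = reader.ReadLine();
        if (line != null) Console.WriteLine(line);
        else Thread.Sleep(100);   // wait for more output
    }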