
Other articles (18)

  • Websites made with MediaSPIP

    2 May 2011

    This page lists some websites based on MediaSPIP.

  • Creating farms of unique websites

    13 April 2011

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things): implementation costs to be shared between several different projects/individuals; rapid deployment of multiple unique sites; creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

  • Other interesting software

    13 April 2011

    We don’t claim to be the only ones doing what we do, and we certainly don’t claim to be the best; we simply try to do it well and to keep improving.
    The following list covers software that is more or less similar to MediaSPIP, or whose goals MediaSPIP more or less shares.
    We haven’t used or tested them, but you can take a look.
    Videopress
    Website : http://videopress.com/
    License : GNU/GPL v2
    Source code : (...)

On other sites (7153)

  • FFmpeg.swr_convert: audio to raw 16-bit PCM, to be used with XNA SoundEffect. Audio cuts out when I convert

    21 March 2019, by Robert Russell

    I want to resample MKV (VP8/OGG) and also raw 4-bit ADPCM to a raw 16-bit PCM byte[] that can be loaded into a SoundEffect from the XNA library, so I can play the audio while other code displays the frames (the video side is working).
    I can read a 16-bit WAV file and play it, but when I resample something it doesn’t play all the way through. One file is 3 minutes and 15 seconds long, yet I only get 13 seconds and 739 ms before it stops playing. I have been learning by finding code samples in C++ and adapting them to C# with FFmpeg.AutoGen.

    Below is my best attempt at resampling.

        int nb_samples = Frame->nb_samples;
        int output_nb_samples = nb_samples;
        int nb_channels = ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_STEREO);
        int bytes_per_sample = ffmpeg.av_get_bytes_per_sample(AVSampleFormat.AV_SAMPLE_FMT_S16) * nb_channels;
        int bufsize = ffmpeg.av_samples_get_buffer_size(null, nb_channels, nb_samples,
                                                        AVSampleFormat.AV_SAMPLE_FMT_S16, 1);

        byte*[] b = Frame->data;
        fixed (byte** input = b)
        {
            byte* output = null;
            ffmpeg.av_samples_alloc(&output, null,
                nb_channels,
                nb_samples,
                (AVSampleFormat)Frame->format, 0);

            // Buffer input
            Ret = ffmpeg.swr_convert(Swr, &output, output_nb_samples / 2, input, nb_samples);
            CheckRet();
            WritetoMs(output, 0, Ret * bytes_per_sample);
            output_nb_samples -= Ret;

            // Drain buffer
            while ((Ret = ffmpeg.swr_convert(Swr, &output, output_nb_samples, null, 0)) > 0)
            {
                CheckRet();
                WritetoMs(output, 0, Ret * bytes_per_sample);
                output_nb_samples -= Ret;
            }
        }

    I changed all of that to this, but it cuts off even sooner.

        Channels = ffmpeg.av_get_channel_layout_nb_channels(OutFrame->channel_layout);
        int nb_channels = ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_STEREO);
        int bytes_per_sample = ffmpeg.av_get_bytes_per_sample(AVSampleFormat.AV_SAMPLE_FMT_S16) * nb_channels;

        if ((Ret = ffmpeg.swr_convert_frame(Swr, OutFrame, Frame)) >= 0)
            WritetoMs(*OutFrame->extended_data, 0, OutFrame->nb_samples * bytes_per_sample);
        CheckRet();

    Both versions use a function to set up Swr; it runs once, after the first frame is decoded.

        private void PrepareResampler()
        {
            // Describe the desired output: stereo, signed 16-bit, same sample rate as the input.
            ffmpeg.av_frame_copy_props(OutFrame, Frame);
            OutFrame->channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;
            OutFrame->format = (int)AVSampleFormat.AV_SAMPLE_FMT_S16;
            OutFrame->sample_rate = Frame->sample_rate;
            OutFrame->channels = 2;

            Swr = ffmpeg.swr_alloc();
            if (Swr == null)
                throw new Exception("SWR = Null");

            // Configure the resampler from the two frames, then initialise it.
            Ret = ffmpeg.swr_config_frame(Swr, OutFrame, Frame);
            CheckRet();
            Ret = ffmpeg.swr_init(Swr);
            CheckRet();
            Ret = ffmpeg.swr_is_initialized(Swr);
            CheckRet();
        }

    This is where I take the output and load it into the SoundEffect:

        private void ReadAll()
        {
            using (Ms = new MemoryStream())
            {
                // Demux and decode every packet; Decode() writes converted PCM into Ms.
                while (true)
                {
                    Ret = ffmpeg.av_read_frame(Format, Packet);
                    if (Ret == ffmpeg.AVERROR_EOF)
                        break;
                    CheckRet();
                    Decode();
                }

                if (Ms.Length > 0)
                {
                    // Wrap the accumulated 16-bit PCM in an XNA SoundEffect and play it.
                    se = new SoundEffect(Ms.ToArray(), 0, (int)Ms.Length, OutFrame->sample_rate, (AudioChannels)Channels, 0, 0);
                    //se.Duration; Stream->duration;

                    see = se.CreateInstance();
                    see.Play();
                }
            }
        }
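
    For comparison, here is a minimal per-frame sketch of how this conversion loop is often structured with FFmpeg.AutoGen. It is not taken from the question: the helper name ConvertFrame is made up, it reuses the question’s own fields (Swr, Frame, Ret, CheckRet, WritetoMs), and it assumes the binding exposes swr_get_out_samples. The two deliberate differences from the attempts above are that the output buffer is allocated in the output format (packed S16) and that the full buffer capacity is passed to swr_convert on every call, instead of a shrinking output_nb_samples.

        // Hypothetical helper, called once per decoded audio frame.
        private void ConvertFrame()
        {
            int nb_channels = ffmpeg.av_get_channel_layout_nb_channels(ffmpeg.AV_CH_LAYOUT_STEREO);
            int bytes_per_sample = ffmpeg.av_get_bytes_per_sample(AVSampleFormat.AV_SAMPLE_FMT_S16) * nb_channels;

            // Upper bound on the samples this frame can produce, including anything
            // still buffered inside the resampler.
            int max_out_samples = ffmpeg.swr_get_out_samples(Swr, Frame->nb_samples);

            byte* output = null;
            // Allocate the buffer in the *output* format (packed S16), not the input format.
            Ret = ffmpeg.av_samples_alloc(&output, null, nb_channels, max_out_samples,
                                          AVSampleFormat.AV_SAMPLE_FMT_S16, 0);
            CheckRet();

            byte*[] data = Frame->data;
            fixed (byte** input = data)
            {
                // Offer the full capacity; swr_convert returns how many samples it actually wrote.
                Ret = ffmpeg.swr_convert(Swr, &output, max_out_samples, input, Frame->nb_samples);
                CheckRet();
                WritetoMs(output, 0, Ret * bytes_per_sample);
            }

            // After the very last frame, one more swr_convert(Swr, &output, max_out_samples, null, 0)
            // pass would flush whatever the resampler still holds.
            ffmpeg.av_freep(&output);
        }
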
  • HTML5 video won’t play ANY mp4 encodes on iPad

    12 March 2019, by nickg

    I’m trying to embed an mp4 on a page with the HTML5 video tag. Everything works fine on desktop, but nothing will work on an iPad running version 9.3.4.
    I have the MIME types set in an .htaccess file, and I’ve tried various encodes with HandBrake, Miro and FFmpeg.

    Even sample videos like the ones at w3schools and videojs don’t play.
    The video will play if I actually sync it to the iPad, but nothing works over the web. An older iPad plays mp4s through the HTML5 video player just fine.
    I’m ready to throw this POS iPad through a window.

        <video width="320" height="240" controls>
          <source src="http://webnamehere.com/video/bunny.mp4" type="video/mp4; codecs=&quot;avc1.42E01E, mp4a.40.2&quot;">
          Your browser does not support the video tag.
        </video>

    Has anyone found a way to fix this? Is there ANY encoding that this thing will actually play? Thank you in advance for any help.
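
    One combination that older iPads are generally most tolerant of is H.264 Baseline profile with yuv420p pixel data, AAC audio, and the moov atom moved to the start of the file so playback can begin over HTTP. As a hedged starting point only (the filenames are placeholders and this is not a confirmed fix for the device above), the FFmpeg command would look something like:

        ffmpeg -i input.mp4 -c:v libx264 -profile:v baseline -level 3.0 -pix_fmt yuv420p \
               -c:a aac -b:a 128k -movflags +faststart bunny.mp4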

  • Converting ffmpeg & ffprobe outputs to variables in an ffmpeg AWS Lambda Layer

    7 March 2019, by Gracie

    I have two tasks I am trying to perform with the ffmpeg AWS Lambda layer.

    1) Convert an audio file from stereo to mono (with ffmpeg)

    2) Get the duration of the audio file and pass the result to a variable (with ffprobe)

    const { spawnSync } = require("child_process");
    const { readFileSync, writeFileSync, unlinkSync } = require("fs");
    const util = require('util');
    var fs = require('fs');

    exports.handler = (event, context, callback) => {

       // Windows 10 ffmpeg command to convert stereo to mono is
       // ffmpeg -i volando.flac -ac 1 volando-mono.flac

       // Convert from stereo to mono
       spawnSync(
           "/opt/bin/ffmpeg",
           [
               "-i",
               `volando.flac`,
               "-ac",
               "1",
               `/tmp/volando-mono.flac`
           ],
           { stdio: "inherit" }
       );

       //Pass result to a variable
       //var duration = stdio;

       //Read the content from the /tmp directory
       fs.readdir("/tmp/", function (err, data) {
           if (err) throw err;
           console.log('Contents of tmp file: ', data);
       });

       // Get duration of Flac file
       // Windows 10 ffmpeg command is
       // ffprobe in.wav -show_entries stream=duration -select_streams a -of compact=p=0:nk=1 -v 0
       spawnSync(
           "/opt/bin/ffprobe",
           [
               `in.wav`,
               "-show_entries",
               "stream=duration",
               "-select_streams",
               "a",
               "-of",
               "compact=p=0:nk=1",
               "-v",
               "0"
           ],
           { stdio: "inherit" }

           //Pass result to a variable
           //var duration = stdio;
       );

    };

    Can anyone who has had success with this ffmpeg Lambda layer help get an output for these commands?

    Here are some resources regarding the FFmpeg Lambda layer:

    https://serverless.com/blog/publish-aws-lambda-layers-serverless-framework/
    https://github.com/serverlesspub/ffmpeg-aws-lambda-layer
    https://devopstar.com/2019/01/28/serverless-watermark-using-aws-lambda-layers-ffmpeg/
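
    As a hedged sketch (not taken from the question), the usual way to get the ffprobe result into a variable is to drop { stdio: "inherit" } so that spawnSync pipes and buffers the child’s output, then parse the returned stdout. The /tmp/volando-mono.flac path below is only an assumed stand-in for whichever file actually needs probing:

        const { spawnSync } = require("child_process");

        // Run ffprobe and capture stdout instead of inheriting the parent's stdio.
        const probe = spawnSync("/opt/bin/ffprobe", [
            "/tmp/volando-mono.flac",            // assumed input path
            "-show_entries", "stream=duration",
            "-select_streams", "a",
            "-of", "compact=p=0:nk=1",
            "-v", "0"
        ]);                                       // default stdio is "pipe"

        // stdout is a Buffer; with compact=p=0:nk=1 it contains just the duration in seconds.
        const duration = parseFloat(probe.stdout.toString().trim());
        console.log("Duration in seconds:", duration);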