Recherche avancée

Médias (2)

Mot : - Tags -/map

Autres articles (105)

  • Publier sur MédiaSpip

    13 juin 2013

    Puis-je poster des contenus à partir d’une tablette Ipad ?
    Oui, si votre Médiaspip installé est à la version 0.2 ou supérieure. Contacter au besoin l’administrateur de votre MédiaSpip pour le savoir

  • Encoding and processing into web-friendly formats

    13 avril 2011, par

    MediaSPIP automatically converts uploaded files to internet-compatible formats.
    Video files are encoded in MP4, Ogv and WebM (supported by HTML5) and MP4 (supported by Flash).
    Audio files are encoded in MP3 and Ogg (supported by HTML5) and MP3 (supported by Flash).
    Where possible, text is analyzed in order to retrieve the data needed for search engine detection, and then exported as a series of image files.
    All uploaded files are stored online in their original format, so you can (...)

  • Les formats acceptés

    28 janvier 2010, par

    Les commandes suivantes permettent d’avoir des informations sur les formats et codecs gérés par l’installation locale de ffmpeg :
    ffmpeg -codecs ffmpeg -formats
    Les formats vidéos acceptés en entrée
    Cette liste est non exhaustive, elle met en exergue les principaux formats utilisés : h264 : H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 m4v : raw MPEG-4 video format flv : Flash Video (FLV) / Sorenson Spark / Sorenson H.263 Theora wmv :
    Les formats vidéos de sortie possibles
    Dans un premier temps on (...)

Sur d’autres sites (8340)

  • audio convert to mp3,pcm and vox using ffmpeg

    8 juillet 2014, par user3789242

    Please can someone help me with the code for ffmpeg.
    I have to use ffmpeg to convert a recorder voice (using HTML5) into mp3,pcm or vox depending on the user’s selection.
    I don’t know how to write the code for ffmpeg if some one can help me with the code or libraries.
    Thank you in advance.

    Here is my code for recording the voice with a visualizer :

    // Recording state shared by getVal(), success() and the helpers below.
    var leftchannel = [];     // captured Float32Array chunks, left channel
    var rightchannel = [];    // captured Float32Array chunks, right channel
    var recorder = null;      // ScriptProcessorNode tapping the mic stream
    var recording = false;    // true while samples are being accumulated
    var recordingLength = 0;  // total samples captured per channel
    var volume = null;        // GainNode, assigned in success()
    var audioInput = null;    // unused in this snippet
    var sampleRate = 44100;   // sample rate written into the WAV header
    var audioContext = null;  // AudioContext constructor, assigned in success()
    var context = null;       // live AudioContext instance
    var outputString;

    // Normalise the vendor-prefixed getUserMedia implementations onto
    // navigator.getUserMedia (it is falsy here, so the prefixed fallbacks win).
    if (!navigator.getUserMedia) {
      navigator.getUserMedia =
        navigator.webkitGetUserMedia ||
        navigator.mozGetUserMedia ||
        navigator.msGetUserMedia;
    }

    if (navigator.getUserMedia) {
      navigator.getUserMedia(
        { audio: true },
        success,
        function (e) {
          alert('Error capturing audio.');
        }
      );
    } else {
      alert('getUserMedia not supported in this browser.');
    }



    /**
     * Button handler: "record" starts capturing samples, "stop" packages the
     * captured samples into a 44-byte-header PCM WAV blob and appends an
     * <audio> player plus a download link to #recordingList.
     *
     * Fixes vs. original: strict equality instead of `==`, and the inner
     * scaling factor is renamed so it no longer shadows the outer GainNode
     * variable `volume`.
     *
     * @param {string} value - "record" or "stop".
     */
    function getVal(value)
     {

    // if R is pressed, we start recording
    if (value === "record") {
       recording = true;
       // reset the buffers for the new recording
       leftchannel.length = rightchannel.length = 0;
       recordingLength = 0;
       document.getElementById('output').innerHTML = "Recording now...";

    // if S is pressed, we stop the recording and package the WAV file
    } else if (value === "stop") {

       // we stop recording
       recording = false;
       document.getElementById('output').innerHTML = "Building wav file...";

       // flatten the left and right channel chunk lists into one buffer each
       var leftBuffer = mergeBuffers(leftchannel, recordingLength);
       var rightBuffer = mergeBuffers(rightchannel, recordingLength);
       // interleave both channels together (L R L R ...)
       var interleaved = interleave(leftBuffer, rightBuffer);

       // 44-byte RIFF/WAVE header followed by 16-bit PCM samples
       var buffer = new ArrayBuffer(44 + interleaved.length * 2);
       var view = new DataView(buffer);

       // RIFF chunk descriptor
       writeUTFBytes(view, 0, 'RIFF');
       view.setUint32(4, 44 + interleaved.length * 2, true);
       writeUTFBytes(view, 8, 'WAVE');
       // FMT sub-chunk
       writeUTFBytes(view, 12, 'fmt ');
       view.setUint32(16, 16, true);             // fmt sub-chunk size
       view.setUint16(20, 1, true);              // audio format 1 = PCM
       view.setUint16(22, 2, true);              // stereo (2 channels)
       view.setUint32(24, sampleRate, true);     // sample rate
       view.setUint32(28, sampleRate * 4, true); // byte rate = rate * 2 ch * 2 bytes
       view.setUint16(32, 4, true);              // block align = 2 ch * 2 bytes
       view.setUint16(34, 16, true);             // bits per sample
       // data sub-chunk
       writeUTFBytes(view, 36, 'data');
       view.setUint32(40, interleaved.length * 2, true);

       // Scale floats in [-1, 1] up to signed 16-bit integers.
       // Renamed from "volume" to avoid shadowing the outer GainNode variable.
       var gain = 1;
       var lng = interleaved.length;
       var index = 44;
       for (var i = 0; i < lng; i++) {
           view.setInt16(index, interleaved[i] * (0x7FFF * gain), true);
           index += 2;
       }

       var blob = new Blob([view], { type: 'audio/wav' });

       // let's save it locally

       document.getElementById('output').innerHTML = 'Handing off the file now...';
       var url = (window.URL || window.webkitURL).createObjectURL(blob);

       var li = document.createElement('li');
       var au = document.createElement('audio');
       var hf = document.createElement('a');

       au.controls = true;
       au.src = url;
       hf.href = url;
       hf.download = 'audio_recording_' + new Date().getTime() + '.wav';
       hf.innerHTML = hf.download;
       li.appendChild(au);
       li.appendChild(hf);
       // NOTE(review): relies on the implicit element-id -> global mapping
       // for the #recordingList element — confirm it exists in the page.
       recordingList.appendChild(li);

    }
    }


    /**
     * getUserMedia success callback. Wires the Web Audio graph
     *   mic source -> gain -> splitter -> analyser -> script processor -> speakers
     * and installs an onaudioprocess tap that, while `recording` is true,
     * buffers the raw samples and renders a frequency spectrum on #myCanvas.
     *
     * Fix vs. original: `source`, `analyser` and `splitter` were assigned
     * without declaration, leaking them as implicit globals; they are now
     * declared with `var` (only used inside this function in this snippet).
     *
     * @param {MediaStream} e - microphone stream handed over by getUserMedia.
     */
    function success(e){

    audioContext = window.AudioContext || window.webkitAudioContext;
    context = new audioContext();

    volume = context.createGain();

    // creates an audio node from the microphone incoming stream (source)
    var source = context.createMediaStreamSource(e);

    // connect the stream (source) to the gain node
    source.connect(volume);

    var bufferSize = 2048;

    // deprecated but widely supported tap into the audio stream
    recorder = context.createScriptProcessor(bufferSize, 2, 2);

    // node for the visualizer
    var analyser = context.createAnalyser();
    analyser.smoothingTimeConstant = 0.3;
    analyser.fftSize = 512;

    var splitter = context.createChannelSplitter();

    // fired for every bufferSize-sample block while audio flows
    recorder.onaudioprocess = function(e){

       if (!recording) return;
       var left = e.inputBuffer.getChannelData (0);
       var right = e.inputBuffer.getChannelData (1);

       // copy the samples — the browser reuses the underlying buffers
       leftchannel.push (new Float32Array (left));
       rightchannel.push (new Float32Array (right));
       recordingLength += bufferSize;

       // frequency snapshot for the visualizer
       var array =  new Uint8Array(analyser.frequencyBinCount);
       analyser.getByteFrequencyData(array);

       var c=document.getElementById("myCanvas");
       var ctx = c.getContext("2d");
       // clear the current state
       ctx.clearRect(0, 0, 1000, 325);
       var gradient = ctx.createLinearGradient(0,0,0,300);
       gradient.addColorStop(1,'#000000');
       gradient.addColorStop(0.75,'#ff0000');
       gradient.addColorStop(0.25,'#ffff00');
       gradient.addColorStop(0,'#ffffff');
       // set the fill style
       ctx.fillStyle=gradient;
       drawSpectrum(array);
       // one 3px-wide bar per frequency bin
       function drawSpectrum(array) {
           for ( var i = 0; i < (array.length); i++ ){
                   var value = array[i];
                   ctx.fillRect(i*5,325-value,3,325);
               }

       }
    }

    // defined but never called in this snippet; kept for compatibility
    function getAverageVolume(array) {
       var values = 0;
       var average;

       var length = array.length;

       // sum all the frequency amplitudes
       for (var i = 0; i < length; i++) {
           values += array[i];
       }

       average = values / length;
       return average;
    }

       // connect the graph: gain -> splitter -> analyser -> recorder -> speakers
       volume.connect(splitter);
       splitter.connect(analyser, 0, 0);

       analyser.connect(recorder);
       recorder.connect(context.destination);

    }




    /**
     * Flattens a list of Float32Array chunks into one contiguous buffer.
     *
     * @param {Float32Array[]} channelBuffer - chunks in capture order.
     * @param {number} recordingLength - total sample count across all chunks.
     * @returns {Float32Array} the concatenated samples.
     */
    function mergeBuffers(channelBuffer, recordingLength){
      var merged = new Float32Array(recordingLength);
      var writePos = 0;
      channelBuffer.forEach(function (chunk) {
        merged.set(chunk, writePos);
        writePos += chunk.length;
      });
      return merged;
    }

    /**
     * Interleaves two equal-length channels into one buffer: L R L R ...
     *
     * @param {Float32Array} leftChannel - left-channel samples.
     * @param {Float32Array} rightChannel - right-channel samples.
     * @returns {Float32Array} buffer of combined length.
     */
    function interleave(leftChannel, rightChannel){
      var total = leftChannel.length + rightChannel.length;
      var mixed = new Float32Array(total);
      var src = 0;
      var dst = 0;
      while (dst < total) {
        mixed[dst++] = leftChannel[src];
        mixed[dst++] = rightChannel[src];
        src++;
      }
      return mixed;
    }


    /**
     * Writes the characters of a string into a DataView, one byte per
     * UTF-16 code unit (used here only for ASCII WAV-header tags).
     *
     * @param {DataView} view - destination view.
     * @param {number} offset - byte offset of the first character.
     * @param {string} string - text to write.
     */
    function writeUTFBytes(view, offset, string){
      Array.prototype.forEach.call(string, function (ch, i) {
        view.setUint8(offset + i, ch.charCodeAt(0));
      });
    }
  • Webm video files recorded on Chrome Mobile cannot be converted to MP4

    7 juillet 2022, par Tobias Kristensen

    I have a website where I record the user's webcam via the MediaRecorder API.
The video stream is created with navigator.mediaDevices.getUserMedia() :

    


    // Create the camera stream: square aspect ratio, front camera,
    // 360–1080 px with 720 px preferred on both axes.
const videoConstraints = {
  aspectRatio: 1 / 1,
  facingMode: 'user',
  width: { min: 360, ideal: 720, max: 1080 },
  height: { min: 360, ideal: 720, max: 1080 },
  deviceId: undefined,
};
const cameraStream = await navigator.mediaDevices.getUserMedia({ video: videoConstraints });

// Add stream to videoElement to display a video preview
videoElement.srcObject = cameraStream;


    


    I then check which mime types are available in the browser and use that info to initialize the MediaRecorder :

    


    // Candidate container/codec strings, most preferred first.
    // Fix vs. original: the ';' was written as the useless escape '\;'
    // (flagged by ESLint no-useless-escape); the string values are unchanged.
const validMimeTypes = [
  "video/webm;codecs=vp8",
  "video/webm;codecs=daala",
  "video/webm;codecs=h264",
  "video/webm",
  "video/mpeg"
];

// Returns the first MIME type this browser's MediaRecorder supports, or
// undefined when none match (MediaRecorder then falls back to its default).
const getFirstAvailableMimeType = () => {
  for (const mimeType of validMimeTypes) {
    if (MediaRecorder.isTypeSupported(mimeType)) {
      return mimeType;
    }
  }
}

// Initialize Media Recorder with the best available MIME type.
const mediaRecorder = new MediaRecorder(cameraStream, {
  mimeType: getFirstAvailableMimeType(),
});


    


    After I finish recording a video, I upload it to a server and store it on Firebase Storage, so it can be downloaded later.

    


    After downloading a video, I would like to convert it to an MP4 file. I've tried using CloudConvert and HandBrake. Both services have no issues converting videos that were recorded via Chrome on my desktop, but both fail when I try to convert videos recorded via Chrome Mobile on my phone.

    


    When trying to convert the video to MP4 via CloudConvert, the following error is shown :

    


    EBML header parsing failed. /input/import1/69fcceaccc27a0d6eabcb8a65045e87e.webm Invalid data found when processing input


    


    Any ideas how I can resolve this issue ?

    


  • Using ffmpeg to convert voice recording captured by HTML5

    8 juillet 2014, par user3789242

    I’m building HTML5 voice recording software with a visualizer. When recording the voice, and after the file has been uploaded as WAV in a blob (server-side), the user should be able to select the audio format of that file using ffmpeg. What I have achieved so far is uploading the file as WAV. What I still want to do is:

    • On the server side, pick your preferred web programming framework
    • The web programming framework accepts the upload and stores the file on the server
    • The web programming framework runs a ffmpeg (command line) which processes the file
    • The user can download the processed file

    here is my code so far :

    // Recording state shared across getVal(), success() and the helpers.
    var leftchannel = [];     // captured Float32Array chunks, left channel
    var rightchannel = [];    // captured Float32Array chunks, right channel
    var recorder = null;      // ScriptProcessorNode tap on the mic stream
    var recording = false;    // sample-accumulation flag
    var recordingLength = 0;  // samples captured per channel
    var volume = null;        // GainNode, set in success()
    var audioInput = null;    // unused in this snippet
    var sampleRate = 44100;   // rate written into the WAV header
    var audioContext = null;  // AudioContext constructor
    var context = null;       // AudioContext instance
    var outputString;

    // Fall back to the vendor-prefixed getUserMedia implementations
    // (navigator.getUserMedia is falsy inside this branch).
    if (!navigator.getUserMedia) {
       navigator.getUserMedia =
           navigator.webkitGetUserMedia ||
           navigator.mozGetUserMedia ||
           navigator.msGetUserMedia;
    }

    if (navigator.getUserMedia) {
       navigator.getUserMedia(
           { audio: true },
           success,
           function (e) {
               alert('Error capturing audio.');
           }
       );
    } else {
       alert('getUserMedia not supported in this browser.');
    }



    /**
     * Button handler: "record" starts capturing samples, "stop" packages
     * them into a 44-byte-header PCM WAV blob and appends an <audio> player
     * plus a download link to #recordingList.
     *
     * Fixes vs. original: strict equality instead of `==`, and the inner
     * scaling factor no longer shadows the outer GainNode variable `volume`.
     *
     * @param {string} value - "record" or "stop".
     */
    function getVal(value) {

       // if R is pressed, we start recording
       if (value === "record") {
           recording = true;
           // reset the buffers for the new recording
           leftchannel.length = rightchannel.length = 0;
           recordingLength = 0;
           document.getElementById('output').innerHTML = "Recording now...";

           // if S is pressed, we stop the recording and package the WAV file
       } else if (value === "stop") {

           // we stop recording
           recording = false;
           document.getElementById('output').innerHTML = "Building wav file...";

           // flatten the left and right channel chunk lists
           var leftBuffer = mergeBuffers(leftchannel, recordingLength);
           var rightBuffer = mergeBuffers(rightchannel, recordingLength);
           // interleave both channels together (L R L R ...)
           var interleaved = interleave(leftBuffer, rightBuffer);

           // 44-byte RIFF/WAVE header followed by 16-bit PCM samples
           var buffer = new ArrayBuffer(44 + interleaved.length * 2);
           var view = new DataView(buffer);

           // RIFF chunk descriptor
           writeUTFBytes(view, 0, 'RIFF');
           view.setUint32(4, 44 + interleaved.length * 2, true);
           writeUTFBytes(view, 8, 'WAVE');
           // FMT sub-chunk
           writeUTFBytes(view, 12, 'fmt ');
           view.setUint32(16, 16, true);             // fmt sub-chunk size
           view.setUint16(20, 1, true);              // audio format 1 = PCM
           view.setUint16(22, 2, true);              // stereo (2 channels)
           view.setUint32(24, sampleRate, true);     // sample rate
           view.setUint32(28, sampleRate * 4, true); // byte rate (2 ch * 2 bytes)
           view.setUint16(32, 4, true);              // block align
           view.setUint16(34, 16, true);             // bits per sample
           // data sub-chunk
           writeUTFBytes(view, 36, 'data');
           view.setUint32(40, interleaved.length * 2, true);

           // Scale floats in [-1, 1] to signed 16-bit integers.
           // Renamed from "volume" to stop shadowing the outer GainNode variable.
           var gain = 1;
           var lng = interleaved.length;
           var index = 44;
           for (var i = 0; i < lng; i++) {
               view.setInt16(index, interleaved[i] * (0x7FFF * gain), true);
               index += 2;
           }

           var blob = new Blob([view], {
               type: 'audio/wav'
           });

           // let's save it locally

           document.getElementById('output').innerHTML = 'Handing off the file now...';
           var url = (window.URL || window.webkitURL).createObjectURL(blob);

           var li = document.createElement('li');
           var au = document.createElement('audio');
           var hf = document.createElement('a');

           au.controls = true;
           au.src = url;
           hf.href = url;
           hf.download = 'audio_recording_' + new Date().getTime() + '.wav';
           hf.innerHTML = hf.download;
           li.appendChild(au);
           li.appendChild(hf);
           // NOTE(review): relies on the implicit element-id -> global mapping
           // for the #recordingList element — confirm it exists in the page.
           recordingList.appendChild(li);

       }
    }


    /**
     * getUserMedia success callback. Wires the Web Audio graph
     *   mic source -> gain -> splitter -> analyser -> script processor -> speakers
     * and installs an onaudioprocess tap that, while `recording` is true,
     * buffers the raw samples and renders a frequency spectrum on #myCanvas.
     *
     * Fix vs. original: `source`, `analyser` and `splitter` were assigned
     * without declaration, leaking them as implicit globals; they are now
     * declared with `var` (only used inside this function in this snippet).
     *
     * @param {MediaStream} e - microphone stream handed over by getUserMedia.
     */
    function success(e) {

       audioContext = window.AudioContext || window.webkitAudioContext;
       context = new audioContext();

       volume = context.createGain();

       // creates an audio node from the microphone incoming stream (source)
       var source = context.createMediaStreamSource(e);

       // connect the stream (source) to the gain node
       source.connect(volume);

       var bufferSize = 2048;

       // deprecated but widely supported tap into the audio stream
       recorder = context.createScriptProcessor(bufferSize, 2, 2);

       // node for the visualizer
       var analyser = context.createAnalyser();
       analyser.smoothingTimeConstant = 0.3;
       analyser.fftSize = 512;

       var splitter = context.createChannelSplitter();

       // fired for every bufferSize-sample block while audio flows
       recorder.onaudioprocess = function (e) {

           if (!recording) return;
           var left = e.inputBuffer.getChannelData(0);
           var right = e.inputBuffer.getChannelData(1);

           // copy the samples — the browser reuses the underlying buffers
           leftchannel.push(new Float32Array(left));
           rightchannel.push(new Float32Array(right));
           recordingLength += bufferSize;

           // frequency snapshot for the visualizer
           var array = new Uint8Array(analyser.frequencyBinCount);
           analyser.getByteFrequencyData(array);

           var c = document.getElementById("myCanvas");
           var ctx = c.getContext("2d");
           // clear the current state
           ctx.clearRect(0, 0, 1000, 325);
           var gradient = ctx.createLinearGradient(0, 0, 0, 300);
           gradient.addColorStop(1, '#000000');
           gradient.addColorStop(0.75, '#ff0000');
           gradient.addColorStop(0.25, '#ffff00');
           gradient.addColorStop(0, '#ffffff');
           // set the fill style
           ctx.fillStyle = gradient;
           drawSpectrum(array);

           // one 3px-wide bar per frequency bin
           function drawSpectrum(array) {
               for (var i = 0; i < (array.length); i++) {
                   var value = array[i];
                   ctx.fillRect(i * 5, 325 - value, 3, 325);
               }

           }
       }

       // defined but never called in this snippet; kept for compatibility
       function getAverageVolume(array) {
           var values = 0;
           var average;

           var length = array.length;

           // sum all the frequency amplitudes
           for (var i = 0; i < length; i++) {
               values += array[i];
           }

           average = values / length;
           return average;
       }

       // connect the graph: gain -> splitter -> analyser -> recorder -> speakers
       volume.connect(splitter);
       splitter.connect(analyser, 0, 0);

       analyser.connect(recorder);
       recorder.connect(context.destination);

    }




    /**
     * Concatenates a list of Float32Array chunks into a single buffer.
     *
     * @param {Float32Array[]} channelBuffer - chunks in capture order.
     * @param {number} recordingLength - total number of samples expected.
     * @returns {Float32Array} one contiguous sample buffer.
     */
    function mergeBuffers(channelBuffer, recordingLength) {
       var flat = new Float32Array(recordingLength);
       var cursor = 0;
       for (var chunk of channelBuffer) {
           flat.set(chunk, cursor);
           cursor += chunk.length;
       }
       return flat;
    }

    /**
     * Interleaves two equal-length channels into one buffer: L R L R ...
     *
     * @param {Float32Array} leftChannel - left-channel samples.
     * @param {Float32Array} rightChannel - right-channel samples.
     * @returns {Float32Array} buffer of combined length.
     */
    function interleave(leftChannel, rightChannel) {
       var combined = new Float32Array(leftChannel.length + rightChannel.length);
       var write = 0;
       for (var read = 0; write < combined.length; read++) {
           combined[write++] = leftChannel[read];
           combined[write++] = rightChannel[read];
       }
       return combined;
    }


    /**
     * Writes the characters of a string into a DataView, one byte per
     * UTF-16 code unit (used here only for ASCII WAV-header tags).
     *
     * @param {DataView} view - destination view.
     * @param {number} offset - byte offset of the first character.
     * @param {string} string - text to write.
     */
    function writeUTFBytes(view, offset, string) {
       for (var i = 0, n = string.length; i < n; i++) {
           view.setUint8(offset + i, string.charCodeAt(i));
       }
    }