Advanced search

Media (1)

Keyword: - Tags - / Christian Nold

Other articles (71)

  • Sites built with MediaSPIP

    2 May 2011

    This page presents some of the sites running MediaSPIP.
    You can of course add your own using the form at the bottom of the page.

  • Changing the publication date

    21 June 2013

    How do you change the publication date of a media item?
    You first need to add a "Date de publication" field in the relevant form mask:
    Administrer > Configuration des masques de formulaires > select "Un média"
    In the "Champs à ajouter" section, check "Date de publication"
    Click Enregistrer at the bottom of the page

  • Adding your own logo, banner or background image

    5 September 2013

    Some themes support three personalization elements: adding a logo; adding a banner; adding a background image.

On other sites (10876)

  • WebRTC : unsync audio after processing using ffmpeg

    22 November 2013, by QuickSilver

    I am recording video using RecordRTC (WebRTC). After receiving the WebM video and WAV audio at the server, I encode them into an MP4 file with ffmpeg (executing a shell command via PHP). But after encoding, the audio is out of sync with the video (the audio ends before the video). How can I fix this?

    I have noticed that the recorded audio is about 1 second shorter than the video.

    The JS code is here:

    record.onclick = function() {
       record.disabled = true;
       var video_constraints = {
           mandatory: {
               "minWidth": "320",
               "minHeight": "240",
               "minFrameRate": "24",
               "maxWidth": "320",
               "maxHeight": "240",
               "maxFrameRate": "24"
           },
           optional: []
       };
       navigator.getUserMedia({
           audio: true,
           video: video_constraints
       }, function(stream) {
           preview.src = window.URL.createObjectURL(stream);
           preview.play();

           // var legalBufferValues = [256, 512, 1024, 2048, 4096, 8192, 16384];
           // sample-rates in at least the range 22050 to 96000.
           recordAudio = RecordRTC(stream, {
               /* extra important, we need to set a big buffer when capturing audio and video at the same time*/
               bufferSize: 16384
               //sampleRate: 45000
           });

           recordVideo = RecordRTC(stream, {
               type: 'video'
           });

           recordVideo.startRecording();
           recordAudio.startRecording();

           stop.disabled = false;
           recording_flag = true;
           $("#divcounter").show();
           $("#second-step-title").text('Record your video');
           initCountdown();
           uploadStatus.video = false;
           uploadStatus.audio = false;
       }, function(error) {
           // the legacy getUserMedia callback API also expects an error handler
           console.error('getUserMedia error:', error);
       });
    };

    The ffmpeg command used is:

    ffmpeg -y -i 166890589.wav -i 166890589.webm -vcodec libx264 166890589.mp4

    Currently I'm adding an offset of -1 with -itsoffset, but I don't think that's the right fix.

    ffmpeg -y -itsoffset -1 -i 166890589.wav -i 166890589.webm -vcodec libx264 166890589.mp4
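
    A possible alternative (just a sketch, assuming the missing second is a gap at the end of the WAV rather than a gradual drift): pad the audio with silence and cut the output at the video length, so no fixed offset has to be guessed:

    ffmpeg -y -i 166890589.wav -i 166890589.webm -vcodec libx264 -af apad -shortest 166890589.mp4

    If the audio instead drifts over the whole recording, resampling it with -af aresample=async=1 may compensate better than a constant offset.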
  • Converting a binary stream to an mpegts stream

    22 December 2018, by John Kim

    I'm trying to create a livestreaming web app using NodeJS. The code I currently have emits a raw binary stream from the webcam on the client using Socket.IO, and the Node server receives this raw data. Using fluent-ffmpeg, I want to encode this binary stream into MPEG-TS and send it to an RTMP server in real time, without creating any intermediary files. Could I somehow convert the binary stream into a WebM stream and pipe that stream into an MPEG-TS encoder in one ffmpeg command?

    My relevant frontend client code:

    navigator.mediaDevices.getUserMedia(constraints).then(function(stream) {
       socket.emit('config_rtmpDestination',url);
       socket.emit('start','start');
       mediaRecorder = new MediaRecorder(stream);
       mediaRecorder.start(2000);

       mediaRecorder.onstop = function(e) {
           // MediaStream.stop() has been removed from browsers; stop each track instead
           stream.getTracks().forEach(function(track) { track.stop(); });
       }

       mediaRecorder.ondataavailable = function(e) {
         socket.emit("binarystream",e.data);
       }
    }).catch(function(err) {
       console.log('The following error occurred: ' + err);
       show_output('Local getUserMedia ERROR:'+err);
    });

    Relevant NodeJS server code:

    socket.on('binarystream',function(m){
       feedStream(m);
    });

    socket.on('start',function(m){
       ...
       var ops=[
           '-vcodec', socket._vcodec,'-i','-',
           '-c:v', 'libx264', '-preset', 'veryfast', '-tune', 'zerolatency',
           '-an', '-bufsize', '1000',
           '-f', 'mpegts', socket._rtmpDestination
       ];
       ffmpeg_process=spawn('ffmpeg', ops);
       feedStream=function(data){
           ffmpeg_process.stdin.write(data);
       }
       ...
    });

    The above code of course doesn't work; I get these errors from ffmpeg:

    Error while decoding stream #0:1: Invalid data found when processing input
    [NULL @ 000001b15e67bd80] Invalid sync code 61f192.
    [libvpx @ 000001b15e6c5000] Failed to decode frame: Bitstream not supported by this decoder

    because I’m trying to convert raw binary data into mpegts.
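
    A sketch of what the spawn arguments might look like if the idea is to let ffmpeg probe the piped WebM chunks itself instead of forcing an input codec (this assumes the MediaRecorder chunks arrive in order and that the first chunk, which carries the WebM header, is never dropped; the flags are standard ffmpeg options, not taken from the post above):

    var spawn = require('child_process').spawn; // presumably already required elsewhere

    // Hypothetical revision of the spawn call: no '-vcodec' before '-i -', so
    // ffmpeg detects the WebM (VP8/VP9) input itself, then re-encodes to H.264.
    var ops = [
        '-i', '-',                               // read the piped chunks from stdin
        '-c:v', 'libx264', '-preset', 'veryfast', '-tune', 'zerolatency',
        '-an',                                   // audio still dropped, as in the original
        '-f', 'mpegts', socket._rtmpDestination  // a plain rtmp:// destination would need '-f', 'flv' instead
    ];
    var ffmpeg_process = spawn('ffmpeg', ops);
    var feedStream = function(data) {
        ffmpeg_process.stdin.write(data);        // append each Socket.IO chunk to ffmpeg's stdin
    };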