
Media (91)

Other articles (99)

  • MediaSPIP 0.1 Beta version

    25 April 2011, by

    MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all the software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...)

  • APPENDIX: The plugins used specifically for the farm

    5 March 2010, by

    The central/master site of the farm needs several additional plugins, compared to the channels, in order to work properly: the Gestion de la mutualisation plugin; the inscription3 plugin, to handle registrations and requests to create a mutualisation instance as soon as users sign up; the verifier plugin, which provides a field-validation API (used by inscription3); the champs extras v2 plugin, required by inscription3 (...)

  • Enabling visitor registration

    12 April 2011, by

    It is also possible to enable visitor registration, which will let anyone open an account themselves on the channel in question, for example for open projects.
    To do so, simply go to the site's configuration area and choose the "Gestion des utilisateurs" (user management) submenu. The first form displayed corresponds to this feature.
    By default, when it was initialised, MediaSPIP created a menu item in the top menu of the page leading to (...)

On other sites (9583)

  • How do I play an HLS stream when the playlist.m3u8 file is constantly being updated?

    3 January 2021, by Adnan Ahmed

    I am using MediaRecorder to record chunks of my live video in webm format from a MediaStream, converting these chunks to .ts files on the server using ffmpeg, and then updating my playlist.m3u8 file with this code:

    


// Snippet from the server module: fs is Node's file-system module and maxOfArr
// is a small helper (not shown in the question) that returns the largest value in an array.
var fs = require('fs');

function generateM3u8Playlist(fileDataArr, playlistFp, isLive, cb) {
    var durations = fileDataArr.map(function(fd) {
        return fd.duration;
    });
    var maxT = maxOfArr(durations);

    var meta = [
        '#EXTM3U',
        '#EXT-X-VERSION:3',
        '#EXT-X-MEDIA-SEQUENCE:0',
        '#EXT-X-ALLOW-CACHE:YES',
        '#EXT-X-TARGETDURATION:' + Math.ceil(maxT),
    ];

    fileDataArr.forEach(function(fd) {
        meta.push('#EXTINF:' + fd.duration.toFixed(2) + ',');
        meta.push(fd.fileName2);
    });

    if (!isLive) {
        meta.push('#EXT-X-ENDLIST');
    }

    meta.push('');
    meta = meta.join('\n');

    fs.writeFile(playlistFp, meta, cb);
}


    


    Here fileDataArr holds information for all the chunks that have been created.
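
    Each fileDataArr entry carries at least fileName, filePath and duration (see the upload handler further down). The webm-to-.ts conversion itself (the videoUtils.webm2Mpegts call further down) is not shown in the question; presumably it wraps an ffmpeg invocation along the following lines and fills in fd.fileName2 with the name of the generated segment, which is what generateM3u8Playlist above writes into the playlist. A sketch under those assumptions:

// Hypothetical sketch of videoUtils.webm2Mpegts: transcode one recorded WebM
// chunk (VP8/Opus) to an MPEG-TS segment (H.264/AAC) and remember its name.
// (Error handling for a missing ffmpeg binary is omitted for brevity.)
var spawn = require('child_process').spawn;

function webm2Mpegts(fd, cb) {
    var tsPath = fd.filePath.replace(/\.webm$/, '.ts');   // e.g. videos/<prefix>/00001.ts
    var proc = spawn('ffmpeg', [
        '-y',
        '-i', fd.filePath,   // the .webm chunk written by the upload handler below
        '-c:v', 'libx264',   // VP8  -> H.264
        '-c:a', 'aac',       // Opus -> AAC
        '-f', 'mpegts',
        tsPath
    ]);
    proc.on('close', function(code) {
        if (code !== 0) { return cb(new Error('ffmpeg exited with code ' + code)); }
        fd.fileName2 = tsPath.split('/').pop();   // segment name used by generateM3u8Playlist
        cb(null, tsPath);
    });
}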

    


    After that I use this code to create an HLS server:

    


var runStreamServer = (function(streamFolder) {
    var executed = false;
    return function(streamFolder) {
        if (!executed) {
            executed = true;
            var HLSServer = require('hls-server')
            var http = require('http')

            var server = http.createServer()
            var hls = new HLSServer(server, {
                path: '/stream', // Base URI to output HLS streams
                dir: 'C:\\Users\\Work\\Desktop\\live-stream\\webcam2hls\\videos\\' + streamFolder // Directory that input files are stored
            })
            console.log("We are going to stream from folder:" + streamFolder);
            server.listen(8000);
            console.log('Server Listening on Port 8000');
        }
    };
})();
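
    With that in place, a single call makes hls-server expose everything under the given videos subfolder below the /stream path. A usage sketch, assuming the chunks for one stream live in videos/abc123/ (the folder name is a placeholder):

// Start serving the folder once; later calls are ignored because of the
// `executed` flag inside runStreamServer above.
runStreamServer('abc123');

// The playlist written by generateM3u8Playlist is then reachable at
//   http://localhost:8000/stream/playlist.m3u8
// which is the link opened in VLC below.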


    


    The problem is that if I stop creating new chunks and then open the HLS server link
http://localhost:8000/stream/playlist.m3u8, the video plays in VLC; but if I try to play it while recording is still going on, it keeps loading the file and never plays. I want it to play while new chunks are still being created and playlist.m3u8 is being updated. The quirk in the generateM3u8Playlist function is that it only adds '#EXT-X-ENDLIST' to the playlist file after I have stopped recording.
The software is still a work in progress, so the code is a bit messy. Thank you for any answers.
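
    For reference, the only thing isLive changes in generateM3u8Playlist above is whether that ENDLIST tag gets appended. A sketch of the two calls, with made-up chunk data (the videos/demo path is a placeholder):

// Illustration only: two fake 3-second segments.
var chunks = [
    { duration: 3.0, fileName2: '00000.ts' },
    { duration: 3.0, fileName2: '00001.ts' }
];

// isLive === true: no '#EXT-X-ENDLIST' is written, so players keep re-polling
// the playlist for new segments (live behaviour).
generateM3u8Playlist(chunks, 'videos/demo/playlist.m3u8', true, function(err) {});

// isLive === false: '#EXT-X-ENDLIST' is appended and the playlist is treated
// as a finished video-on-demand asset (what VLC plays happily after recording stops).
generateM3u8Playlist(chunks, 'videos/demo/playlist.m3u8', false, function(err) {});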

    


    The client side that generates blobs is as follows:

    


// MediaStreamRecorder comes from the MediaStreamRecorder (msr) library;
// DT, count, name, stopped, zeroPad and onMediaError are defined elsewhere in the page script.
var mediaConstraints = {
    video: true,
    audio: true
};

navigator.getUserMedia(mediaConstraints, onMediaSuccess, onMediaError);

function onMediaSuccess(stream) {
    console.log('will start capturing and sending ' + (DT / 1000) + 's videos when you press start');
    var mediaRecorder = new MediaStreamRecorder(stream);

    mediaRecorder.mimeType = 'video/webm';

    mediaRecorder.ondataavailable = function(blob) {
        var count2 = zeroPad(count, 5);
        // here count2 is just the zero-padded chunk number
        console.log('sending chunk ' + name + ' #' + count2 + '...');
        send('/chunk/' + name + '/' + count2 + (stopped ? '/finish' : ''), blob);
        ++count;
    };
}

// Here we have the send function which sends our blob to the server:
function send(url, blob) {
    var xhr = new XMLHttpRequest();
    xhr.open('POST', url, true);

    xhr.responseType = 'text/plain';
    xhr.setRequestHeader('Content-Type', 'video/webm');
    //xhr.setRequestHeader("Content-Length", blob.length);

    xhr.onload = function(e) {
        if (this.status === 200) {
            console.log(this.response);
        }
    };
    xhr.send(blob);
}


    


    The code that receives the XHR request is as follows:

    


// This runs inside the HTTP request handler for the '/chunk/...' route, so
// u (the request URL), req, res, shell, fs, LIVE, fileDataArrs, videoUtils,
// lastN, PREV_ITEMS_IN_LIVE and respond are all defined in the surrounding file.
var parts = u.split('/');
var prefix = parts[2];
var num = parts[3];
var isFirst = false;
var isLast = !!parts[4];

if ((/^0+$/).test(num)) {
    var path = require('path');
    shell.mkdir(path.join(__dirname, 'videos', prefix));
    isFirst = true;
}

var fp = 'videos/' + prefix + '/' + num + '.webm';
var msg = 'got ' + fp;
console.log(msg);
console.log('isFirst:%s, isLast:%s', isFirst, isLast);

var stream = fs.createWriteStream(fp, { encoding: 'binary' });
/*stream.on('end', function() {
    respond(res, ['text/plain', msg]);
});*/

//req.setEncoding('binary');

req.pipe(stream);
req.on('end', function() {
    respond(res, ['text/plain', msg]);

    if (!LIVE) { return; }

    var duration = 20;
    var fd = {
        fileName: num + '.webm',
        filePath: fp,
        duration: duration
    };
    var fileDataArr;
    if (isFirst) {
        fileDataArr = [];
        fileDataArrs[prefix] = fileDataArr;
    } else {
        fileDataArr = fileDataArrs[prefix];
    }
    try {
        fileDataArr.push(fd);
    } catch (err) {
        fileDataArr = [];
        console.log(err.message);
    }
    videoUtils.computeStartTimes(fileDataArr);

    videoUtils.webm2Mpegts(fd, function(err, mpegtsFp) {
        if (err) { return console.error(err); }
        console.log('created %s', mpegtsFp);

        var playlistFp = 'videos/' + prefix + '/playlist.m3u8';

        var fileDataArr2 = (isLast ? fileDataArr : lastN(fileDataArr, PREV_ITEMS_IN_LIVE));

        var action = (isFirst ? 'created' : (isLast ? 'finished' : 'updated'));

        videoUtils.generateM3u8Playlist(fileDataArr2, playlistFp, !isLast, function(err) {
            console.log('playlist %s %s', playlistFp, (err ? err.toString() : action));
        });
    });

    runStreamServer(prefix);
});


    


  • How to write a video stream to a server?

    14 August, by The Mask

    I have recently been playing with FFmpeg and its powerful abilities. I'm working on a cool project where I'm trying to create a live video stream using FFmpeg. The client (ReactJS) and server (NodeJS) are connected via WebSocket. The client sends byte packets to the server; the server then spawns an FFmpeg process and serves the stream to an nginx server.
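
    In other words, the ingest protocol between the two sides, as implemented in the code below, is one JSON control message carrying the stream key followed by a stream of binary WebM chunks. A minimal client-side sketch (the stream key is a placeholder):

// Minimal ingest sketch: announce the stream key, then send binary chunks.
const socket = new WebSocket('ws://localhost:8081/ws');
socket.binaryType = 'arraybuffer';
socket.onopen = async () => {
  // 1. One JSON text message announcing the stream key
  socket.send(JSON.stringify({ type: 'start', stream_key: 'my-stream-key' }));
  // 2. Then binary WebM data (here a dummy buffer standing in for a MediaRecorder chunk)
  socket.send(await new Blob(['dummy']).arrayBuffer());
};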

    


    Client (live-stream.js):

    


// Inside an async handler in a React component: videoRef, mediaRecorderRef,
// socketRef and streamKey come from the surrounding component.
const stream = await navigator.mediaDevices.getUserMedia({
  video: true,
  audio: true,
});
videoRef.current.srcObject = stream;

const ingestUrl = `ws://localhost:8081/ws`;
const socket = new WebSocket(ingestUrl);
socket.binaryType = "arraybuffer";
socket.onopen = () => {
  console.log("✅ WebSocket connection established");
  socket.send(JSON.stringify({ type: "start", stream_key: streamKey }));
  mediaRecorderRef.current.start(500);
};
socketRef.current = socket;

socket.onerror = (error) => {
  console.error("❌ WebSocket error:", error);
};

mediaRecorderRef.current = new MediaRecorder(stream, {
  mimeType: "video/webm;codecs=vp8,opus",
  videoBitsPerSecond: 1000000,
  audioBitsPerSecond: 128000
});
mediaRecorderRef.current.ondataavailable = (event) => {
  if (event.data.size > 0 && socket.readyState === WebSocket.OPEN) {
    event.data.arrayBuffer().then((buffer) => socket.send(buffer));
  }
};

    


    Server (index.js):

    


    const http = require('http');
const WebSocket = require('ws');
const { spawn } = require('child_process');
const fs = require('fs');


// Attach the WebSocket endpoint to an HTTP server on the /ws path
const httpServer = http.createServer();
const server = new WebSocket.Server({ server: httpServer, path: '/ws' });

const startFFmpeg = (stream_key) => {
  // Note: several of the argument strings below carry a trailing space
  // ("aac ", "6000k ", "yuv420p "), exactly as in the original code.
  return spawn("ffmpeg", [
    "-re",
    "-f", "matroska",
    "-i", "pipe:0",
    "-map", "0:v:0",
    "-map", "0:a:0",
    "-c:v", "libx264",
    "-c:a", "aac ",
    "-b:v", "6000k",
    "-maxrate", "6000k ",
    "-bufsize", "6000k ",
    "-pix_fmt", "yuv420p ",
    "-f", "flv",
    `rtmp://localhost/live/${stream_key}`,
  ]);
}
server.on('connection', (ws) => {
  console.log('📡 New WebSocket connection');

  let ffmpeg = null;
  let buffer = Buffer.alloc(0);
  let streamStarted = false;

  ws.on('message', (msg, isBinary) => {
    if (!isBinary) {
      const parsed = JSON.parse(msg);
      if (parsed.type === "start") {
        const { stream_key } = parsed;
        console.log(`🔑 Stream key: ${stream_key}`);
        console.log(`🎥 Starting ingest for stream key: ${stream_key}`);

        ffmpeg = startFFmpeg(stream_key)
        ffmpeg.stdin.on("error", (e) => {
          console.error("FFmpeg stdin error:", e.message);
        });

        ffmpeg.stderr.on("data", (data) => {
          console.log(`FFmpeg Data: ${data}`);
        });

        ffmpeg.on("close", (code) => {
          console.log(`FFmpeg exited with code ${code}`);
        });

        ffmpeg.on("exit", (code, signal) => {
          console.log(`FFmpeg exited with code: ${code}, signal: ${signal}`);
          if (signal === 'SIGSEGV') {
            console.log('🔄 FFmpeg segfaulted, attempting restart...');
            setTimeout(() => {
              if (ws.readyState === WebSocket.OPEN) {
                startFFmpeg(stream_key);
              }
            }, 1000);
          }
        });

        streamStarted = true;
      }
    } else if (isBinary && ffmpeg && ffmpeg.stdin.writable) {
      try {
        // Buffer.from handles both Buffer and ArrayBuffer payloads
        const data = Buffer.from(msg);

        // Buffer the data
        buffer = Buffer.concat([buffer, data]);
        
        // Write in larger chunks to reduce overhead
        if (buffer.length >= 8192) { // 8KB threshold
          console.log(`📥 Writing ${buffer.length} bytes to FFmpeg`);
          
          if (ffmpeg.stdin.write(buffer)) {
            buffer = Buffer.alloc(0);
          } else {
            // Handle backpressure
            ffmpeg.stdin.once('drain', () => {
              buffer = Buffer.alloc(0);
              ffmpeg.stdin.setMaxListeners(20); // or a safe upper bound
            });
          }
        }
      } catch (e) {
        console.error("FFmpeg write error:", e);
      }
    }
  });
  
  ws.on('close', () => {
    console.log('❌ WebSocket closed');
    streamStarted = false;

    if (ffmpeg){     // Write any remaining buffer
      if (buffer.length > 0 && ffmpeg.stdin.writable) {
        console.log(`📥 Writing final ${buffer.length} bytes to FFmpeg`);
        ffmpeg.stdin.write(buffer);
      }
            
      // Gracefully close FFmpeg
      if (ffmpeg.stdin.writable) {
        ffmpeg.stdin.end();
      }
      
      setTimeout(() => {
        if (ffmpeg && !ffmpeg.killed) {
          ffmpeg.kill('SIGTERM');
          setTimeout(() => {
            if (ffmpeg && !ffmpeg.killed) {
              ffmpeg.kill('SIGKILL');
            }
          }, 5000);
        }
      }, 1000);
    }
  });
});

httpServer.listen(8081, "localhost", () => {
  console.log("🛰️ Server listening on http://localhost:8081/ws");
});


    


    The problem statement:
I have been facing issues like pixel drops in the video and poor quality, and FFmpeg is crashing with this error:

    


    FFmpeg Data: Input #0, matroska,webm, from 'pipe:0':

FFmpeg Data:   Metadata:
    encoder         : Chrome
  Duration: N/A, start: 0.000000, bitrate: N/A
  Stream #0:0(eng): Audio: opus, 48000 Hz, mono, fltp (default)

FFmpeg Data:   Stream #0:1(eng): Video: vp8, yuv420p(progressive), 640x480, SAR 1:1 DAR 4:3, 
FFmpeg Data: 1k tbr, 1k tbn (default)
    Metadata:
      alpha_mode      : 1

FFmpeg Data: Unknown pixel format requested: yuv420p .

FFmpeg stdin error: write EPIPE
FFmpeg exited with code: 1, signal: null
FFmpeg exited with code 1


    


  • WebRTC to RTMP: send video from camera to RTMP link

    14 April 2024, by Leo-Mahendra

    I can't send the video from WebRTC, which is converted to buffered data every 10 seconds and sent to server.js, where it is received via WebSockets and converted to FLV format using ffmpeg.

    


    To start with, I am trying to send it to an RTMP server named Restreamer. Here I tried to convert the buffer data and send it to the RTMP link using ffmpeg commands; initially I managed to successfully save the WebRTC video to an mp4 file for a duration of 2-3 minutes.

    


    After that I tried to use WebRTC to send video data every 10 seconds, and on the server I tried to forward it to RTMP, but I can't get it through. I can see that the connection between the RTMP URL and the server is taking place, but I can't see the video. The logs in the RTMP server show:

    


    2024-04-14 12:35:45 ts=2024-04-14T07:05:45Z level=INFO component="RTMP" msg="no streams available" action="INVALID" address=":1935" client="172.17.0.1:37700" path="/3d30c5a9-2059-4843-8957-da963c7bc19b.stream" who="PUBLISH"
2024-04-14 12:35:45 ts=2024-04-14T07:05:45Z level=INFO component="RTMP" msg="no streams available" action="INVALID" address=":1935" client="172.17.0.1:37716" path="/3d30c5a9-2059-4843-8957-da963c7bc19b.stream" who="PUBLISH"
2024-04-14 12:35:45 ts=2024-04-14T07:05:45Z level=INFO component="RTMP" msg="no streams available" action="INVALID" address=":1935" client="172.17.0.1:37728" path="/3d30c5a9-2059-4843-8957-da963c7bc19b.stream" who="PUBLISH"   


    


    My frontend code:

    


         const handleSendVideo = async () => {
        console.log("start");
    
        if (!ws) {
            console.error('WebSocket connection not established.');
            return;
        }
    
        try {
            const videoStream = await navigator.mediaDevices.getUserMedia({ video: true });
            const mediaRecorder = new MediaRecorder(videoStream);
    
            const requiredFrameSize = 460800;
            const frameDuration = 10 * 1000; // 10 seconds in milliseconds
    
            mediaRecorder.ondataavailable = async (event) => {
                if (ws.readyState !== WebSocket.OPEN) {
                    console.error('WebSocket connection is not open.');
                    return;
                }
    
                if (event.data.size > 0) {
                    const arrayBuffer = await event.data.arrayBuffer();
                    const uint8Array = new Uint8Array(arrayBuffer);
    
                    const width = videoStream.getVideoTracks()[0].getSettings().width;
                    const height = videoStream.getVideoTracks()[0].getSettings().height;
    
                    const numFrames = Math.ceil(uint8Array.length / requiredFrameSize);
    
                    for (let i = 0; i < numFrames; i++) {
                        const start = i * requiredFrameSize;
                        const end = Math.min((i + 1) * requiredFrameSize, uint8Array.length);
                        let frameData = uint8Array.subarray(start, end);
    
                        // Pad or trim the frameData to match the required size
                        if (frameData.length < requiredFrameSize) {
                            // Pad with zeros to reach the required size
                            const paddedData = new Uint8Array(requiredFrameSize);
                            paddedData.set(frameData, 0);
                            frameData = paddedData;
                        } else if (frameData.length > requiredFrameSize) {
                            // Trim to match the required size
                            frameData = frameData.subarray(0, requiredFrameSize);
                        }
    
                        const dataToSend = {
                            buffer: Array.from(frameData), // Convert Uint8Array to array of numbers
                            width: width,
                            height: height,
                            pixelFormat: 'yuv420p',
                            mode: 'SendRtmp'
                        };
    
                        console.log("Sending frame:", i);
                        ws.send(JSON.stringify(dataToSend));
                    }
                }
            };
    
            // Start recording and send data every 10 seconds
            mediaRecorder.start(frameDuration);
    
            console.log("MediaRecorder started.");
        } catch (error) {
            console.error('Error accessing media devices or starting recorder:', error);
        }
      };
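
    A note on the constants above: requiredFrameSize = 460800 is exactly the size of one raw yuv420p frame at 640x480, since yuv420p stores one luma byte per pixel plus two chroma planes subsampled 2x2:

// Raw yuv420p frame size in bytes: width * height * 3 / 2
function yuv420pFrameSize(width, height) {
    return width * height * 3 / 2;
}

console.log(yuv420pFrameSize(640, 480));   // 460800, the requiredFrameSize used above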


    


    And my backend:

    


// wss, spawn and RTMPLINK are defined elsewhere in the server file (not shown in the question).
wss.on('connection', (ws) => {
    console.log('WebSocket connection established.');

    ws.on('message', async (data) => {
        try {
            const parsedData = JSON.parse(data);

            if (parsedData.mode === 'SendRtmp' && Array.isArray(parsedData.buffer)) {
                const { buffer, pixelFormat, width, height } = parsedData;
                const bufferArray = Buffer.from(buffer);

                await sendRtmpVideo(bufferArray, pixelFormat, width, height);
            } else {
                console.log('Received unknown or invalid mode or buffer data');
            }
        } catch (error) {
            console.error('Error parsing WebSocket message:', error);
        }
    });

    ws.on('close', () => {
        console.log('WebSocket connection closed.');
    });
});

const sendRtmpVideo = async (frameBuffer, pixelFormat, width, height) => {
    console.log("ffmpeg data", frameBuffer);
    try {
        const ratio = `${width}x${height}`;
        const ffmpegCommand = [
            '-re',
            '-f', 'rawvideo',
            '-pix_fmt', pixelFormat,
            '-s', ratio,
            '-i', 'pipe:0',
            '-c:v', 'libx264',
            '-preset', 'fast', // Specify the preset for libx264
            '-b:v', '3000k',   // Specify the video bitrate
            '-loglevel', 'debug',
            '-f', 'flv',
            // '-flvflags', 'no_duration_filesize',
            RTMPLINK
        ];

        const ffmpeg = spawn('ffmpeg', ffmpegCommand);

        ffmpeg.on('exit', (code, signal) => {
            if (code === 0) {
                console.log('FFmpeg process exited successfully.');
            } else {
                console.error(`FFmpeg process exited with code ${code} and signal ${signal}`);
            }
        });

        ffmpeg.on('error', (error) => {
            console.error('FFmpeg spawn error:', error);
        });

        ffmpeg.stderr.on('data', (data) => {
            console.error(`FFmpeg stderr: ${data}`);
        });

        ffmpeg.stdin.write(frameBuffer, (err) => {
            if (err) {
                console.error('Error writing to FFmpeg stdin:', err);
            } else {
                console.log('Data written to FFmpeg stdin successfully.');
            }
            ffmpeg.stdin.end(); // Close stdin after writing the buffer
        });
    } catch (error) {
        console.error('Error in sendRtmpVideo:', error);
    }
};