Advanced search

Media (0)


No media matching your criteria is available on the site.

Other articles (20)

  • Websites made with MediaSPIP

    2 May 2011

    This page lists some websites based on MediaSPIP.

  • Creating farms of unique websites

    13 April 2011

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things): implementation costs to be shared between several different projects/individuals, rapid deployment of multiple unique sites, and creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

  • Other interesting software

    13 April 2011

    We don't claim to be the only ones doing what we do, and we certainly don't claim to be the best; we simply try to do it well and to keep getting better.
    The following list covers software that is more or less similar to MediaSPIP, or that MediaSPIP more or less tries to resemble.
    We don't know these projects and haven't tried them, but you can take a peek.
    Videopress
    Website: http://videopress.com/
    License: GNU/GPL v2
    Source code: (...)

On other sites (5381)

  • FFMPEG ERROR on streaming video generated from MediaRecorder API on RTMP url

    20 February 2024, by Prince Mishra

    Ref: https://www.mux.com/blog/the-state-of-going-live-from-a-browser

    The blog above describes my problem in detail and also presents a solution. I am trying to implement that solution, which uses Socket.IO.

    Here is a description of the problem:

    I want to capture video and audio in the browser using

    navigator.mediaDevices
      .getUserMedia({ video: true, audio: true })

    and I am using

    const options = {
      mimeType: "video/webm;codecs=vp8",
    };
    const mediaRecorder = new MediaRecorder(stream, options);

    to record the stream from getUserMedia chunk by chunk, and then Socket.IO to send the chunks to the backend, where FFmpeg streams them to an RTMP URL.
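
    A common variant of this pattern (a minimal sketch, not the code used here) records with a timeslice instead of stopping and restarting the recorder, so that every ondataavailable chunk continues one WebM stream instead of being a standalone file:

    // Sketch only: `stream` comes from getUserMedia and `socket` is a
    // connected socket.io-client instance, as in the full code below.
    const recorder = new MediaRecorder(stream, {
      mimeType: "video/webm;codecs=vp8",
    });

    // With a timeslice, the recorder keeps running and emits a chunk
    // roughly every 2000 ms; all chunks belong to a single continuous
    // WebM stream, so the backend can pipe them into FFmpeg's stdin.
    recorder.ondataavailable = (e) => {
      if (e.data.size > 0) {
        socket.emit("videoChunk", e.data);
      }
    };
    recorder.start(2000);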

    I am using the following FFmpeg command:

    const { spawn } = require('child_process');

    const ffmpegProcess = spawn('ffmpeg', [
        '-i', 'pipe:0',           // read the input from stdin
        '-c:v', 'libx264',        // encode video as H.264
        '-preset', 'veryfast',
        '-tune', 'zerolatency',
        '-c:a', 'aac',            // encode audio as AAC
        '-ar', '44100',           // 44.1 kHz audio sample rate
        '-f', 'flv',              // FLV container, as required for RTMP
        rtmpurl
    ]);
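
    To sanity-check the RTMP endpoint independently of the browser pipeline, a synthetic source can be pushed with the ffmpeg CLI first (a sketch; testsrc and sine are standard lavfi test sources, and the URL matches the backend below):

    ffmpeg -re -f lavfi -i testsrc=size=640x480:rate=30 \
           -f lavfi -i sine=frequency=440 \
           -c:v libx264 -preset veryfast -tune zerolatency \
           -c:a aac -ar 44100 -f flv rtmp://localhost/live/test

    If that stream plays, the RTMP server is fine and the problem lies in the chunks FFmpeg receives on stdin.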

    And I am getting the errors shown in the three screenshots attached to the original post (Error Image 1, Error Image 2, Error Image 3).

    Can anyone help me fix this? I am new to FFmpeg.

    Here is the complete frontend and backend code:

    Frontend (App.jsx):

    import { useEffect } from "react";
    import "./App.css";
    import io from "socket.io-client";

    function App() {
      let video;

      useEffect(() => {
        video = document.getElementById("video");
      }, []);

      const socket = io("http://localhost:3050");
      socket.on("connect", () => {
        console.log("Connected to server");
      });

      let stream;
      navigator.mediaDevices
        .getUserMedia({ video: true, audio: true })
        .then((strea) => {
          video.srcObject = strea;
          stream = strea;
          const options = {
            mimeType: "video/webm;codecs=vp8",
          };
          const mediaRecorder = new MediaRecorder(stream, options);
          console.log(mediaRecorder);
          let chunks = [];

          mediaRecorder.ondataavailable = function (e) {
            chunks.push(e.data);
            console.log(e.data);
          };
          mediaRecorder.onstop = function (e) {
            const blob = new Blob(chunks, { type: "video/webm;codecs=vp8" });
            console.log("emitted");
            socket.emit("videoChunk", blob);
            chunks = [];
            // const videoURL = URL.createObjectURL(blob);
            // const a = document.createElement('a');
            // a.href = videoURL;
            // a.download = 'video.mp4';
            // a.click();
            // window.URL.revokeObjectURL(videoURL); // videoURL only exists
            // in the commented-out download code above
          };
          mediaRecorder.start();
          setInterval(() => {
            mediaRecorder.stop();
            mediaRecorder.start();
          }, 2000);
        })
        .catch((error) => {
          console.error("Error accessing camera:", error);
        });

      return (
        <>
          {/* id="video" is required by the getElementById call above;
              autoPlay/muted let the preview start without a user gesture */}
          <video id="video" width="640" height="480" autoPlay muted></video>
          <button>Capture</button>
        </>
      );
    }

    export default App;

    Backend code:

    const express = require('express');
    const http = require('http');
    const socketIo = require('socket.io');
    const { spawn } = require('child_process');

    const app = express();

    const server = http.createServer(app);
    const io = socketIo(server, {
        cors: {
            origin: "*",
            methods: ["GET", "POST"]
        },
        // Note: the option name is maxHttpBufferSize (case-sensitive);
        // a misspelled name is ignored, leaving the default 1 MB limit.
        maxHttpBufferSize: 1e8
    });

    const rtmpurl = 'rtmp://localhost/live/test';

    io.on('connection', (socket) => {
        console.log('A user connected');

        const ffmpegProcess = spawn('ffmpeg', [
            '-i', 'pipe:0',
            '-c:v', 'libx264',
            '-preset', 'veryfast',
            '-tune', 'zerolatency',
            '-c:a', 'aac',
            '-ar', '44100',
            '-f', 'flv',
            rtmpurl
        ]);

        ffmpegProcess.stdin.on('error', (e) => {
            console.log(e);
        });

        ffmpegProcess.stderr.on('data', (data) => {
            console.log(data.toString());
        });

        ffmpegProcess.on('close', (code) => {
            console.log(`child process exited with code ${code}`);
        });

        socket.on('videoChunk', (chunk) => {
            console.log(chunk);
            ffmpegProcess.stdin.write(chunk);
        });

        socket.on('disconnect', () => {
            console.log('User disconnected');
            ffmpegProcess.stdin.end();
        });
    });

    const PORT = process.env.PORT || 3050;

    app.get('/test', (req, res) => {
        res.send('Hello from /test route!');
    });

    server.listen(PORT, () => {
        console.log(`Server is running on port ${PORT}`);
    });

  • configure: Include quotes around pkg_version

    16 December 2024, by Joe Schiffler
    configure: Include quotes around pkg_version
    

    In some MSYS environments, it can happen that the 3-argument syntax
    for pkg-config library specifications fails because, we guess, the
    expansion of pkg_version ends up being parsed as a redirection.

    To avoid failures like the one in the referenced build [2], we quote it
    so that the whole module, including operators, is expanded into a
    single shell word, and the single-argument syntax for specifying the
    library to pkg-config is used.

    The single-argument syntax has been supported by the original
    pkg-config from the beginning, more than 20 years ago [3].

    In the pkgconf implementation, the single-argument syntax was supported
    pretty much from the beginning as well. The multiple-argument syntax
    we used until this change was not supported until a change [4] more
    than 10 years ago.
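
    For illustration (a sketch with libfoo as a stand-in module; not part of the commit message), the quoting difference looks like this:

    # Three-argument form: written out unquoted, the shell takes ">" as
    # a redirection (creating a file named "="), so pkg-config never
    # sees the version constraint.
    pkg-config --exists libfoo >= 1.2

    # Quoted, single-argument form: the whole specification is one
    # shell word, which pkg-config has accepted from the beginning.
    pkg-config --exists "libfoo >= 1.2"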

    References


    1. Build passing with quotes:
    https://github.com/JoeSchiff/pyav-ffmpeg/actions/runs/12358403929
    2. Build failing without quotes:
    https://github.com/JoeSchiff/pyav-ffmpeg/actions/runs/12360472377
    3. Earliest commit of the current pkg-config Git repo already mentions the single-argument syntax:
    https://gitlab.freedesktop.org/pkg-config/pkg-config/-/commit/2ac96cbcc708d8945329fd1b2001386e1c895c64#124c0becfe68b1ef671f49ed2b9d24779ace126f_0_162
    4. pkgconf gains support for the 3-argument syntax (pkgconf --exists liba = 1.2.3):
    https://github.com/pkgconf/pkgconf/commit/793de6a06ca52fbfe906a269b5f2b2ba41739517

    Commit-message-by: Alexander Strasser <eclipse7@gmx.net>
    Signed-off-by: Joe Schiffler <joeschiffler3@gmail.com>
    Signed-off-by: Alexander Strasser <eclipse7@gmx.net>

    • [DH] configure
  • delphi firemonkey + FFmpeg Fill image/Tbitmap with data of AVFRAME->pixelformat->YUV420P

    9 February 2020, by coban

    I have managed to create a simple video player using the SDL2 + FFmpeg libraries with Delphi VCL. It's about the same as ffplay.exe, but not a console app.
    I've noticed that FFmpeg (I might be wrong) converts/scales (sws_scale) from any source pixel format to YUV420P faster than to any other destination format.

    What I want to achieve is some kind of video surface over which I can place other components, for example a TProgressBar. SDL has a function, SDL_CreateWindowFrom, which can turn a TPanel into a video surface/window over which any component can be placed, but this function is Windows-only.

    Maybe I am looking in the wrong direction to achieve what I want; if so, any hint is welcome.
    I was thinking of drawing the YUV420P data onto the TBitmap of a TImage; this way I won't need the SDL2 library, and I will be able to put any other component above it, in this case a TImage, or another component that might be faster.

    It seems I need to convert the YUV420P data to BGRA format, because TBitmap does not seem to support any YUV format; worse, the FireMonkey TBitmap is always in BGRA format, and changing it to another format is not possible.

    First of all, I need a function to convert YUV420P to BGRA. Can anyone help with this: is there a component/package/function I could use? Or is it somehow possible to use the YUV420P format directly, without converting?
    I tried to port some SDL2 functions from the SDL2 source (C/C++) to Delphi, but it's too complicated for me, especially with my knowledge of C/C++. SDL2 does have functions implemented for converting RGB <-> YUV. (Why did I ever start Delphi programming? My mistake.)
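
    Since the player already uses swscale, one option (a sketch, assuming Delphi FFmpeg bindings that mirror the C API; Frame, Bmp, and the local names are placeholders) is to let sws_scale emit BGRA directly and copy it into a mapped FireMonkey bitmap:

    // Sketch: convert an AVFrame (whatever its source format) straight
    // to BGRA with swscale, writing into a mapped FMX TBitmap.
    var
      SwsCtx: PSwsContext;
      BitmapData: TBitmapData;
      DstData: array[0..3] of PByte;
      DstLinesize: array[0..3] of Integer;
    begin
      SwsCtx := sws_getContext(
        Frame^.width, Frame^.height, TAVPixelFormat(Frame^.format),
        Frame^.width, Frame^.height, AV_PIX_FMT_BGRA,
        SWS_BILINEAR, nil, nil, nil);

      if Bmp.Map(TMapAccess.Write, BitmapData) then
      try
        DstData[0] := BitmapData.Data;       // destination pixel buffer
        DstLinesize[0] := BitmapData.Pitch;  // bytes per destination row
        sws_scale(SwsCtx, @Frame^.data, @Frame^.linesize,
                  0, Frame^.height, @DstData, @DstLinesize);
      finally
        Bmp.Unmap(BitmapData);
      end;

      sws_freeContext(SwsCtx);
    end;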

    By the way, I already tried TMediaPlayer; it draws the video above everything, so nothing other than the video is visible.


    I've made an attempt; what I don't understand is where to get, and what exactly is meant by, "y_stride, uv_stride and rgb_stride".
    Some variable declarations and/or assignments may be incorrect and I still need to debug the values, but first I need to know what to pass for the variables above.
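
    For the strides (a note with placeholder names, not from the original post): with an FFmpeg AVFrame in YUV420P, y_stride is Frame^.linesize[0], uv_stride is Frame^.linesize[1] (U and V share a stride), and rgb_stride is the byte width of one destination row, e.g. width * 4 for tightly packed BGRA. A hypothetical call, where YCBCR_601, RGB_FORMAT_BGRA, and RGBBuffer stand in for whatever the ported unit defines:

    // Sketch: linesize[] is bytes per row and may exceed the visible
    // width because of padding/alignment in the decoder.
    STD_FUNCTION_NAME(
      Frame^.width, Frame^.height,
      Frame^.data[0], Frame^.data[1], Frame^.data[2],  // Y, U, V planes
      Frame^.linesize[0],                              // y_stride
      Frame^.linesize[1],                              // uv_stride
      RGBBuffer, Frame^.width * 4,                     // RGB out, rgb_stride
      YCBCR_601,                                       // yuv_type (placeholder)
      YUV_FORMAT_420, RGB_FORMAT_BGRA);                // format selectors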


    procedure STD_FUNCTION_NAME(width, height: Cardinal; Y, U, V: PByte;
                                Y_stride, UV_stride: Cardinal;
                                RGB: PByte; RGB_stride: Cardinal;
                                yuv_type: YCbCrType;
                                YUV_FORMAT, RGB_FORMAT: Word);
    var
      param: PYUV2RGBParam;
      y_pixel_stride,
      uv_pixel_stride,
      uv_x_sample_interval,
      uv_y_sample_interval: Word;

      x, ys: Cardinal;
      y_ptr1, y_ptr2, u_ptr, v_ptr: PByte;
      rgb_ptr1, rgb_ptr2: PByte;

      // Integer, not Cardinal: these intermediates go negative
      // (e.g. u_ptr^ - 128 for chroma values below 128).
      u_tmp, v_tmp, r_tmp,
      g_tmp, b_tmp: Integer;
      y_tmp: Integer;
    begin
    param := @(YUV2RGB[Integer(yuv_type)]);
    if YUV_FORMAT = YUV_FORMAT_420 then
    begin
      y_pixel_stride       := 1;
      uv_pixel_stride      := 1;
      uv_x_sample_interval := 2;
      uv_y_sample_interval := 2;
    end
    else if YUV_FORMAT = YUV_FORMAT_422 then
    begin
      y_pixel_stride       := 2;
      uv_pixel_stride      := 4;
      uv_x_sample_interval := 2;
      uv_y_sample_interval := 1;
    end
    else if YUV_FORMAT = YUV_FORMAT_NV12 then
    begin
      y_pixel_stride       := 1;
      uv_pixel_stride      := 2;
      uv_x_sample_interval := 2;
      uv_y_sample_interval := 2;
    end;


    // for(y=0; y<(height-(uv_y_sample_interval-1)); y+=uv_y_sample_interval)
    ys := 0;
    while ys < height - (uv_y_sample_interval - 1) do
    begin
      y_ptr1 := Y + ys * Y_stride;
      y_ptr2 := Y + (ys + 1) * Y_stride;
      u_ptr  := U + (ys div uv_y_sample_interval) * UV_stride;
      v_ptr  := V + (ys div uv_y_sample_interval) * UV_stride;

      rgb_ptr1 := RGB + ys * RGB_stride;
      if uv_y_sample_interval > 1 then
        rgb_ptr2 := RGB + (ys + 1) * RGB_stride;

      // for(x=0; x<(width-(uv_x_sample_interval-1)); x+=uv_x_sample_interval)
      x := 0;
      while x < (width - (uv_x_sample_interval - 1)) do
      begin
        // Compute the U and V contributions, common to the four pixels
        u_tmp := (u_ptr^) - 128;
        v_tmp := (v_ptr^) - 128;

        r_tmp := v_tmp * param.v_r_factor;
        g_tmp := u_tmp * param.u_g_factor + v_tmp * param.v_g_factor;
        b_tmp := u_tmp * param.u_b_factor;

        // Compute the Y contribution for each pixel
        y_tmp := (y_ptr1[0] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);

        y_tmp := (y_ptr1[y_pixel_stride] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);

        if uv_y_sample_interval > 1 then
        begin
          y_tmp := (y_ptr2[0] - param.y_shift) * param.y_factor;
          PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr2);

          y_tmp := (y_ptr2[y_pixel_stride] - param.y_shift) * param.y_factor;
          PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr2);
        end;

        y_ptr1 := y_ptr1 + 2 * y_pixel_stride;
        y_ptr2 := y_ptr2 + 2 * y_pixel_stride;
        u_ptr  := u_ptr + 2 * uv_pixel_stride div uv_x_sample_interval;
        v_ptr  := v_ptr + 2 * uv_pixel_stride div uv_x_sample_interval;
        x := x + uv_x_sample_interval;
      end;

      // Catch the last pixel, if needed
      if (uv_x_sample_interval = 2) and (x = (width - 1)) then
      begin
        // Compute the U and V contributions, common to both pixels
        u_tmp := (u_ptr^) - 128;
        v_tmp := (v_ptr^) - 128;

        r_tmp := v_tmp * param.v_r_factor;
        g_tmp := u_tmp * param.u_g_factor + v_tmp * param.v_g_factor;
        b_tmp := u_tmp * param.u_b_factor;

        // Compute the Y contribution for each pixel
        y_tmp := (y_ptr1[0] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);

        if uv_y_sample_interval > 1 then
        begin
          y_tmp := (y_ptr2[0] - param.y_shift) * param.y_factor;
          PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr2);
        end;
      end;

      ys := ys + uv_y_sample_interval;
    end;

    // Catch the last line, if needed
    if (uv_y_sample_interval = 2) and (ys = (height - 1)) then
    begin
      y_ptr1 := Y + ys * Y_stride;
      u_ptr  := U + (ys div uv_y_sample_interval) * UV_stride;
      v_ptr  := V + (ys div uv_y_sample_interval) * UV_stride;

      rgb_ptr1 := RGB + ys * RGB_stride;

      // for(x=0; x<(width-(uv_x_sample_interval-1)); x+=uv_x_sample_interval)
      x := 0;
      while x < (width - (uv_x_sample_interval - 1)) do
      begin
        // Compute the U and V contributions, common to both pixels
        u_tmp := (u_ptr^) - 128;
        v_tmp := (v_ptr^) - 128;

        r_tmp := v_tmp * param.v_r_factor;
        g_tmp := u_tmp * param.u_g_factor + v_tmp * param.v_g_factor;
        b_tmp := u_tmp * param.u_b_factor;

        // Compute the Y contribution for each pixel
        y_tmp := (y_ptr1[0] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);

        y_tmp := (y_ptr1[y_pixel_stride] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);

        y_ptr1 := y_ptr1 + 2 * y_pixel_stride;
        u_ptr  := u_ptr + 2 * uv_pixel_stride div uv_x_sample_interval;
        v_ptr  := v_ptr + 2 * uv_pixel_stride div uv_x_sample_interval;
        x := x + uv_x_sample_interval;
      end;

      // Catch the last pixel, if needed
      if (uv_x_sample_interval = 2) and (x = (width - 1)) then
      begin
        // Compute the U and V contributions
        u_tmp := (u_ptr^) - 128;
        v_tmp := (v_ptr^) - 128;

        r_tmp := v_tmp * param.v_r_factor;
        g_tmp := u_tmp * param.u_g_factor + v_tmp * param.v_g_factor;
        b_tmp := u_tmp * param.u_b_factor;

        // Compute the Y contribution
        y_tmp := (y_ptr1[0] - param.y_shift) * param.y_factor;
        PACK_PIXEL(RGB_FORMAT, y_tmp, r_tmp, g_tmp, b_tmp, rgb_ptr1);
      end;
    end;

    end;