Newest 'ffmpeg' Questions - Stack Overflow

http://stackoverflow.com/questions/tagged/ffmpeg

Articles published on the site

  • Write EPIPE after upgrading NodeJS

    28 July, by Rougher

    I am using this code to detect audio replay gain. It worked well with Node.js 16, but after upgrading to Node.js 22 it started crashing a few times an hour with this error:

    write EPIPE
        at WriteWrap.onWriteComplete [as oncomplete] (node:internal/stream_base_commons:87:19) {
      errno: -32,
      code: 'EPIPE',
      syscall: 'write'
    }
    

    My original code was

    static getReplayGainVolume(audioData: Buffer) {
            // Calculate the mean volume of the audio file at the given filePath
            var ffmpeg = spawn('ffmpeg', [
                '-i', '-',
                '-af', 'replaygain',
                '-f', 'null', '/dev/null',
                '-hide_banner', '-nostats'
            ]);
    
            var output = '';
    
            ffmpeg.stdin.write(audioData);
            ffmpeg.stdin.end();
    
            return new Promise((resolve,reject)=>{
                ffmpeg.on('error', function (err: any) {
                    reject(err);
                });
                
                ffmpeg.on('close', function (_code: any) {
                    // [Parsed_replaygain_0 @ 0000000002a2b5c0] track_gain = +6.53 dB
                    if (!output.includes("track_gain")) {
                        reject(output);
    
                        return;
                    }
    
                    const gainWithDb = output.split("track_gain = ")[1];
                    if (!gainWithDb) {
                        reject(output);
    
                        return;
                    }
    
                    const gain = gainWithDb.split(" dB")[0];
                    if (!gain) {
                        reject(output);
    
                        return;
                    }
    
                    resolve(parseFloat(gain));
                });
                
                ffmpeg.stderr.on('data', function (data: any) {
                    // ffmpeg sends all output to stderr. It is not a bug, it is a feature :)
                    var tData = data.toString('utf8');
                    output += tData;
                });
            });
        }
    

    Then, after searching forums and Google, I reworked it (I hope I improved it with some cleanups):

    static getReplayGainVolume(audioData: Buffer): Promise<number> {
            return new Promise((resolve, reject) => {
                const FFMPEG_PATH = 'ffmpeg'; // Adjust this if ffmpeg is not in system PATH
                const FFMPEG_TIMEOUT_MS = 30 * 1000; // 30 seconds timeout for FFmpeg execution
    
                let ffmpeg: ChildProcessWithoutNullStreams;
                let output = ''; // Accumulate all stderr output
    
                // Timeout for the FFmpeg process itself
                const ffmpegTimeout = setTimeout(() => {
                    log.error(`[FFmpeg] FFmpeg process timed out after ${FFMPEG_TIMEOUT_MS / 1000} seconds. Killing process.`);
                    if (ffmpeg && !ffmpeg.killed) {
                        ffmpeg.kill('SIGKILL'); // Force kill
                        reject(new Error(`FFmpeg process timed out and was killed.`));
                    }
                }, FFMPEG_TIMEOUT_MS);
    
                // --- Define cleanup function to be called on process exit/error ---
                const cleanup = (shouldReject = false, error?: Error | string) => {
                    clearTimeout(ffmpegTimeout); // Ensure timeout is cleared
    
                    // Remove all listeners to prevent leaks
                    // This is CRITICAL for long-running bots that spawn many child processes
                    ffmpeg.stdin.removeAllListeners();
                    ffmpeg.stdout.removeAllListeners();
                    ffmpeg.stderr.removeAllListeners();
                    ffmpeg.removeAllListeners(); // Remove process listeners
    
                    if (ffmpeg && !ffmpeg.killed) { // Ensure ffmpeg process is killed if still alive
                        ffmpeg.kill(); // Graceful kill (SIGTERM), then wait for exit. If not, then SIGKILL.
                    }
    
                    if (shouldReject) {
                        reject(error instanceof Error ? error : new Error(String(error)));
                    }
                };
    
                try {
                    ffmpeg = spawn(FFMPEG_PATH, [
                        '-i', 'pipe:0', // Read input from stdin (pipe:0)
                        '-af', 'replaygain',
                        '-f', 'null', '/dev/null', // Write output to null device (discard audio output)
                        '-hide_banner', '-nostats' // Suppress ffmpeg's initial info and progress stats
                    ], { stdio: ['pipe', 'pipe', 'pipe'] }); // Explicitly pipe stdin, stdout, stderr
    
                    // --- CRITICAL: Event Handlers for ffmpeg process ---
    
                    // 1. Handle errors during spawning or execution (e.g., ffmpeg not found)
                    ffmpeg.on('error', (err: any) => {
                        log.error(`[FFmpeg] Failed to spawn or execute FFmpeg process:`, err);
                        cleanup(true, new Error(`FFmpeg process error: ${err.message}`));
                    });
    
                    // 2. Accumulate stderr output (where replaygain results and ffmpeg errors are printed)
                    ffmpeg.stderr.on('data', (data: Buffer) => {
                        output += data.toString('utf8');
                    });
    
                    // 3. Handle process exit (success or failure)
                    ffmpeg.on('close', (code: number) => { // 'close' indicates process has exited
                        log.debug(`[FFmpeg] FFmpeg process exited with code: ${code}.`);
                        if (code !== 0) { // Non-zero exit code means failure
                            log.error(`[FFmpeg] FFmpeg process exited with non-zero code ${code}. Output:\n${output}`);
                            cleanup(true, new Error(`FFmpeg process failed with exit code ${code}. Output: ${output}`));
                            return;
                        }
    
                        // If successful exit (code 0), parse the output
                        if (!output.includes("track_gain")) {
                            log.error(`[FFmpeg] 'track_gain' not found in FFmpeg output (exit code 0). Output:\n${output}`);
                            cleanup(true, new Error(`'track_gain' not found in FFmpeg output. Output: ${output}`));
                            return;
                        }
    
                        try {
                            // Regex to parse track_gain (e.g., "+6.53 dB" or "-12.00 dB")
                            const gainMatch = output.match(/track_gain\s*=\s*([+-]?\d+\.?\d*)\s*dB/);
                            if (gainMatch && gainMatch[1]) {
                                const gain = parseFloat(gainMatch[1]);
                                log.debug(`[FFmpeg] Replay gain volume: ${gain} dB.`);
                                cleanup(); // Clean up on success
                                resolve(gain);
                            } else {
                                log.error(`[FFmpeg] Failed to parse gain from FFmpeg output. Output:\n${output}`);
                                cleanup(true, new Error(`Failed to parse gain from FFmpeg output. Output: ${output}`));
                            }
                        } catch (parseError: any) {
                            log.error(`[FFmpeg] Error parsing FFmpeg replay gain output:`, parseError);
                            cleanup(true, new Error(`Error parsing FFmpeg output: ${parseError.message}. Output: ${output}`));
                        }
                    });
    
                    // 4. Write audio data to ffmpeg's stdin
                    // This is the only write operation that could throw EPIPE in this function.
                    try {
                        ffmpeg.stdin.write(audioData);
                        ffmpeg.stdin.end(); // Close stdin to signal end of input
                    } catch (stdinError: any) {
                        log.error(`[FFmpeg] Error writing audioData to FFmpeg stdin:`, stdinError);
                        // This error means ffmpeg's stdin pipe closed unexpectedly.
                        // This is the direct equivalent of an EPIPE (Broken Pipe) at the child process level.
                        cleanup(true, new Error(`Failed to pipe audio data to FFmpeg stdin: ${stdinError.message}`));
                    }
    
                } catch (spawnError: any) { // Catch errors from the spawn call itself (e.g., FFMPEG_PATH is invalid)
                    log.error(`[FFmpeg] Error spawning FFmpeg:`, spawnError);
                    cleanup(true, new Error(`Failed to spawn FFmpeg process: ${spawnError.message}`));
                }
            });
        }
    

    But unfortunately I still get the same error. Has anyone encountered this problem? How can I solve it?

    I use ffmpeg version 4.2.7-0ubuntu0.1

    Thanks.
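
    One detail that may matter here: in Node.js an EPIPE on a pipe is delivered asynchronously through the stream's 'error' event (the stack trace above comes from the write-completion callback), so a try/catch around stdin.write() will not catch it. If nothing is listening for 'error' on ffmpeg.stdin, the event becomes an uncaught exception and crashes the process; the EPIPE itself usually just means ffmpeg exited or closed its stdin before all of the piped data was flushed. A minimal sketch of guarding the write (function and variable names here are illustrative, not from the original code):

    import { spawn } from 'child_process';

    function analyzeReplayGain(audioData: Buffer): Promise<string> {
        return new Promise((resolve, reject) => {
            const ffmpeg = spawn('ffmpeg', [
                '-i', 'pipe:0',
                '-af', 'replaygain',
                '-f', 'null', '/dev/null',
                '-hide_banner', '-nostats',
            ]);

            let output = '';
            ffmpeg.stderr.on('data', (chunk: Buffer) => { output += chunk.toString('utf8'); });

            // EPIPE is emitted here, not thrown from stdin.write().
            ffmpeg.stdin.on('error', (err: any) => {
                if (err.code !== 'EPIPE') reject(err);
                // For EPIPE, fall through and let 'close' decide from the exit
                // code and the collected stderr output.
            });

            ffmpeg.on('error', reject);
            ffmpeg.on('close', (code: number | null) => {
                if (code === 0) resolve(output);
                else reject(new Error(`ffmpeg exited with code ${code}: ${output}`));
            });

            ffmpeg.stdin.write(audioData);
            ffmpeg.stdin.end();
        });
    }

    The same 'error' listener can be added to the existing getReplayGainVolume (with its timeout and cleanup logic); the important part is attaching it to ffmpeg.stdin before calling write().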

  • Downloading a video using fluent-ffmpeg in nodejs and express

    26 July, by Apdo Elsaed

    I am working on a side project to download videos from Reddit, but Reddit serves video and audio as separate files, so I have to merge them before the client downloads them. I was able to do all of this with the following snippet of code.

    const ffmpeg = require("fluent-ffmpeg");
    const proc = new ffmpeg();
    
    app.post('/download', async (req, res) => {
      const audio = "some audio link";
      const video = "some video link";

      proc.addInput(video)
        .output('${some path}./video.mp4')
        .format('mp4')
        .on("error", err => console.log(err))
        .on('end', () => console.log('Done'));

      if (audio) {
        proc.addInput(audio);
      }

      proc.run();
    });
    

    Using the above code, the video is downloaded locally on the server at the specified path.

    But I want the video to be downloaded in the client browser that sent the request. I tried:

    proc.pipe(res); 
    

    but it didn't work. It's my first time working with ffmpeg, so it would be nice if someone could give me a hint.
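
    One thing worth knowing in this situation: the standard mp4 muxer wants a seekable output, so piping ffmpeg's output straight to an HTTP response generally only works when the file is written as a fragmented MP4 (-movflags frag_keyframe+empty_moov) or when a stream-friendly container is used. A rough sketch of what that could look like (the route, headers and placeholder URLs are illustrative, and a fresh command is created per request instead of reusing one shared proc object):

    import express from 'express';
    import ffmpeg from 'fluent-ffmpeg';

    const app = express();

    app.post('/download', (req, res) => {
      const video = 'some video link';   // placeholders, as in the question
      const audio = 'some audio link';

      res.setHeader('Content-Type', 'video/mp4');
      res.setHeader('Content-Disposition', 'attachment; filename="video.mp4"');

      const command = ffmpeg()
        .input(video)
        .input(audio)
        .outputOptions([
          '-c copy',                              // keep the original codecs where possible
          '-movflags frag_keyframe+empty_moov',   // fragmented MP4, so no seeking is needed
        ])
        .format('mp4')
        .on('error', (err: Error) => {
          console.log(err);
          if (!res.headersSent) res.sendStatus(500);
          else res.end();
        });

      command.pipe(res, { end: true });           // stream the merged output to the client
    });

    Alternatively, the original approach of writing the merged file on the server still works: wait for the 'end' event and then send the finished file to the client with res.download().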

  • What is the best way to use ffmpeg with ASP.NET Core on Linux?

    26 July, by user8245660

    I've got a project (ASP.NET Core, on Linux) with several tasks related to converting video and extracting frames from video files.

    I thought about two possible options:

    1. using the ffmpeg console utility
    2. using P/Invoke with libavcodec and the other libraries that ffmpeg uses

    The second option is far harder and may be quite impractical, because it amounts to developing a new wrapper/library instead of using ready-made products.

    I've done some googling, but there are no well-maintained projects for C#/ASP.NET Core on Linux. There are some good ones for C++ and Python, but not for C# on Linux/.NET Core.

    I decided to look at the first option, which I suppose would be easier and more practical. But it has some weak points that can cause problems: with this option the developer has to fork a separate process, so there could be issues with idle processes and other problems...

    I'm asking about your experience, because this is my first time doing video conversion/sampling with C# on Linux. I've used the Expression Encoder .NET library on Windows, but that's another story and isn't relevant right now.

    Maybe there are other options that I can't see at the moment. I dislike the first option because of possible unhandled exceptions: used this way, ffmpeg becomes a black box for the ASP.NET Core backend.

  • How to write H264 raw stream into mp4 using ffmpeg directly

    26 July, by Yelsin

    I want to wrap H264 NALUs (x264-encoded) into mp4 using ffmpeg (SDK 2.1), but the output mp4 file does not play. I don't know how to set the pts and dts. Here's my code, based on "Raw H264 frames in mpegts container using libavcodec" and muxing.c from www.ffmpeg.org. My H264 stream has no B-frames, every NALU starts with 00 00 00 01, and the stream begins with SPS and PPS followed by the H264 data.

    #include "stdafx.h"
    #include 
    #include 
    #include "Stream2Mp4.h"
    
    #include <libavutil/opt.h>
    #include <libavutil/mathematics.h>
    #include <libavutil/timestamp.h>
    #include <libavformat/avformat.h>
    #include <libswresample/swresample.h>
    #include <libswresample/swresample.h>
    
    #define STREAM_FRAME_RATE 25
    #define STREAM_PIX_FMT    AV_PIX_FMT_YUV420P /* default pix_fmt */
    
    static int ptsInc = 0;
    static int vi = -1;
    static int waitkey = 1;
    
    // < 0 = error
    // 0 = I-Frame
    // 1 = P-Frame
    // 2 = B-Frame
    // 3 = S-Frame
    int getVopType( const void *p, int len )
    {   
    if ( !p || 6 >= len )
        return -1;
    
    unsigned char *b = (unsigned char*)p;
    
    // Verify NAL marker
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
    {   b++;
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
        return -1;
    } // end if
    
    b += 3;
    
    // Verify VOP id
    if ( 0xb6 == *b )
    {   b++;
    return ( *b & 0xc0 ) >> 6;
    } // end if
    
    switch( *b )
    {   case 0x65 : return 0;
    case 0x61 : return 1;
    case 0x01 : return 2;
    } // end switch
    
    return -1;
    }
    
    int get_nal_type( void *p, int len )
    {
    if ( !p || 5 >= len )
        return -1;
    
    unsigned char *b = (unsigned char*)p;
    
    // Verify NAL marker
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
    {   b++;
    if ( b[ 0 ] || b[ 1 ] || 0x01 != b[ 2 ] )
        return -1;
    } // end if
    
    b += 3;
    
    return *b;
    }
    
    
    /* Add an output stream */
    AVStream *add_stream(AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
    {
    AVCodecContext *c;
    AVStream *st;
    
    /* find the encoder */
    *codec = avcodec_find_encoder(codec_id);
    if (!*codec)
    {
        printf("could not find encoder for '%s' \n", avcodec_get_name(codec_id));
        exit(1);
    }
    
    st = avformat_new_stream(oc, *codec);
    if (!st)
    {
        printf("could not allocate stream \n");
        exit(1);
    }
    st->id = oc->nb_streams-1;
    c = st->codec;
    vi = st->index;
    
    switch ((*codec)->type)
    {
    case AVMEDIA_TYPE_AUDIO:
        c->sample_fmt = (*codec)->sample_fmts ? (*codec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
        c->bit_rate = 64000;
        c->sample_rate = 44100;
        c->channels = 2;
        break;
    
    case AVMEDIA_TYPE_VIDEO:
        c->codec_id = codec_id;
        c->bit_rate = 90000;
        c->width = 480;
        c->height = 354;
        c->time_base.den = 15;
        c->time_base.num = 1;
        c->gop_size = 12;
        c->pix_fmt = STREAM_PIX_FMT;
        if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
        {
            c->max_b_frames = 2;
        }
        if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
        {
            c->mb_decision = 2;
        }
        break;
    
    default:
        break;
    }
    
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
    {
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    
    return st;
    }
    
    
    
    void open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st)
    {
    int ret;
    AVCodecContext *c = st->codec;
    
    /* open the codec */
    ret = avcodec_open2(c, codec, NULL);
    if (ret < 0)
    {
        printf("could not open video codec");
        //exit(1);
    }
    
    }
    
    int CreateMp4(AVFormatContext *&m_pOc, void *p, int len)
    {
    int ret; 
    const char* pszFileName = "output002.mp4";
    AVOutputFormat *fmt;
    AVCodec *video_codec;
    AVStream *m_pVideoSt;
    
    if (0x67 != get_nal_type(p, len))
    {
        printf("can not detect nal type");
        return -1;
    }
    av_register_all();
    
    avformat_alloc_output_context2(&m_pOc, NULL, NULL, pszFileName);
    if (!m_pOc)
    {
        printf("Could not deduce output format from file extension: using MPEG. \n");
        avformat_alloc_output_context2(&m_pOc, NULL, "mpeg", pszFileName);
    }
    if (!m_pOc)
    {
        return 1;
    }
    
    fmt = m_pOc->oformat;
    
    if (fmt->video_codec != AV_CODEC_ID_NONE)
    {
        m_pVideoSt = add_stream(m_pOc, &video_codec, fmt->video_codec);
    }
    
    if (m_pVideoSt)
    {
        open_video(m_pOc, video_codec, m_pVideoSt); 
    }
    
    av_dump_format(m_pOc, 0, pszFileName, 1);
    
    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE))
    {
        ret = avio_open(&m_pOc->pb, pszFileName, AVIO_FLAG_WRITE);
        if (ret < 0)
        {
            printf("could not open '%s': %s\n", pszFileName);
            return 1;
        }
    }
    
    /* Write the stream header, if any */
    ret = avformat_write_header(m_pOc, NULL);
    if (ret < 0)
    {
        printf("Error occurred when opening output file");
        return 1;
    }

    return 0;
    }
    
    
    /* write h264 data to mp4 file*/
    
    
    void WriteVideo(AVFormatContext *&m_pOc,void* data, int nLen)
    {
    int ret;
    
    if ( 0 > vi )
    {
        printf("vi less than 0");
        //return -1;
    }
    AVStream *pst = m_pOc->streams[ vi ];
    
    // Init packet
    AVPacket pkt;
    
    AVCodecContext *c = pst->codec;
    
    av_init_packet( &pkt );
    pkt.flags |= ( 0 >= getVopType( data, nLen ) ) ? AV_PKT_FLAG_KEY : 0;   
    
    pkt.stream_index = pst->index;
    pkt.data = (uint8_t*)data;
    pkt.size = nLen;
    
    // Wait for key frame
    if ( waitkey )
        if ( 0 == ( pkt.flags & AV_PKT_FLAG_KEY ) )
            return ;
        else
            waitkey = 0;
    
    
    pkt.pts = (ptsInc++) * (90000/STREAM_FRAME_RATE);
    //pkt.dts = (ptsInc++) * (90000/STREAM_FRAME_RATE);
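    /* A possible direction for the pts/dts question (a sketch only, not verified
       against this stream): the mp4 muxer expects pkt.pts and pkt.dts in the
       stream's time_base (pst->time_base, which avformat_write_header may adjust),
       not in 90 kHz ticks, e.g.
           pkt.pts = av_rescale_q(ptsInc++, (AVRational){1, STREAM_FRAME_RATE}, pst->time_base);
           pkt.dts = pkt.pts;   // no B-frames in this stream, so dts can follow pts
       Setting dts explicitly also avoids leaving it at AV_NOPTS_VALUE. */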
    
    ret = av_interleaved_write_frame( m_pOc, &pkt );
    if (ret < 0)
    {
        printf("cannot write frame");
    }
    
    
    }
    
    void CloseMp4(AVFormatContext *&m_pOc)
    {
    waitkey = -1;
    vi = -1;
    
    if (m_pOc)
        av_write_trailer(m_pOc);
    
    if (m_pOc && !(m_pOc->oformat->flags & AVFMT_NOFILE))
        avio_close(m_pOc->pb);
    
    if (m_pOc)
    {
        avformat_free_context(m_pOc);
        m_pOc = NULL;
    }
    
    }
    

    Could anybody help me? Thank you very much!

  • How to make FFmpeg download only the required segments [closed]

    25 July, by daniil_

    I'm developing a tool that creates timelapses. For this purpose I use ffmpeg version 7.1.1. I have a playlist file called index.m3u8 (inside it there are URLs to TS segments, typically around 5,000 of them). Here's a small excerpt:

    #EXTM3U
    #EXT-X-TARGETDURATION:12
    #EXT-X-VERSION:6
    #EXT-X-MEDIA-SEQUENCE:0
    #EXT-X-PLAYLIST-TYPE:EVENT
    #EXT-X-START:TIME-OFFSET=8,PRECISE=YES
    #EXT-X-PROGRAM-DATE-TIME:2025-07-22T07:59:51.173Z
    #EXTINF:10.080,
    https://my-storage.io/receiver/archives/3a1b162d-392d-d45a-089a-dc93fb842a35/files/2025-07-22/07/59/1753171191173_aa_00000_00000_10080.ts?exp=1753543086&signature=MEYCIQCx_hDnFVwk7WJyQG5QujV5ZGhLoBqkD5uoQFebha1knQIhAMJm1KA5DLEuqD4_5zS5QlzTGIuj5q9TCUcML7MEPMl7
    #EXT-X-PROGRAM-DATE-TIME:2025-07-22T08:00:01.253Z
    #EXTINF:10.080,
    https://my-storage.io/receiver/archives/3a1b162d-392d-d45a-089a-dc93fb842a35/files/2025-07-22/08/00/1753171201253_aa_00000_00000_10080.ts?exp=1753543086&signature=MEYCIQCx_hDnFVwk7WJyQG5QujV5ZGhLoBqkD5uoQFebha1knQIhAMJm1KA5DLEuqD4_5zS5QlzTGIuj5q9TCUcML7MEPMl7
    #EXT-X-PROGRAM-DATE-TIME:2025-07-22T08:00:11.333Z
    #EXTINF:10.021,
    https://my-storage.io/receiver/archives/3a1b162d-392d-d45a-089a-dc93fb842a35/files/2025-07-22/08/00/1753171211333_aa_00000_00000_10021.ts?exp=1753543086&signature=MEYCIQCx_hDnFVwk7WJyQG5QujV5ZGhLoBqkD5uoQFebha1knQIhAMJm1KA5DLEuqD4_5zS5QlzTGIuj5q9TCUcML7MEPMl7
    #EXT-X-PROGRAM-DATE-TIME:2025-07-22T08:00:21.354Z
    #EXTINF:10.079,
    https://my-storage.io/receiver/archives/3a1b162d-392d-d45a-089a-dc93fb842a35/files/2025-07-22/08/00/1753171221354_aa_00000_00000_10079.ts?exp=1753543086&signature=MEYCIQCx_hDnFVwk7WJyQG5QujV5ZGhLoBqkD5uoQFebha1knQIhAMJm1KA5DLEuqD4_5zS5QlzTGIuj5q9TCUcML7MEPMl7
    #EXT-X-PROGRAM-DATE-TIME:2025-07-22T08:00:31.433Z
    ...
    

    I’m trying to extract still images from the stream at specific moments.

    When I run a command like:

    ffmpeg \
      -analyzeduration 5000000 \
      -probesize 5000000 \
      -err_detect ignore_err \
      -protocol_whitelist file,http,https,tcp,tls \
      -allowed_extensions ALL \
      -f hls \
      -y \
      -ss 11316.719 \
      -i /Users/daniil/Desktop/test/exports/.../index.m3u8 \
      -frames:v 1 \
      -q:v 2 \
      /Users/daniil/Desktop/test/exports/.../frames/frame_00100.png
    

    ffmpeg downloads segments 1 and 2 (presumably to probe the format), and then it downloads two more TS files around the target timestamp.

    But as soon as I push the -ss value beyond a certain point—in my case -ss 8487.54—it starts downloading every TS segment from the very start of the playlist up to the one I need. That can easily be 1,000 TS files or more. In other words, when -ss is between 0 and 8374.372, everything works fine, but beyond that it exhibits this strange behavior.