Advanced search

Media (0)


No media matching your criteria is available on this site.

Other articles (94)

  • MediaSPIP v0.2

    21 June 2013, by

    MediaSPIP 0.2 is the first stable version of MediaSPIP.
    Its official release date is 21 June 2013, as announced here.
    The zip file provided here contains only the MediaSPIP sources in the standalone version.
    As with the previous version, all the software dependencies must be installed manually on the server.
    If you wish to use this archive for a farm-mode installation, you will also need to make other changes (...)

  • Organising by category

    17 May 2013, by

    In MediaSPIP, a section goes by two names: category and rubrique.
    The various documents stored in MediaSPIP can be filed in different categories. You can create a category by clicking on "publish a category" in the publish menu at the top right (after logging in). A category can also be filed inside another category, which means you can build a tree of categories.
    The next time a document is published, the newly created category will be offered (...)

  • Retrieving information from the master site when installing an instance

    26 November 2010, by

    Purpose
    On the main site, a mutualisation instance is defined by several things: the data in the spip_mutus table; its logo; its main author (the id_admin in the spip_mutus table, corresponding to an id_auteur in the spip_auteurs table), who will be the only one able to definitively create the mutualisation instance;
    It can therefore be quite useful to retrieve some of this information in order to complete the installation of an instance, for example to retrieve the (...)

On other sites (10578)

  • Create thumbnail from a video directory using ffmpeg

    21 July 2017, by Shimon Wiener

    I have a project that requires me to produce a thumbnail for each of 2000 videos. I searched for a tool and found that ffmpeg can do it, but I could not find a sample demonstrating how to work through a directory of videos and create a thumbnail for every one of them. Can anyone point me to a good sample?
    Thanks, Shimon
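
    A minimal sketch of one way this could be scripted (not taken from the question), assuming Node.js is used, that the source videos sit in a videos/ folder, and that a thumbnails/ output folder already exists; it grabs a single frame five seconds into each file:

    var fs = require('fs');
    var path = require('path');
    var execFileSync = require('child_process').execFileSync;

    var videoDir = 'videos';      // assumed input folder of source videos
    var thumbDir = 'thumbnails';  // assumed output folder (must already exist)

    fs.readdirSync(videoDir).forEach(function (file) {
       var input = path.join(videoDir, file);
       var output = path.join(thumbDir, path.parse(file).name + '.png');
       // -ss 5 seeks 5 seconds in; -vframes 1 writes a single frame as the thumbnail
       execFileSync('ffmpeg', ['-y', '-ss', '5', '-i', input, '-vframes', '1', output]);
    });

    Running the files sequentially with execFileSync avoids launching 2000 ffmpeg processes at once; the seek point and the PNG output are only placeholder choices.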

  • WebM live streaming via DASH

    4 June 2018, by ewack

    I am following the instructions here to try to set up WebM live streaming via DASH. My input comes from an Axis camera and is streamed with h264 encoding. I am using Node to spin up the ffmpeg processes. I am able to create the .hdr file and the .chk files. The .mpd file is even created, but it is empty and I get an error saying:

    Could not write header for output file #0 (incorrect codec parameters ?): Operation not permitted
    Stream mapping: Stream #0:0 -> #0:0 (copy)

    Here’s all of my code:

    var express = require('express');
    var spawn = require('child_process').spawn;

    var app = express();

    app.use(express.static(__dirname + '/public'));

    app.listen(8080);
    console.log("Running on Port 8080");

    var ffmpeg1 = spawn('ffmpeg', [
       '-y',
       //video
       '-i', 'rtsp://admin:password@192.168.1.54:554/axis-media/media.amp?videocodec=h264&resolution=1280x720',

       '-map', '0:0',
       '-pix_fmt', 'yuv420p',
       '-color_range', '2',
       '-c:v', 'libvpx-vp9',

       '-s', '1280x720',
       '-keyint_min', '25',
       '-g', '25',

       // //VP9_LIVE_PARAMS
       '-speed', '6',
       '-tile-columns', '4',
       '-frame-parallel', '1',
       '-threads', '8',
       '-static-thresh', '0',
       '-max-intra-rate', '300',
       '-deadline', 'realtime',
       '-lag-in-frames', '0',
       '-error-resilient', '1',

       '-f', 'webm_chunk',
       '-header', 'public/glass_360.hdr',
       '-chunk_start_index', '1',
       'public/glass_360_%d.chk',
    ]);


    setTimeout(()=> {
     var ffmpeg2 = spawn('ffmpeg', [
       '-y',
       '-f', 'webm_dash_manifest',
       '-live', '1',
       '-i', 'public/glass_360.hdr',
       '-c', 'copy',
       '-map', '0',
       '-r', '25',
       '-framerate', '25',

       '-f', 'webm_dash_manifest',
       '-live', '1',

       '-adaptation_sets', '"id=0,streams=0"',
       '-chunk_start_index', '1',
       '-chunk_duration_ms', '2000',
       '-time_shift_buffer_depth', '7200',
       '-minimum_update_period', '7200',

       'public/glass_live_manifest.mpd'
     ]);
     ffmpeg2.stdout.on('data',
         function (data) {
             console.log('ff2std: ' + data);
         }
     );

     ffmpeg2.stderr.on('data',
         function (data) {
             console.log('ff2err: ' + data);
         }
     );
    }, 5000);

    ffmpeg1.stdout.on('data',
       function (data) {
           console.log('ff1std: ' + data);
       }
    );

    ffmpeg1.stderr.on('data',
       function (data) {
           console.log('ff1err: ' + data);
       }
    );

    Here is all of my output:

    Running on Port 8080
    ff1err: ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers
     built with Apple LLVM version 6.0 (clang-600.0.57) (based on LLVM 3.5svn)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.4 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda

    ff1err:   libavutil      55. 34.101 / 55. 34.101
     libavcodec     57. 64.101 / 57. 64.101
     libavformat    57. 56.101 / 57. 56.101
     libavdevice    57.  1.100 / 57.  1.100
     libavfilter     6. 65.100 /  6. 65.100
     libavresample   3.  1.  0 /  3.  1.  0
     libswscale      4.  2.100 /  4.  2.100
     libswresample   2.  3.100 /  2.  3.100
     libpostproc    54.  1.100 / 54.  1.100

    ff1err: Input #0, rtsp, from 'rtsp://admin:password@192.168.1.54:554/axis-media/media.amp?videocodec=h264&resolution=1280x720':
     Metadata:
       title           : Session streamed with GStreamer
       comment         : rtsp-server
     Duration: N/A, start: 0.033344
    ff1err: , bitrate: N/A
       Stream #0:0: Video: h264 (Main), yuvj420p(pc, bt709, progressive), 1280x720 [SAR 1:1 DAR 16:9], 30 fps, 30 tbr, 90k tbn, 180k tbc

    ff1err: [swscaler @ 0x7f8df281bc00] deprecated pixel format used, make sure you did set range correctly

    ff1err: [libvpx-vp9 @ 0x7f8df2800600] v1.6.1

    ff1err: Output #0, webm_chunk, to 'public/glass_360_%d.chk':
     Metadata:
       title           : Session streamed with GStreamer
       comment         : rtsp-server
       encoder         : Lavf57.56.101

    ff1err:     Stream #0:0: Video: vp9 (libvpx-vp9), yuv420p(pc), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 200 kb/s, 25 fps, 1k tbn, 25 tbc
       Metadata:
         encoder         : Lavc57.64.101 libvpx-vp9
       Side data:
         cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
    Stream mapping:
     Stream #0:0 -> #0:0 (h264 (native) -> vp9 (libvpx-vp9))
    Press [q] to stop, [?] for help

    ff1err: frame=   10 fps=0.0 q=0.0 size=N/A time=00:00:00.36 bitrate=N/A speed=0.71x    
    ff1err: frame=   25 fps= 25 q=0.0 size=N/A time=00:00:00.96 bitrate=N/A speed=0.946x    
    ff1err: frame=   40 fps= 26 q=0.0 size=N/A time=00:00:01.56 bitrate=N/A speed=1.03x    
    ff1err: frame=   55 fps= 27 q=0.0 size=N/A time=00:00:02.16 bitrate=N/A speed=1.07x    
    ff1err: frame=   70 fps= 28 q=0.0 size=N/A time=00:00:02.76 bitrate=N/A speed=1.09x    
    ff1err: frame=   85 fps= 28 q=0.0 size=N/A time=00:00:03.36 bitrate=N/A speed=1.11x    
    ff2err: ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers
     built with Apple LLVM version 6.0 (clang-600.0.57) (based on LLVM 3.5svn)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.4 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda

    ff2err:   libavutil      55. 34.101 / 55. 34.101
     libavcodec     57. 64.101 / 57. 64.101
     libavformat    57. 56.101 / 57. 56.101
     libavdevice    57.  1.100 / 57.  1.100
     libavfilter     6. 65.100 /  6. 65.100
     libavresample   3.  1.  0 /  3.  1.  0
     libswscale      4.  2.100 /  4.  2.100
     libswresample   2.  3.100 /  2.  3.100
     libpostproc    54.  1.100 / 54.  1.100

    ff2err: [webm_dash_manifest @ 0x7fbc5b80b400] Could not find codec parameters for stream 0 (Video: vp9, none, 1280x720): unspecified pixel format
    Consider increasing the value for the 'analyzeduration' and 'probesize' options

    ff2err: Input #0, webm_dash_manifest, from 'public/glass_360.hdr':
     Metadata:
       title           : Session streamed with GStreamer
       encoder         : Lavf57.56.101
     Duration: N/A, bitrate: N/A
       Stream #0:0: Video: vp9, none, 1280x720
    ff2err: , SAR 1:1 DAR 16:9, 25 fps, 25 tbr, 1k tbn, 1k tbc (default)
       Metadata:
         webm_dash_manifest_file_name: glass_360.hdr
         webm_dash_manifest_track_number: 1

    ff2err: Could not write header for output file #0 (incorrect codec parameters ?): Operation not permittedStream mapping:
     Stream #0:0 -> #0:0 (copy)

    ff2err:     Last message repeated 1 times

    ff1err: frame=  101 fps= 29 q=0.0 size=N/A time=00:00:04.00 bitrate=N/A speed=1.13x    
    ff1err: frame=  116 fps= 29 q=0.0 size=N/A time=00:00:04.60 bitrate=N/A speed=1.14x    
    ff1err: frame=  131 fps= 29 q=0.0 size=N/A time=00:00:05.20 bitrate=N/A speed=1.15x    
    ff1err: frame=  146 fps= 29 q=0.0 size=N/A time=00:00:05.80 bitrate=N/A speed=1.15x    
    ff1err: frame=  161 fps= 29 q=0.0 size=N/A time=00:00:06.40 bitrate=N/A speed=1.15x    
    ff1err: frame=  177 fps= 29 q=0.0 size=N/A time=00:00:07.04 bitrate=N/A speed=1.16x    
    ff1err: frame=  192 fps= 29 q=0.0 size=N/A time=00:00:07.64 bitrate=N/A speed=1.16x    
    ff1err: frame=  207 fps= 29 q=0.0 size=N/A time=00:00:08.24 bitrate=N/A speed=1.16x    
    ff1err: frame=  222 fps= 29 q=0.0 size=N/A time=00:00:08.84 bitrate=N/A speed=1.17x    
    ff1err: frame=  237 fps= 29 q=0.0 size=N/A time=00:00:09.44 bitrate=N/A speed=1.17x    
    ff1err: frame=  252 fps= 29 q=0.0 size=N/A time=00:00:10.04 bitrate=N/A speed=1.17x  

    Why is ffmpeg creating an empty .mpd file?

  • When generating a video from images and adding audio, the video isn't playable

    1 August 2017, by randommman

    So I have a bunch of images in a folder, with the following structure:

    image-0.png
    image-1.png
    image-2.png

    Sometimes the folder has just one image, which will be saved as:

    image-0.png

    My code to generate the video and add the audio:

    // create the file
    using (WaveFileReader wf = new WaveFileReader(Audio))
    {
       // the files
       OldVideo = "old.avi";
       NewVideo = "video.avi";
       Audio = "sound.wav";

       // Get the time of the audio and divide by images
       time = wf.TotalTime.TotalSeconds;
       mimtime = time / imagescount;

       ffmpegPath = "ffmpeg.exe";
       ffmpegParams = " -r 1/" + mimtime + " -i " + ImagesFolder + "image-%d.png -t " + time + " -y -vf scale=1280:-2 " + OldVideo;
       ffmaudioParams = " -i " + OldVideo + " -i " + Audio + " -c copy -shortest " + NewVideo;
    }


    using (Process ffmpeg = new Process())
    {
       //Generate video
       ffmpegstartInfo = new ProcessStartInfo();
       ffmpegstartInfo.FileName = ffmpegPath;
       ffmpegstartInfo.Arguments = ffmpegParams;
       ffmpegstartInfo.RedirectStandardOutput = true;
       ffmpegstartInfo.RedirectStandardError = true;
       ffmpegstartInfo.UseShellExecute = false;
       ffmpegstartInfo.CreateNoWindow = true;

       ffmpeg.StartInfo.FileName = ffmpegPath;
       ffmpeg.StartInfo.Arguments = ffmpegParams;
       ffmpeg.StartInfo = ffmpegstartInfo;
       ffmpeg.Start();
       ffmpeg.WaitForExit(30000);


       /*ffmpeg.StartInfo.FileName = "cmd.exe";
       ffmpeg.StartInfo.Arguments = "/k " + ffmpegPath + " " + ffmpegParams;
       ffmpeg.Start();
       ffmpeg.WaitForExit(30000);*/
    }

    using (Process ffmaudio = new Process())
    {
       //Add audio to video
       ffmaudiostartInfo = new ProcessStartInfo();
       ffmaudiostartInfo.FileName = ffmpegPath;
       ffmaudiostartInfo.Arguments = ffmaudioParams;
       ffmaudiostartInfo.RedirectStandardOutput = true;
       ffmaudiostartInfo.RedirectStandardError = true;
       ffmaudiostartInfo.UseShellExecute = false;
       ffmaudiostartInfo.CreateNoWindow = true;

       ffmaudio.StartInfo.FileName = ffmpegPath;
       ffmaudio.StartInfo.Arguments = ffmaudioParams;
       ffmaudio.StartInfo = ffmaudiostartInfo;
       ffmaudio.Start();
       ffmaudio.WaitForExit(30000);

       /*ffmaudio.StartInfo.FileName = "cmd.exe";
       ffmaudio.StartInfo.Arguments = "/k " + ffmpegPath + " " + ffmaudioParams;
       ffmaudio.Start();
       ffmaudio.WaitForExit(30000);*/
    }

    What my code does: with the WaveFileReader it gets the total length of my audio file and then divides that time by the number of images I have, so I can generate a video in which the images get equal screen time. It first generates the video to a file called 'old.avi', which is then combined with my wav file to produce a new file called video.avi (which should contain the generated video and the audio together).

    My problem is that the generated video is unplayable. It claims to be (for example) 1 minute 50 seconds long, but once I click it, it won't play, as you can see below:

    http://i.imgur.com/tsck2NX.gifv

    I am using C# with Process and WinForms, which opens the command line to run ffmpeg with the two commands above, stored in the variables below:

    ffmpegParams;
    ffmaudioParams;

    These hold all the options I am using:

    -r 1/(audio length divided by total images), so each image is shown for an equal duration
    -t (to limit the video to the audio's total length in seconds)
    ... plus the other options you can see in my code above
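
    For illustration only (these numbers are not from the post): with, say, a 100-second WAV file and 4 images, mimtime works out to 25, and the two argument strings the code builds would come out roughly as follows (ImagesFolder path omitted):

    ffmpeg -r 1/25 -i image-%d.png -t 100 -y -vf scale=1280:-2 old.avi
    ffmpeg -i old.avi -i sound.wav -c copy -shortest video.avi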

    How can I fix the problem I have described?

    Command-line output when generating the video:

    ffmpeg version N-86848-g03a9e6f Copyright (c) 2000-2017 the FFmpeg developers
     built with gcc 7.1.0 (GCC)
     configuration: --enable-gpl --enable-version3 --enable-cuda --enable-cuvid --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-nvenc --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-zlib
     libavutil      55. 68.100 / 55. 68.100
     libavcodec     57.102.100 / 57.102.100
     libavformat    57. 76.100 / 57. 76.100
     libavdevice    57.  7.100 / 57.  7.100
     libavfilter     6. 95.100 /  6. 95.100
     libswscale      4.  7.101 /  4.  7.101
     libswresample   2.  8.100 /  2.  8.100
     libpostproc    54.  6.100 / 54.  6.100
    Input #0, image2, from 'C:\Users\Laptop\Documents\program/images/image-%d.png':
     Duration: 00:00:00.04, start: 0.000000, bitrate: N/A
       Stream #0:0: Video: png, rgba(pc), 800x800, 25 tbr, 25 tbn, 25 tbc
    Stream mapping:
     Stream #0:0 -> #0:0 (png (native) -> mpeg4 (native))
    Press [q] to stop, [?] for help
    [mpeg4 @ 00000000024f44a0] bitrate tolerance 4000000 too small for bitrate 200000, overriding
    Output #0, avi, to 'C:\Users\Laptop\Documents\program/old.avi':
     Metadata:
       ISFT            : Lavf57.76.100
       Stream #0:0: Video: mpeg4 (FMP4 / 0x34504D46), yuv420p, 1280x1280, q=2-31, 200 kb/s, 0.01 fps, 0.01 tbn, 0.01 tbc
       Metadata:
         encoder         : Lavc57.102.100 mpeg4
       Side data:
         cpb: bitrate max/min/avg: 0/0/200000 buffer size: 0 vbv_delay: -1
    frame=    1 fps=0.0 q=6.8 Lsize=      46kB time=00:01:37.57 bitrate=   3.9kbits/s speed=1.43e+003x
    video:41kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 13.641467%

    Command-line output when adding the audio to the video:

    ffmpeg version N-86848-g03a9e6f Copyright (c) 2000-2017 the FFmpeg developers
     built with gcc 7.1.0 (GCC)
     configuration: --enable-gpl --enable-version3 --enable-cuda --enable-cuvid --enable-d3d11va --enable-dxva2 --enable-libmfx --enable-nvenc --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenh264 --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-zlib
     libavutil      55. 68.100 / 55. 68.100
     libavcodec     57.102.100 / 57.102.100
     libavformat    57. 76.100 / 57. 76.100
     libavdevice    57.  7.100 / 57.  7.100
     libavfilter     6. 95.100 /  6. 95.100
     libswscale      4.  7.101 /  4.  7.101
     libswresample   2.  8.100 /  2.  8.100
     libpostproc    54.  6.100 / 54.  6.100
    Input #0, avi, from 'C:\Users\Laptop\Documents\program/old.avi':
     Metadata:
       encoder         : Lavf57.76.100
     Duration: 00:01:37.58, start: 0.000000, bitrate: 3 kb/s
       Stream #0:0: Video: mpeg4 (Simple Profile) (FMP4 / 0x34504D46), yuv420p, 1280x1280 [SAR 1:1 DAR 1:1], 0.01 fps, 0.01 tbr, 0.01 tbn, 40 tbc
    Guessed Channel Layout for Input Stream #1.0 : mono
    Input #1, wav, from 'C:\Users\Laptop\Documents\program/audio.wav':
     Duration: 00:01:37.57, bitrate: 352 kb/s
       Stream #1:0: Audio: pcm_s16le ([1][0][0][0] / 0x0001), 22050 Hz, mono, s16, 352 kb/s
    Output #0, avi, to 'C:\Users\Laptop\Documents\program/video.avi':
     Metadata:
       ISFT            : Lavf57.76.100
       Stream #0:0: Video: mpeg4 (Simple Profile) (FMP4 / 0x34504D46), yuv420p, 1280x1280 [SAR 1:1 DAR 1:1], q=2-31, 0.01 fps, 0.01 tbr, 0.01 tbn, 0.01 tbc
       Stream #0:1: Audio: pcm_s16le ([1][0][0][0] / 0x0001), 22050 Hz, mono, s16, 352 kb/s
    Stream mapping:
     Stream #0:0 -> #0:0 (copy)
     Stream #1:0 -> #0:1 (copy)
    Press [q] to stop, [?] for help
    frame=    1 fps=0.0 q=-1.0 Lsize=      55kB time=00:01:37.57 bitrate=   4.6kbits/s speed=1.95e+005x
    video:41kB audio:4kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 21.946869%