
Other articles (74)

  • Sites built with MediaSPIP

    2 May 2011

    This page presents some of the sites running MediaSPIP.
    You can of course add your own using the form at the bottom of the page.

  • HTML5 audio and video support

    10 April 2011

    MediaSPIP uses the HTML5 video and audio tags to play multimedia documents, taking advantage of the latest W3C innovations supported by modern browsers.
    For older browsers, the Flowplayer flash player is used.
    The HTML5 player was created specifically for MediaSPIP: its appearance is fully customisable to match a chosen theme.
    These technologies make it possible to deliver video and sound both on conventional computers (...)

  • HTML5 audio and video support

    13 April 2011

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player has been created specifically for MediaSPIP and can easily be adapted to fit in with a specific theme.
    For older browsers the Flowplayer flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

On other sites (7458)

  • ffmpeg: match audio video duration post concatenation

    20 January 2020, by Massimo Vantaggio

    I wrote this bash script to automate the concatenation of videos for a DASH/MPEG streaming infinite loop.
    I'm unable to obtain the same duration for audio and video: with the videos I'm using for testing, the audio track always comes out 6 ms longer.
    May I ask for help debugging it and understanding how to get the same duration for both the audio and the video track?

    #!/bin/bash
    #CANCAT 0.3

    cd input
    fps=()
    # GET FPS OF EACH VIDEO INTO ARRAY
    for f in *.mp4; do
       _f=$(ffmpeg -i "$f" 2>&1 | sed -n "s/.*, \\(.*\\) fp.*/\\1/p")
       fps+=("$_f")
    done

    #GET NUMBER OF ELEMENTS IN FPS ARRAY
    tLen=${#fps[@]}
    #CHECK FPS EQUALITY (compare every entry against the first one)
    for rate in "${fps[@]:1}"; do
       if [[ "$rate" != "${fps[0]}" ]]; then
           printf "WARNING: VIDEO FRAME RATES ARE NOT EQUAL, THE PROCESS CAN'T START."
           printf "%s\\0" "${fps[@]}" |
               sort -zu |
               xargs -0 printf " %s"
           printf "\\n"
          exit 1
       fi
    done

    #AUDIO ENCODING (normalise every input's audio into ../buffer)
    for f in *.mp4; do
       NAME="${f%.*}"
       ffmpeg -y -i "$f" -c copy -video_track_timescale 90k -c:a aac -b:a 384k -ar 44100 -ac 2 -shortest -af aresample=async=1 "../buffer/${NAME}_buffer.mp4"
    done
    #-af aresample=async=1000
    #-filter_complex " [1:0] apad "

    cd ..
    cd buffer
    times=()
    for f in *.mp4; do
       echo "file '$f'" >> list.txt

       # GET DURATION OF EACH VIDEO
       _t=$(ffprobe -i "$f" -show_entries format=duration -v quiet -of csv="p=0")
       times+=("$_t")
    done

    #SUM ALL DURATIONS
    TOTALDURATION=$( echo "${times[@]}" | sed 's/ /+/g' | bc )
    DURROUND=$(echo "$TOTALDURATION" | cut -d'.' -f1)
    TOTDELTA="$((DURROUND%2))"
    TOTDUR="$(($DURROUND-$TOTDELTA))"

    #GET FPS (from the last buffered file; the check above guarantees all inputs share it)
    FPS="$(ffmpeg -i "$f" 2>&1 | sed -n "s/.*, \(.*\) fp.*/\1/p")"
    #ROUND FPS TO AN INTEGER
    FPSC=$( echo "($FPS+0.5)/1" | bc )
    #GET GOP: a keyframe every 2 seconds (2 x fps), so segments can start on keyframes
    GOP="$((FPSC*2))"

    #ENCODING MASTER TRACK
    ffmpeg -f concat -safe 0 -y -i list.txt -loop 1 -i ../logo/logo.png -c:a copy -c:v libx264 -x264opts keyint=$GOP:min-keyint=$GOP:no-scenecut -bf 0 -r $FPSC -b:v 4800k -maxrate 9600k -bufsize 19200k -profile:v main -crf 22 -filter_complex "[0:v][1:v]overlay=main_w-overlay_w-10:10,scale=1920:1080,setsar=1" -t $TOTDUR 1080set.mp4

    #EXTRACTING AUDIO FROM MASTER VIDEO TRACK
    ffmpeg -y -i 1080set.mp4 -c copy -vn ../output/output_audio.mp4

    #REMOVE AUDIO FROM MASTER VIDEO TRACK
    ffmpeg -y -i 1080set.mp4 -c copy -an ../output/output_1080.mp4

    #CLEAN BUFFER
    rm *.mp4
    rm *.txt

    cd ..
    cd output

    #ENCODE 720p
    ffmpeg -y -i output_1080.mp4 -an -c:v libx264 -x264opts keyint=$GOP:min-keyint=$GOP:no-scenecut -bf 0 -s 1280x720 -r $FPSC -b:v 2400k -maxrate 4800k -bufsize 9600k -profile:v main -crf 22 output_720.mp4

    #ENCODE 360p
    ffmpeg -y -i output_1080.mp4 -an -c:v libx264 -x264opts keyint=$GOP:min-keyint=$GOP:no-scenecut -bf 0 -s 640x360 -r $FPSC -b:v 800k -maxrate 1600k -bufsize 3200k -profile:v main -crf 22 output_360.mp4

    #VALIDATOR
    echo 1080p ENCODING
    echo GOP:   $GOP
    echo VIDEO FORMAT CONTAINER DURATION:  
    ffprobe -v error -show_entries format=duration \
     -of default=noprint_wrappers=1:nokey=1 output_1080.mp4
    echo VIDEO STREAM DURATION:
    ffprobe -v error -select_streams v:0 -show_entries stream=duration \
     -of default=noprint_wrappers=1:nokey=1 output_1080.mp4
    echo AUDIO FORMAT CONTAINER DURATION:  
    ffprobe -v error -show_entries format=duration \
     -of default=noprint_wrappers=1:nokey=1 output_audio.mp4

    echo ______________________________________________________________

    echo 720p ENCODING
    echo GOP:   $GOP
    echo VIDEO FORMAT CONTAINER DURATION:  
    ffprobe -v error -show_entries format=duration \
     -of default=noprint_wrappers=1:nokey=1 output_720.mp4
    echo VIDEO STREAM DURATION:
    ffprobe -v error -select_streams v:0 -show_entries stream=duration \
     -of default=noprint_wrappers=1:nokey=1 output_720.mp4
    echo DONE
    exit 0

    Here is the script with my test videos and the corresponding folders:
    https://gofile.io/?c=WPAC0Q
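
    A minimal sketch of one way to pin the audio track to the exact video length (an assumed fix, not part of the original script): probe the video stream duration of 1080set.mp4 and re-encode the extracted audio with apad plus -t to that value, instead of stream-copying it.

    VDUR=$(ffprobe -v error -select_streams v:0 -show_entries stream=duration \
     -of default=noprint_wrappers=1:nokey=1 1080set.mp4)
    # Pad with silence if the audio runs short, then cut exactly at the video duration.
    ffmpeg -y -i 1080set.mp4 -vn -af apad -t "$VDUR" -c:a aac -b:a 384k ../output/output_audio.mp4

    Re-encoding here is what makes the -t cut sample-accurate; with -c copy the cut can only land on an AAC packet boundary, which may be where the few milliseconds of drift come from.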

  • How can I convert an mp4 into an ogv with ffmpeg? I am getting a "segmentation fault"

    8 March 2020, by xrainbowuk

    I am trying to use ffmpeg to convert an mp4 video into an ogv and a webm. I am using a Mac, and this is what I am putting into the terminal:

    ffmpeg -i "Hands-typing-(mobile-1080p-loop) copy.mp4" -codec:v libtheora -codec:a libvorbis -f ogv output.ogv

    This is what I am getting in response:

       ffmpeg version 4.2.2 Copyright (c) 2000-2019 the FFmpeg developers
     built with Apple clang version 11.0.0 (clang-1100.0.33.17)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/4.2.2_2 --enable-shared --enable-pthreads --enable-version3 --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libbluray --enable-libmp3lame --enable-libopus --enable-librubberband --enable-libsnappy --enable-libtesseract --enable-libtheora --enable-libvidstab --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-librtmp --enable-libspeex --enable-libsoxr --enable-videotoolbox --disable-libjack --disable-indev=jack
     libavutil      56. 31.100 / 56. 31.100
     libavcodec     58. 54.100 / 58. 54.100
     libavformat    58. 29.100 / 58. 29.100
     libavdevice    58.  8.100 / 58.  8.100
     libavfilter     7. 57.100 /  7. 57.100
     libavresample   4.  0.  0 /  4.  0.  0
     libswscale      5.  5.100 /  5.  5.100
     libswresample   3.  5.100 /  3.  5.100
     libpostproc    55.  5.100 / 55.  5.100
    Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'Hands-typing-(mobile-1080p-loop) copy.mp4':
     Metadata:
       major_brand     : mp42
       minor_version   : 0
       compatible_brands: mp42mp41
       creation_time   : 2020-03-04T20:14:21.000000Z
     Duration: 00:00:21.18, start: 0.000000, bitrate: 8201 kb/s
       Stream #0:0(eng): Video: h264 (Main) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 1:1 DAR 16:9], 8085 kb/s, 30 fps, 30 tbr, 30k tbn, 60k tbc (default)
       Metadata:
         creation_time   : 2020-03-04T20:14:21.000000Z
         handler_name    : ?Mainconcept Video Media Handler
         encoder         : AVC Coding
       Stream #0:1(eng): Audio: aac (LC) (mp4a / 0x6134706D), 48000 Hz, stereo, fltp, 125 kb/s (default)
       Metadata:
         creation_time   : 2020-03-04T20:14:21.000000Z
         handler_name    : #Mainconcept MP4 Sound Media Handler
    Stream mapping:
     Stream #0:0 -> #0:0 (h264 (native) -> theora (libtheora))
     Stream #0:1 -> #0:1 (aac (native) -> vorbis (libvorbis))
    Press [q] to stop, [?] for help
    zsh: segmentation fault  ffmpeg -i "Hands-typing-(mobile-1080p-loop) copy.mp4" -codec:v libtheora   -f

    I have also tried this, which doesn't work:

    ffmpeg -i "Hands-typing-(mobile-1080p-loop) copy.mp4" -codec:v libtheora -qscale:v 3 -codec:a libvorbis \
     -qscale:a 3 -f ogv output.ogv

    I am struggling to find the answer to my question, especially as there are so many posts about "segmentation faults".

    Any help is very much appreciated.
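
    Two hedged things worth trying (assumptions, not confirmed fixes for this particular crash): give libtheora an explicit quality and pixel format, or skip the Theora path entirely and produce the WebM variant with libvpx/libopus instead.

    # Theora/Vorbis with explicit quality settings
    ffmpeg -i "Hands-typing-(mobile-1080p-loop) copy.mp4" -c:v libtheora -pix_fmt yuv420p \
     -q:v 7 -c:a libvorbis -q:a 4 output.ogv

    # WebM alternative (VP9 + Opus)
    ffmpeg -i "Hands-typing-(mobile-1080p-loop) copy.mp4" -c:v libvpx-vp9 -b:v 0 -crf 33 \
     -c:a libopus output.webm

    If the libtheora command still segfaults with these options, the crash is more likely in the local ffmpeg/libtheora build (here the Homebrew package) than in the command line itself.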

  • How to extract frames at 30 fps using FFMPEG APIs on Android?

    11 March 2020, by Amber Beriwal

    We are working on a project that uses the FFmpeg library for video frame extraction on the Android platform.

    On Windows, we have observed:

    • Using the CLI, ffmpeg is capable of extracting frames at 30 fps with the command ffmpeg -i input.flv -vf fps=1 out%d.png.
    • Using Xuggler, we are able to extract frames at 30 fps.
    • Using FFMPEG APIs directly in code, we are getting frames at 30 fps.

    But when we use FFMPEG APIs directly on Android (See Hardware Details), we get the following results:

    • 720p video (1280 x 720) - 16 fps (approx. 60 ms/frame)
    • 1080p video (1920 x 1080) - 7 fps (approx. 140 ms/frame)

    We haven’t tested Xuggler/CLI on Android yet.

    Ideally, we should be able to get the data in constant time (approx. 30 ms/frame).

    How can we get 30 fps on Android?

    Code being used on Android:

    if (avformat_open_input(&pFormatCtx, pcVideoFile, NULL, NULL)) {
       iError = -1;  //Couldn't open file
    }

    if (!iError) {
       //Retrieve stream information
       if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
           iError = -2; //Couldn't find stream information
    }

    //Find the first video stream
    if (!iError) {

       for (i = 0; i < pFormatCtx->nb_streams; i++) {
           if (AVMEDIA_TYPE_VIDEO
                   == pFormatCtx->streams[i]->codec->codec_type) {
               iFramesInVideo = pFormatCtx->streams[i]->nb_index_entries;
               duration = pFormatCtx->streams[i]->duration;
               begin = pFormatCtx->streams[i]->start_time;
               time_base = (pFormatCtx->streams[i]->time_base.num * 1.0f)
                       / pFormatCtx->streams[i]->time_base.den;

               pCodecCtx = avcodec_alloc_context3(NULL);
               if (!pCodecCtx) {
                   iError = -6;
                   break;
               }

               AVCodecParameters params = { 0 };
               iReturn = avcodec_parameters_from_context(&params,
                       pFormatCtx->streams[i]->codec);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               iReturn = avcodec_parameters_to_context(pCodecCtx, &params);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               //pCodecCtx = pFormatCtx->streams[i]->codec;

               iVideoStreamIndex = i;
               break;
           }
       }
    }

    if (!iError) {
       if (iVideoStreamIndex == -1) {
           iError = -3; // Didn't find a video stream
       }
    }

    if (!iError) {
       // Find the decoder for the video stream
       pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
       if (pCodec == NULL) {
           iError = -4;
       }
    }

    if (!iError) {
       // Open codec
       if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
           iError = -5;
    }

    if (!iError) {
       iNumBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
               pCodecCtx->height, 1);

       // initialize SWS context for software scaling
       sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
               pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
               AV_PIX_FMT_RGB24,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL);
       if (!sws_ctx) {
           iError = -7;
       }
    }
    clock_gettime(CLOCK_MONOTONIC_RAW, &end);
    delta_us = (end.tv_sec - start.tv_sec) * 1000000
           + (end.tv_nsec - start.tv_nsec) / 1000;
    start = end;
    //LOGI("Starting_Frame_Extraction: %lld", delta_us);
    if (!iError) {
       while (av_read_frame(pFormatCtx, &packet) == 0) {
           // Is this a packet from the video stream?
           if (packet.stream_index == iVideoStreamIndex) {
               pFrame = av_frame_alloc();
               if (NULL == pFrame) {
                   iError = -8;
                   break;
               }

               // Decode video frame
               avcodec_decode_video2(pCodecCtx, pFrame, &iFrameFinished,
                       &packet);
               if (iFrameFinished) {
                   //OUR CODE
               }
               av_frame_free(&pFrame);
               pFrame = NULL;
           }
           av_packet_unref(&packet);
       }
    }
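
    A hedged way to narrow down where the time goes (not part of the original question): running the ffmpeg CLI on the same device with the null muxer separates raw decode speed from the RGB conversion that the code above adds through sws_getContext. Here input.mp4 stands in for one of the test clips.

    # Decode only: no scaling, no output file; -benchmark prints elapsed and CPU time.
    ffmpeg -benchmark -i input.mp4 -f null -

    # Decode plus forced conversion to RGB24, to measure the extra swscale cost.
    ffmpeg -benchmark -i input.mp4 -pix_fmt rgb24 -f null -

    If the first run already needs more than roughly 33 ms per frame, the bottleneck is the software decoder on that hardware rather than anything in the extraction code.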