Advanced search

Media (0)

Word: - Tags - / protocoles

No media matching your criteria are available on the site.

Other articles (32)

  • Automatic backup of SPIP channels

    1 April 2010, by

    When setting up an open platform, it is important for hosting providers to have reasonably regular backups available in order to guard against any problem that might arise.
    To accomplish this we rely on two SPIP plugins: Saveauto, which performs a regular backup of the database as a MySQL dump (usable in phpMyAdmin), and mes_fichiers_2, which produces a zip archive of the site's important data (the documents, the elements (...)
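
    As a rough illustration only (not the SPIP plugins themselves), a minimal Node sketch of that dump-plus-zip routine, with hypothetical paths and credentials, could look like this:

    var spawn = require('child_process').spawn;
    var fs = require('fs');

    var stamp = new Date().toISOString().slice(0, 10); // e.g. "2010-04-01"

    // 1. Regular database dump, usable in phpMyAdmin (the role Saveauto plays).
    var dump = spawn('mysqldump', ['-u', 'spip', '-psecret', 'spip_db']);
    dump.stdout.pipe(fs.createWriteStream('backups/spip-' + stamp + '.sql'));

    dump.on('close', function () {
      // 2. Zip archive of the important site data (the role mes_fichiers_2 plays).
      spawn('zip', ['-r', 'backups/spip-' + stamp + '.zip', 'IMG/', 'squelettes/'])
        .on('close', function () { console.log('backup complete'); });
    });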

  • Automatic installation script for MediaSPIP

    25 April 2011, by

    To work around installation difficulties caused mainly by server-side software dependencies, an "all-in-one" bash installation script was created to make this step easier on a server running a compatible Linux distribution.
    To use it you must have SSH access to your server and a "root" account, which will allow the dependencies to be installed. Contact your hosting provider if you do not have these.
    The documentation on using the installation script (...)

  • Automated installation script of MediaSPIP

    25 April 2011, by

    To overcome the installation difficulties caused mainly by server-side software dependencies, an "all-in-one" installation script written in bash was created to facilitate this step on a server running a compatible Linux distribution.
    You must have SSH access to your server and a root account to use it, so that the dependencies can be installed. Contact your hosting provider if you do not have these.
    The documentation on using this installation script is available here.
    The code of this (...)

On other sites (5471)

  • WebM live streaming via DASH

    23 February 2017, by ewack

    I am following the instructions here to try to set up WebM live streaming via DASH. My input is an Axis camera streaming H.264. I am using Node to spin up the ffmpeg processes. I am able to create the .hdr file and the .chk files. The .mpd file is even created, but it is empty and I get an error saying:

    Could not write header for output file #0 (incorrect codec parameters ?): Operation not permittedStream mapping: Stream #0:0 -> #0:0 (copy)

    Here's all of my code:

    var express = require('express');
    var spawn = require('child_process').spawn;

    var app = express();

    app.use(express.static(__dirname + '/public'));

    app.listen(8080);
    console.log("Running on Port 8080");

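    // First ffmpeg process: pull the RTSP/H.264 feed from the camera, re-encode
    // it to VP9, and let the webm_chunk muxer write the initialization header
    // (.hdr) plus the numbered media chunks (.chk).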
    var ffmpeg1 = spawn('ffmpeg', [
       '-y',
       //video
       '-i', 'rtsp://admin:password@192.168.1.54:554/axis-media/media.amp?videocodec=h264&resolution=1280x720',

       '-map', '0:0',
       '-pix_fmt', 'yuv420p',
       '-color_range', '2',
       '-c:v', 'libvpx-vp9',

       '-s', '1280x720',
       '-keyint_min', '25',
       '-g', '25',

       // VP9 live-encoding parameters
       '-speed', '6',
       '-tile-columns', '4',
       '-frame-parallel', '1',
       '-threads', '8',
       '-static-thresh', '0',
       '-max-intra-rate', '300',
       '-deadline', 'realtime',
       '-lag-in-frames', '0',
       '-error-resilient', '1',

       '-f', 'webm_chunk',
       '-header', 'public/glass_360.hdr',
       '-chunk_start_index', '1',
       'public/glass_360_%d.chk',
    ]);


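    // Second ffmpeg process, started after a short delay so the header file
    // exists: read glass_360.hdr with the webm_dash_manifest demuxer and write
    // the live DASH manifest (.mpd).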
    setTimeout(()=> {
     var ffmpeg2 = spawn('ffmpeg', [
       '-y',
       '-f', 'webm_dash_manifest',
       '-live', '1',
       '-i', 'public/glass_360.hdr',
       '-c', 'copy',
       '-map', '0',
       '-r', '25',
       '-framerate', '25',

       '-f', 'webm_dash_manifest',
       '-live', '1',

       '-adaptation_sets', '"id=0,streams=0"',
       '-chunk_start_index', '1',
       '-chunk_duration_ms', '2000',
       '-time_shift_buffer_depth', '7200',
       '-minimum_update_period', '7200',

       'public/glass_live_manifest.mpd'
     ]);
     ffmpeg2.stdout.on('data',
         function (data) {
             console.log('ff2std: ' + data);
         }
     );

     ffmpeg2.stderr.on('data',
         function (data) {
             console.log('ff2err: ' + data);
         }
     );
    }, 5000);

    ffmpeg1.stdout.on('data',
       function (data) {
           console.log('ff1std: ' + data);
       }
    );

    ffmpeg1.stderr.on('data',
       function (data) {
           console.log('ff1err: ' + data);
       }
    );

    Here is all of my output:

    Running on Port 8080
    ff1err: ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers
     built with Apple LLVM version 6.0 (clang-600.0.57) (based on LLVM 3.5svn)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.4 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda

    ff1err:   libavutil      55. 34.101 / 55. 34.101
     libavcodec     57. 64.101 / 57. 64.101
     libavformat    57. 56.101 / 57. 56.101
     libavdevice    57.  1.100 / 57.  1.100
     libavfilter     6. 65.100 /  6. 65.100
     libavresample   3.  1.  0 /  3.  1.  0
     libswscale      4.  2.100 /  4.  2.100
     libswresample   2.  3.100 /  2.  3.100
     libpostproc    54.  1.100 / 54.  1.100

    ff1err: Input #0, rtsp, from 'rtsp://admin:password@192.168.1.54:554/axis-media/media.amp?videocodec=h264&resolution=1280x720':
     Metadata:
       title           : Session streamed with GStreamer
       comment         : rtsp-server
     Duration: N/A, start: 0.033344
    ff1err: , bitrate: N/A
       Stream #0:0: Video: h264 (Main), yuvj420p(pc, bt709, progressive), 1280x720 [SAR 1:1 DAR 16:9], 30 fps, 30 tbr, 90k tbn, 180k tbc

    ff1err: [swscaler @ 0x7f8df281bc00] deprecated pixel format used, make sure you did set range correctly

    ff1err: [libvpx-vp9 @ 0x7f8df2800600] v1.6.1

    ff1err: Output #0, webm_chunk, to 'public/glass_360_%d.chk':
     Metadata:
       title           : Session streamed with GStreamer
       comment         : rtsp-server
       encoder         : Lavf57.56.101

    ff1err:     Stream #0:0: Video: vp9 (libvpx-vp9), yuv420p(pc), 1280x720 [SAR 1:1 DAR 16:9], q=-1--1, 200 kb/s, 25 fps, 1k tbn, 25 tbc
       Metadata:
         encoder         : Lavc57.64.101 libvpx-vp9
       Side data:
         cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
    Stream mapping:
     Stream #0:0 -> #0:0 (h264 (native) -> vp9 (libvpx-vp9))
    Press [q] to stop, [?] for help

    ff1err: frame=   10 fps=0.0 q=0.0 size=N/A time=00:00:00.36 bitrate=N/A speed=0.71x    
    ff1err: frame=   25 fps= 25 q=0.0 size=N/A time=00:00:00.96 bitrate=N/A speed=0.946x    
    ff1err: frame=   40 fps= 26 q=0.0 size=N/A time=00:00:01.56 bitrate=N/A speed=1.03x    
    ff1err: frame=   55 fps= 27 q=0.0 size=N/A time=00:00:02.16 bitrate=N/A speed=1.07x    
    ff1err: frame=   70 fps= 28 q=0.0 size=N/A time=00:00:02.76 bitrate=N/A speed=1.09x    
    ff1err: frame=   85 fps= 28 q=0.0 size=N/A time=00:00:03.36 bitrate=N/A speed=1.11x    
    ff2err: ffmpeg version 3.2.4 Copyright (c) 2000-2017 the FFmpeg developers
     built with Apple LLVM version 6.0 (clang-600.0.57) (based on LLVM 3.5svn)
     configuration: --prefix=/usr/local/Cellar/ffmpeg/3.2.4 --enable-shared --enable-pthreads --enable-gpl --enable-version3 --enable-hardcoded-tables --enable-avresample --cc=clang --host-cflags= --host-ldflags= --enable-ffplay --enable-frei0r --enable-libass --enable-libfdk-aac --enable-libfreetype --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libspeex --enable-libtheora --enable-libvorbis --enable-libvpx --enable-libx264 --enable-libxvid --enable-opencl --disable-lzma --enable-libopenjpeg --disable-decoder=jpeg2000 --extra-cflags=-I/usr/local/Cellar/openjpeg/2.1.2/include/openjpeg-2.1 --enable-nonfree --enable-vda

    ff2err:   libavutil      55. 34.101 / 55. 34.101
     libavcodec     57. 64.101 / 57. 64.101
     libavformat    57. 56.101 / 57. 56.101
     libavdevice    57.  1.100 / 57.  1.100
     libavfilter     6. 65.100 /  6. 65.100
     libavresample   3.  1.  0 /  3.  1.  0
     libswscale      4.  2.100 /  4.  2.100
     libswresample   2.  3.100 /  2.  3.100
     libpostproc    54.  1.100 / 54.  1.100

    ff2err: [webm_dash_manifest @ 0x7fbc5b80b400] Could not find codec parameters for stream 0 (Video: vp9, none, 1280x720): unspecified pixel format
    Consider increasing the value for the 'analyzeduration' and 'probesize' options

    ff2err: Input #0, webm_dash_manifest, from 'public/glass_360.hdr':
     Metadata:
       title           : Session streamed with GStreamer
       encoder         : Lavf57.56.101
     Duration: N/A, bitrate: N/A
       Stream #0:0: Video: vp9, none, 1280x720
    ff2err: , SAR 1:1 DAR 16:9, 25 fps, 25 tbr, 1k tbn, 1k tbc (default)
       Metadata:
         webm_dash_manifest_file_name: glass_360.hdr
         webm_dash_manifest_track_number: 1

    ff2err: Could not write header for output file #0 (incorrect codec parameters ?): Operation not permittedStream mapping:
     Stream #0:0 -> #0:0 (copy)

    ff2err:     Last message repeated 1 times

    ff1err: frame=  101 fps= 29 q=0.0 size=N/A time=00:00:04.00 bitrate=N/A speed=1.13x    
    ff1err: frame=  116 fps= 29 q=0.0 size=N/A time=00:00:04.60 bitrate=N/A speed=1.14x    
    ff1err: frame=  131 fps= 29 q=0.0 size=N/A time=00:00:05.20 bitrate=N/A speed=1.15x    
    ff1err: frame=  146 fps= 29 q=0.0 size=N/A time=00:00:05.80 bitrate=N/A speed=1.15x    
    ff1err: frame=  161 fps= 29 q=0.0 size=N/A time=00:00:06.40 bitrate=N/A speed=1.15x    
    ff1err: frame=  177 fps= 29 q=0.0 size=N/A time=00:00:07.04 bitrate=N/A speed=1.16x    
    ff1err: frame=  192 fps= 29 q=0.0 size=N/A time=00:00:07.64 bitrate=N/A speed=1.16x    
    ff1err: frame=  207 fps= 29 q=0.0 size=N/A time=00:00:08.24 bitrate=N/A speed=1.16x    
    ff1err: frame=  222 fps= 29 q=0.0 size=N/A time=00:00:08.84 bitrate=N/A speed=1.17x    
    ff1err: frame=  237 fps= 29 q=0.0 size=N/A time=00:00:09.44 bitrate=N/A speed=1.17x    
    ff1err: frame=  252 fps= 29 q=0.0 size=N/A time=00:00:10.04 bitrate=N/A speed=1.17x  

    Why is ffmpeg creating an empty .mpd file?

  • RTP packets detected as UDP

    28 February 2017, by user3172852

    Here is what I am trying to do:

    WebRTC endpoint > RTP Endpoint > ffmpeg > RTMP server.

    This is what my SDP file looks like.

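    // Hand-written SDP offer: PCMU audio on UDP port 60820 and H.264 video on
    // UDP port 59618, both marked recvonly.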
    var cm_offer = "v=0\n" +
                 "o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
                 "s=nginx\n" +
                 "c=IN IP4 127.0.0.1\n" +
                 "t=0 0\n" +
                 "m=audio 60820 RTP/AVP 0\n" +
                 "a=rtpmap:0 PCMU/8000\n" +
                 "a=recvonly\n" +
                 "m=video 59618 RTP/AVP 101\n" +
                 "a=rtpmap:101 H264/90000\n" +
                 "a=recvonly\n";

    What's happening is that Wireshark detects the incoming packets at port 59618, but as plain UDP packets rather than RTP. I am trying to capture the packets using ffmpeg with the following command:

    ubuntu@ip-132-31-40-100:~$ ffmpeg -i udp://127.0.0.1:59618 -vcodec copy stream.mp4
    ffmpeg version git-2017-01-22-f1214ad Copyright (c) 2000-2017 the FFmpeg developers
     built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.3)
     configuration: --extra-libs=-ldl --prefix=/opt/ffmpeg --mandir=/usr/share/man --enable-avresample --disable-debug --enable-nonfree --enable-gpl --enable-version3 --enable-libopencore-amrnb --enable-libopencore-amrwb --disable-decoder=amrnb --disable-decoder=amrwb --enable-libpulse --enable-libfreetype --enable-gnutls --enable-libx264 --enable-libx265 --enable-libfdk-aac --enable-libvorbis --enable-libmp3lame --enable-libopus --enable-libvpx --enable-libspeex --enable-libass --enable-avisynth --enable-libsoxr --enable-libxvid --enable-libvidstab --enable-libwavpack --enable-nvenc
     libavutil      55. 44.100 / 55. 44.100
     libavcodec     57. 75.100 / 57. 75.100
     libavformat    57. 63.100 / 57. 63.100
     libavdevice    57.  2.100 / 57.  2.100
     libavfilter     6. 69.100 /  6. 69.100
     libavresample   3.  2.  0 /  3.  2.  0
     libswscale      4.  3.101 /  4.  3.101
     libswresample   2.  4.100 /  2.  4.100
     libpostproc    54.  2.100 / 54.  2.100

    All I get is a blinking cursor, and the stream.mp4 file is not written to disk after I exit (Ctrl+C).

    So can you help me figure out:

    1. Why Wireshark cannot detect the packets as RTP (I suspect it has something to do with SDP)?
    2. How to handle the SDP answer when the RTP endpoint is pushing to ffmpeg, which doesn't send an answer back?
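
    For reference only (a sketch, not a verified fix): ffmpeg usually needs the RTP session described to it in an SDP file rather than a bare udp:// URL before it can depacketize the stream. A minimal Node variant of that kind of invocation, with a hypothetical stream.sdp holding the negotiated m=audio/m=video lines, might look like:

    var spawn = require('child_process').spawn;

    // The whitelist lets ffmpeg open a local .sdp file that in turn points at
    // udp/rtp addresses; video is copied as in the original command.
    var ffmpeg = spawn('ffmpeg', [
      '-protocol_whitelist', 'file,udp,rtp',
      '-i', 'stream.sdp',
      '-vcodec', 'copy',
      'stream.mp4'
    ]);

    ffmpeg.stderr.on('data', function (data) {
      console.log('ffmpeg: ' + data);
    });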

    Here is the entire code (the Kurento hello world tutorial, modified):

    /*
        * (C) Copyright 2014-2015 Kurento (http://kurento.org/)
        *
        * Licensed under the Apache License, Version 2.0 (the "License");
        * you may not use this file except in compliance with the License.
        * You may obtain a copy of the License at
        *
        *   http://www.apache.org/licenses/LICENSE-2.0
        *
        * Unless required by applicable law or agreed to in writing, software
        * distributed under the License is distributed on an "AS IS" BASIS,
        * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
        * See the License for the specific language governing permissions and
        * limitations under the License.
        */

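       // Parse query-string style options out of location.search, falling back
       // to the defaults supplied in opts.default.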
       function getopts(args, opts)
       {
         var result = opts.default || {};
         args.replace(
             new RegExp("([^?=&]+)(=([^&]*))?", "g"),
             function($0, $1, $2, $3) { result[$1] = decodeURI($3); });

         return result;
       };

       var args = getopts(location.search,
       {
         default:
         {
           ws_uri: 'wss://' + location.hostname + ':8433/kurento',
           ice_servers: undefined
         }
       });

       function setIceCandidateCallbacks(webRtcPeer, webRtcEp, onerror)
       {
         webRtcPeer.on('icecandidate', function(candidate) {
           console.log("Local candidate:",candidate);

           candidate = kurentoClient.getComplexType('IceCandidate')(candidate);

           webRtcEp.addIceCandidate(candidate, onerror)
         });

         webRtcEp.on('OnIceCandidate', function(event) {
           var candidate = event.candidate;

           console.log("Remote candidate:",candidate);

           webRtcPeer.addIceCandidate(candidate, onerror);
         });
       }


       function setIceCandidateCallbacks2(webRtcPeer, rtpEp, onerror)
       {
         webRtcPeer.on('icecandidate', function(candidate) {
           console.log("Localr candidate:",candidate);

           candidate = kurentoClient.getComplexType('IceCandidate')(candidate);

           rtpEp.addIceCandidate(candidate, onerror)
         });
       }


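       // Entry point: once the page has loaded, wire the start/stop buttons to
       // create and release the Kurento media pipeline.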
       window.addEventListener('load', function()
       {
         console = new Console();

         var webRtcPeer;
         var pipeline;
         var webRtcEpt;

         var videoInput = document.getElementById('videoInput');
         var videoOutput = document.getElementById('videoOutput');

         var startButton = document.getElementById("start");
         var stopButton = document.getElementById("stop");

         startButton.addEventListener("click", function()
         {
           showSpinner(videoInput, videoOutput);

           var options = {
             localVideo: videoInput,
             remoteVideo: videoOutput
           };


           if (args.ice_servers) {
            console.log("Use ICE servers: " + args.ice_servers);
            options.configuration = {
              iceServers : JSON.parse(args.ice_servers)
            };
           } else {
            console.log("Use freeice")
           }

           webRtcPeer = kurentoUtils.WebRtcPeer.WebRtcPeerSendrecv(options, function(error)
           {
             if(error) return onError(error)

             this.generateOffer(onOffer)
           });

           function onOffer(error, sdpOffer)
           {
             if(error) return onError(error)

             kurentoClient(args.ws_uri, function(error, client)
             {
               if(error) return onError(error);

               client.create("MediaPipeline", function(error, _pipeline)
               {
                 if(error) return onError(error);

                 pipeline = _pipeline;

                 pipeline.create("WebRtcEndpoint", function(error, webRtc){
                   if(error) return onError(error);

                   webRtcEpt = webRtc;

                   setIceCandidateCallbacks(webRtcPeer, webRtc, onError)

                   webRtc.processOffer(sdpOffer, function(error, sdpAnswer){
                     if(error) return onError(error);

                     webRtcPeer.processAnswer(sdpAnswer, onError);
                   });
                   webRtc.gatherCandidates(onError);

                   webRtc.connect(webRtc, function(error){
                     if(error) return onError(error);

                     console.log("Loopback established");
                   });
                 });



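                // Create an RtpEndpoint, give it the hand-written SDP offer, and
                // connect the WebRtcEndpoint's media to it.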
               pipeline.create("RtpEndpoint", function(error, rtp){
                   if(error) return onError(error);

                   //setIceCandidateCallbacks2(webRtcPeer, rtp, onError)


                   var cm_offer = "v=0\n" +
                         "o=- 3641290734 3641290734 IN IP4 127.0.0.1\n" +
                         "s=nginx\n" +
                         "c=IN IP4 127.0.0.1\n" +
                         "t=0 0\n" +
                         "m=audio 60820 RTP/AVP 0\n" +
                         "a=rtpmap:0 PCMU/8000\n" +
                         "a=recvonly\n" +
                         "m=video 59618 RTP/AVP 101\n" +
                         "a=rtpmap:101 H264/90000\n" +
                         "a=recvonly\n";



                   rtp.processOffer(cm_offer, function(error, cm_sdpAnswer){
                     if(error) return onError(error);

                     //webRtcPeer.processAnswer(cm_sdpAnswer, onError);
                   });
                   //rtp.gatherCandidates(onError);

                   webRtcEpt.connect(rtp, function(error){
                     if(error) return onError(error);

                     console.log("RTP endpoint connected to webRTC");
                   });
                 });









               });
             });
           }
         });
         stopButton.addEventListener("click", stop);


         function stop() {
           if (webRtcPeer) {
             webRtcPeer.dispose();
             webRtcPeer = null;
           }

           if(pipeline){
             pipeline.release();
             pipeline = null;
           }

           hideSpinner(videoInput, videoOutput);
         }

         function onError(error) {
           if(error)
           {
             console.error(error);
             stop();
           }
         }
       })


       function showSpinner() {
         for (var i = 0; i < arguments.length; i++) {
           arguments[i].poster = 'img/transparent-1px.png';
           arguments[i].style.background = "center transparent url('img/spinner.gif') no-repeat";
         }
       }

       function hideSpinner() {
         for (var i = 0; i < arguments.length; i++) {
           arguments[i].src = '';
           arguments[i].poster = 'img/webrtc.png';
           arguments[i].style.background = '';
         }
       }

       /**
        * Lightbox utility (to display media pipeline image in a modal dialog)
        */
       $(document).delegate('*[data-toggle="lightbox"]', 'click', function(event) {
         event.preventDefault();
         $(this).ekkoLightbox();
       });

  • No output file when converting audio using FFmpeg in Android

    18 March 2017, by Sha

    I'm trying to convert an m4a audio file to wav using FFmpeg. The code executes fine and gives no errors, but I don't see any output file in my directory.

    This is what I am executing:

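    // -y: overwrite any existing output, -i: the source m4a file,
    // -f wav: force the WAV muxer, last element: the output path.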
    String[] cmd = {"-y", "-i", "/storage/emulated/0/jd.m4a", "-f","wav" ,"/storage/emulated/0/DCIM/Camera/output.wav"};

    And this is what gets printed:

    03-17 15:40:51.539 10111-10111/io.whispero.soundmerger E/onProgress: ffmpeg version n3.0.1 Copyright (c) 2000-2016 the FFmpeg developers
    03-17 15:40:51.542 10111-10111/io.whispero.soundmerger E/onProgress:   built with gcc 4.8 (GCC)
    03-17 15:40:51.545 10111-10111/io.whispero.soundmerger E/onProgress:   configuration: --target-os=linux --cross-prefix=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/bin/arm-linux-androideabi- --arch=arm --cpu=cortex-a8 --enable-runtime-cpudetect --sysroot=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/sysroot --enable-pic --enable-libx264 --enable-libass --enable-libfreetype --enable-libfribidi --enable-libmp3lame --enable-fontconfig --enable-pthreads --disable-debug --disable-ffserver --enable-version3 --enable-hardcoded-tables --disable-ffplay --disable-ffprobe --enable-gpl --enable-yasm --disable-doc --disable-shared --enable-static --pkg-config=/home/vagrant/SourceCode/ffmpeg-android/ffmpeg-pkg-config --prefix=/home/vagrant/SourceCode/ffmpeg-android/build/armeabi-v7a --extra-cflags='-I/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/include -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fno-strict-overflow -fstack-protector-all' --extra-ldflags='-L/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/lib -Wl,-z,relro -Wl,-z,now -pie' --extra-libs='-lpng -lexpat -lm' --extra-cxxflags=
    03-17 15:40:51.547 10111-10111/io.whispero.soundmerger E/onProgress:   libavutil      55. 17.103 / 55. 17.103
    03-17 15:40:51.552 10111-10111/io.whispero.soundmerger E/onProgress:   libavcodec     57. 24.102 / 57. 24.102
    03-17 15:40:51.554 10111-10111/io.whispero.soundmerger E/onProgress:   libavformat    57. 25.100 / 57. 25.100
    03-17 15:40:51.556 10111-10111/io.whispero.soundmerger E/onProgress:   libavdevice    57.  0.101 / 57.  0.101
    03-17 15:40:51.559 10111-10111/io.whispero.soundmerger E/onProgress:   libavfilter     6. 31.100 /  6. 31.100
    03-17 15:40:51.561 10111-10111/io.whispero.soundmerger E/onProgress:   libswscale      4.  0.100 /  4.  0.100
    03-17 15:40:51.562 10111-10111/io.whispero.soundmerger E/onProgress:   libswresample   2.  0.101 /  2.  0.101
    03-17 15:40:51.564 10111-10111/io.whispero.soundmerger E/onProgress:   libpostproc    54.  0.100 / 54.  0.100
    03-17 15:40:51.581 10111-10111/io.whispero.soundmerger E/onProgress: Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/storage/emulated/0/jd.m4a':
    03-17 15:40:51.585 10111-10111/io.whispero.soundmerger E/onProgress:   Metadata:
    03-17 15:40:51.587 10111-10111/io.whispero.soundmerger E/onProgress:     major_brand     : M4A
    03-17 15:40:51.589 10111-10111/io.whispero.soundmerger E/onProgress:     minor_version   : 0
    03-17 15:40:51.593 10111-10111/io.whispero.soundmerger E/onProgress:     compatible_brands: M4A mp42isom
    03-17 15:40:51.595 10111-10111/io.whispero.soundmerger E/onProgress:     creation_time   : 2017-02-16 10:36:39
    03-17 15:40:51.597 10111-10111/io.whispero.soundmerger E/onProgress:   Duration: 00:00:03.39, start: 0.000000, bitrate: 82 kb/s
    03-17 15:40:51.602 10111-10111/io.whispero.soundmerger E/onProgress:     Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 16000 Hz, mono, fltp, 24 kb/s (default)
    03-17 15:40:51.608 10111-10111/io.whispero.soundmerger E/onProgress:     Metadata:
    03-17 15:40:51.612 10111-10111/io.whispero.soundmerger E/onProgress:       creation_time   : 2017-02-16 10:36:39
    03-17 15:40:51.614 10111-10111/io.whispero.soundmerger E/onProgress: Output #0, wav, to '/storage/emulated/0/DCIM/Camera/hyder.wav':
    03-17 15:40:51.617 10111-10111/io.whispero.soundmerger E/onProgress:   Metadata:
    03-17 15:40:51.619 10111-10111/io.whispero.soundmerger E/onProgress:     major_brand     : M4A
    03-17 15:40:51.621 10111-10111/io.whispero.soundmerger E/onProgress:     minor_version   : 0
    03-17 15:40:51.623 10111-10111/io.whispero.soundmerger E/onProgress:     compatible_brands: M4A mp42isom
    03-17 15:40:51.625 10111-10111/io.whispero.soundmerger E/onProgress:     ISFT            : Lavf57.25.100
    03-17 15:40:51.627 10111-10111/io.whispero.soundmerger E/onProgress:     Stream #0:0(eng): Audio: pcm_s16le ([1][0][0][0] / 0x0001), 16000 Hz, mono, s16, 256 kb/s (default)
    03-17 15:40:51.629 10111-10111/io.whispero.soundmerger E/onProgress:     Metadata:
    03-17 15:40:51.631 10111-10111/io.whispero.soundmerger E/onProgress:       creation_time   : 2017-02-16 10:36:39
    03-17 15:40:51.633 10111-10111/io.whispero.soundmerger E/onProgress:       encoder         : Lavc57.24.102 pcm_s16le
    03-17 15:40:51.636 10111-10111/io.whispero.soundmerger E/onProgress: Stream mapping:
    03-17 15:40:51.639 10111-10111/io.whispero.soundmerger E/onProgress:   Stream #0:0 -> #0:0 (aac (native) -> pcm_s16le (native))
    03-17 15:40:51.642 10111-10111/io.whispero.soundmerger E/onProgress: Press [q] to stop, [?] for help
    03-17 15:40:51.645 10111-10111/io.whispero.soundmerger E/onProgress: size=     106kB time=00:00:03.39 bitrate= 256.2kbits/s speed= 181x    
    03-17 15:40:51.647 10111-10111/io.whispero.soundmerger E/onProgress: video:0kB audio:106kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.071860%
    03-17 15:40:51.650 10111-10111/io.whispero.soundmerger E/SUCCESS: ffmpeg version n3.0.1 Copyright (c) 2000-2016 the FFmpeg developers
                                                                       built with gcc 4.8 (GCC)
                                                                       configuration: --target-os=linux --cross-prefix=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/bin/arm-linux-androideabi- --arch=arm --cpu=cortex-a8 --enable-runtime-cpudetect --sysroot=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/sysroot --enable-pic --enable-libx264 --enable-libass --enable-libfreetype --enable-libfribidi --enable-libmp3lame --enable-fontconfig --enable-pthreads --disable-debug --disable-ffserver --enable-version3 --enable-hardcoded-tables --disable-ffplay --disable-ffprobe --enable-gpl --enable-yasm --disable-doc --disable-shared --enable-static --pkg-config=/home/vagrant/SourceCode/ffmpeg-android/ffmpeg-pkg-config --prefix=/home/vagrant/SourceCode/ffmpeg-android/build/armeabi-v7a --extra-cflags='-I/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/include -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fno-strict-overflow -fstack-protector-all' --extra-ldflags='-L/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/lib -Wl,-z,relro -Wl,-z,now -pie' --extra-libs='-lpng -lexpat -lm' --extra-cxxflags=
                                                                       libavutil      55. 17.103 / 55. 17.103
                                                                       libavcodec     57. 24.102 / 57. 24.102
                                                                       libavformat    57. 25.100 / 57. 25.100
                                                                       libavdevice    57.  0.101 / 57.  0.101
                                                                       libavfilter     6. 31.100 /  6. 31.100
                                                                       libswscale      4.  0.100 /  4.  0.100
                                                                       libswresample   2.  0.101 /  2.  0.101
                                                                       libpostproc    54.  0.100 / 54.  0.100
                                                                     Input #0, mov,mp4,m4a,3gp,3g2,mj2, from '/storage/emulated/0/jd.m4a':
                                                                       Metadata:
                                                                         major_brand     : M4A
                                                                         minor_version   : 0
                                                                         compatible_brands: M4A mp42isom
                                                                         creation_time   : 2017-02-16 10:36:39
                                                                       Duration: 00:00:03.39, start: 0.000000, bitrate: 82 kb/s
                                                                         Stream #0:0(eng): Audio: aac (LC) (mp4a / 0x6134706D), 16000 Hz, mono, fltp, 24 kb/s (default)
                                                                         Metadata:
                                                                           creation_time   : 2017-02-16 10:36:39
                                                                     Output #0, wav, to '/storage/emulated/0/DCIM/Camera/hyder.wav':
                                                                       Metadata:
                                                                         major_brand     : M4A
                                                                         minor_version   : 0
                                                                         compatible_brands: M4A mp42isom
                                                                         ISFT            : Lavf57.25.100
                                                                         Stream #0:0(eng): Audio: pcm_s16le ([1][0][0][0] / 0x0001), 16000 Hz, mono, s16, 256 kb/s (default)
                                                                         Metadata:
                                                                           creation_time   : 2017-02-16 10:36:39
                                                                           encoder         : Lavc57.24.102 pcm_s16le
                                                                     Stream mapping:
                                                                       Stream #0:0 -> #0:0 (aac (native) -> pcm_s16le (native))
                                                                     Press [q] to stop, [?] for help
                                                                     size=     106kB time=00:00:03.39 bitrate= 256.2kbits/s speed= 181x    
                                                                     video:0kB audio:106kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.071860%
    03-17 15:40:51.653 10111-10111/io.whispero.soundmerger E/onFinish: onFinish

    Please help me figure out why I am not seeing any output audio file :(

    Thanks.