
Media (91)

Other articles (42)

  • Libraries and binaries specific to video and audio processing

    31 January 2010, by

    The following software and libraries are used by SPIPmotion in one way or another.
    Required binaries: FFmpeg: the main encoder, which transcodes almost any type of video or audio file into formats that can be played on the Internet. See this tutorial for its installation; Oggz-tools: tools for inspecting ogg files; MediaInfo: retrieves information from most video and audio formats;
    Complementary, optional binaries: flvtool2: (...)

  • HTML5 audio and video support

    10 April 2011

    MediaSPIP uses the HTML5 video and audio tags to play multimedia documents, taking advantage of the latest W3C innovations supported by modern browsers.
    For older browsers, the Flash player Flowplayer is used instead.
    The HTML5 player used was created specifically for MediaSPIP: its appearance can be fully customized to match a chosen theme.
    These technologies make it possible to deliver video and audio both on conventional computers (...)

  • From upload to the final video [standalone version]

    31 January 2010, by

    The path of an audio or video document through SPIPmotion is divided into three distinct stages.
    Upload and retrieval of information about the source video
    First, an SPIP article has to be created and the "source" video document attached to it.
    When this document is attached to the article, two actions are executed in addition to the normal behaviour: retrieval of the technical information about the file's audio and video streams; generation of a thumbnail: extraction of a (...)

On other sites (6351)

  • android ffmpeg bad video output

    20 August 2014, by Sujith Manjavana

    I’m following this tutorial to create my first ffmpeg app. I have successfully built the shared libs and compiled the project without any errors. But when I run the app on my Nexus 5, the output is this.

    Here is the native code

    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libavutil/pixfmt.h>

    #include <stdio.h>
    #include <stdlib.h>
    #include <wchar.h>

    #include <pthread.h>
    #include <jni.h>
    #include <android/log.h>
    #include <android/bitmap.h>
    #include <android/native_window.h>
    #include <android/native_window_jni.h>

    #define LOG_TAG "android-ffmpeg-tutorial02"
    #define LOGI(...) __android_log_print(4, LOG_TAG, __VA_ARGS__);
    #define LOGE(...) __android_log_print(6, LOG_TAG, __VA_ARGS__);

    ANativeWindow*      window;
    char                *videoFileName;
    AVFormatContext     *formatCtx = NULL;
    int                 videoStream;
    AVCodecContext      *codecCtx = NULL;
    AVFrame             *decodedFrame = NULL;
    AVFrame             *frameRGBA = NULL;
    jobject             bitmap;
    void*               buffer;
    struct SwsContext   *sws_ctx = NULL;
    int                 width;
    int                 height;
    int                 stop;

    jint naInit(JNIEnv *pEnv, jobject pObj, jstring pFileName) {
       AVCodec         *pCodec = NULL;
       int             i;
       AVDictionary    *optionsDict = NULL;

       videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);
       LOGI("video file name is %s", videoFileName);
       // Register all formats and codecs
       av_register_all();
       // Open video file
       if(avformat_open_input(&formatCtx, videoFileName, NULL, NULL)!=0)
           return -1; // Couldn't open file
       // Retrieve stream information
       if(avformat_find_stream_info(formatCtx, NULL)<0)
           return -1; // Couldn't find stream information
       // Dump information about file onto standard error
       av_dump_format(formatCtx, 0, videoFileName, 0);
       // Find the first video stream
       videoStream=-1;
       for(i=0; i<formatCtx->nb_streams; i++) {
           if(formatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
               videoStream=i;
               break;
           }
       }
       if(videoStream==-1)
           return -1; // Didn't find a video stream
       // Get a pointer to the codec context for the video stream
       codecCtx=formatCtx->streams[videoStream]->codec;
       // Find the decoder for the video stream
       pCodec=avcodec_find_decoder(codecCtx->codec_id);
       if(pCodec==NULL) {
           fprintf(stderr, "Unsupported codec!\n");
           return -1; // Codec not found
       }
       // Open codec
       if(avcodec_open2(codecCtx, pCodec, &optionsDict)<0)
           return -1; // Could not open codec
       // Allocate video frame
       decodedFrame=avcodec_alloc_frame();
       // Allocate an AVFrame structure
       frameRGBA=avcodec_alloc_frame();
       if(frameRGBA==NULL)
           return -1;
       return 0;
    }

    jobject createBitmap(JNIEnv *pEnv, int pWidth, int pHeight) {
       int i;
       //get Bitmap class and createBitmap method ID
       jclass javaBitmapClass = (jclass)(*pEnv)->FindClass(pEnv, "android/graphics/Bitmap");
       jmethodID mid = (*pEnv)->GetStaticMethodID(pEnv, javaBitmapClass, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
       //create Bitmap.Config
       //reference: https://forums.oracle.com/thread/1548728
       const wchar_t* configName = L"ARGB_8888";
       int len = wcslen(configName);
       jstring jConfigName;
       if (sizeof(wchar_t) != sizeof(jchar)) {
           //wchar_t is defined as different length than jchar(2 bytes)
           jchar* str = (jchar*)malloc((len+1)*sizeof(jchar));
           for (i = 0; i < len; ++i) {
               str[i] = (jchar)configName[i];
           }
           str[len] = 0;
           jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)str, len);
       } else {
           //wchar_t is defined same length as jchar(2 bytes)
           jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)configName, len);
       }
       jclass bitmapConfigClass = (*pEnv)->FindClass(pEnv, "android/graphics/Bitmap$Config");
       jobject javaBitmapConfig = (*pEnv)->CallStaticObjectMethod(pEnv, bitmapConfigClass,
               (*pEnv)->GetStaticMethodID(pEnv, bitmapConfigClass, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;"), jConfigName);
       //create the bitmap
       return (*pEnv)->CallStaticObjectMethod(pEnv, javaBitmapClass, mid, pWidth, pHeight, javaBitmapConfig);
    }

    jintArray naGetVideoRes(JNIEnv *pEnv, jobject pObj) {
       jintArray lRes;
       if (NULL == codecCtx) {
           return NULL;
       }
       lRes = (*pEnv)->NewIntArray(pEnv, 2);
       if (lRes == NULL) {
           LOGI(1, "cannot allocate memory for video size");
           return NULL;
       }
       jint lVideoRes[2];
       lVideoRes[0] = codecCtx->width;
       lVideoRes[1] = codecCtx->height;
       (*pEnv)->SetIntArrayRegion(pEnv, lRes, 0, 2, lVideoRes);
       return lRes;
    }

    void naSetSurface(JNIEnv *pEnv, jobject pObj, jobject pSurface) {
       if (0 != pSurface) {
           // get the native window reference
           window = ANativeWindow_fromSurface(pEnv, pSurface);
           // set format and size of window buffer
           ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
       } else {
           // release the native window
           ANativeWindow_release(window);
       }
    }

    jint naSetup(JNIEnv *pEnv, jobject pObj, int pWidth, int pHeight) {
       width = pWidth;
       height = pHeight;
       //create a bitmap as the buffer for frameRGBA
       bitmap = createBitmap(pEnv, pWidth, pHeight);
       if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
           return -1;
       //get the scaling context
       sws_ctx = sws_getContext (
               codecCtx->width,
               codecCtx->height,
               codecCtx->pix_fmt,
               pWidth,
               pHeight,
               AV_PIX_FMT_RGBA,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL
       );
       // Assign appropriate parts of bitmap to image planes in pFrameRGBA
       // Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
       // of AVPicture
       avpicture_fill((AVPicture *)frameRGBA, buffer, AV_PIX_FMT_RGBA,
               pWidth, pHeight);
       return 0;
    }

    void finish(JNIEnv *pEnv) {
       //unlock the bitmap
       AndroidBitmap_unlockPixels(pEnv, bitmap);
       av_free(buffer);
       // Free the RGB image
       av_free(frameRGBA);
       // Free the YUV frame
       av_free(decodedFrame);
       // Close the codec
       avcodec_close(codecCtx);
       // Close the video file
       avformat_close_input(&formatCtx);
    }

    void decodeAndRender(JNIEnv *pEnv) {
       ANativeWindow_Buffer    windowBuffer;
       AVPacket                packet;
       int                     i=0;
       int                     frameFinished;
       int                     lineCnt;
       while(av_read_frame(formatCtx, &packet)>=0 && !stop) {
           // Is this a packet from the video stream?
           if(packet.stream_index==videoStream) {
               // Decode video frame
               avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
                  &packet);
               // Did we get a video frame?
               if(frameFinished) {
                   // Convert the image from its native format to RGBA
                   sws_scale
                   (
                       sws_ctx,
                       (uint8_t const * const *)decodedFrame->data,
                       decodedFrame->linesize,
                       0,
                       codecCtx->height,
                       frameRGBA->data,
                       frameRGBA->linesize
                   );
                   // lock the window buffer
                    if (ANativeWindow_lock(window, &windowBuffer, NULL) < 0) {
                       LOGE("cannot lock window");
                   } else {
                       // draw the frame on buffer
                       LOGI("copy buffer %d:%d:%d", width, height, width*height*4);
                       LOGI("window buffer: %d:%d:%d", windowBuffer.width,
                               windowBuffer.height, windowBuffer.stride);
                       memcpy(windowBuffer.bits, buffer,  width * height * 4);
                       // unlock the window buffer and post it to display
                       ANativeWindow_unlockAndPost(window);
                       // count number of frames
                       ++i;
                   }
               }
           }
           // Free the packet that was allocated by av_read_frame
           av_free_packet(&packet);
       }
       LOGI("total No. of frames decoded and rendered %d", i);
       finish(pEnv);
    }

    /**
    * start the video playback
    */
    void naPlay(JNIEnv *pEnv, jobject pObj) {
       //create a new thread for video decode and render
       pthread_t decodeThread;
       stop = 0;
       pthread_create(&decodeThread, NULL, decodeAndRender, NULL);
    }

    /**
    * stop the video playback
    */
    void naStop(JNIEnv *pEnv, jobject pObj) {
       stop = 1;
    }

    jint JNI_OnLoad(JavaVM* pVm, void* reserved) {
       JNIEnv* env;
       if ((*pVm)->GetEnv(pVm, (void **)&env, JNI_VERSION_1_6) != JNI_OK) {
            return -1;
       }
       JNINativeMethod nm[8];
       nm[0].name = "naInit";
       nm[0].signature = "(Ljava/lang/String;)I";
       nm[0].fnPtr = (void*)naInit;

       nm[1].name = "naSetSurface";
       nm[1].signature = "(Landroid/view/Surface;)V";
       nm[1].fnPtr = (void*)naSetSurface;

       nm[2].name = "naGetVideoRes";
       nm[2].signature = "()[I";
       nm[2].fnPtr = (void*)naGetVideoRes;

       nm[3].name = "naSetup";
       nm[3].signature = "(II)I";
       nm[3].fnPtr = (void*)naSetup;

       nm[4].name = "naPlay";
       nm[4].signature = "()V";
       nm[4].fnPtr = (void*)naPlay;

       nm[5].name = "naStop";
       nm[5].signature = "()V";
       nm[5].fnPtr = (void*)naStop;

       jclass cls = (*env)->FindClass(env, "roman10/tutorial/android_ffmpeg_tutorial02/MainActivity");
       //Register methods with env->RegisterNatives.
       (*env)->RegisterNatives(env, cls, nm, 6);
       return JNI_VERSION_1_6;
    }

    Here is the build.sh

    #!/bin/bash
    NDK=$HOME/Desktop/adt/android-ndk-r9
    SYSROOT=$NDK/platforms/android-9/arch-arm/
    TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/linux-x86_64
    function build_one
    {
    ./configure \
       --prefix=$PREFIX \
       --enable-shared \
       --disable-static \
       --disable-doc \
       --disable-ffmpeg \
       --disable-ffplay \
       --disable-ffprobe \
       --disable-ffserver \
       --disable-avdevice \
       --disable-doc \
       --disable-symver \
       --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
       --target-os=linux \
       --arch=arm \
       --enable-cross-compile \
       --sysroot=$SYSROOT \
       --extra-cflags="-Os -fpic $ADDI_CFLAGS" \
       --extra-ldflags="$ADDI_LDFLAGS" \
       $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make
    make install
    }
    CPU=arm
    PREFIX=$(pwd)/android/$CPU
    ADDI_CFLAGS="-marm"
    build_one

    It works on the Galaxy Tab 2. What can I do to make it work on all devices? Please help me.
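
    One device-dependent detail in decodeAndRender() above is the flat memcpy into the ANativeWindow buffer, which ignores the buffer's stride; on devices where the stride differs from the frame width, that alone produces skewed or garbled frames. Below is a minimal, illustrative sketch of a stride-aware row copy, reusing the variables from the code above; it is a sketch, not a confirmed fix for every device:

    uint8_t *dst = (uint8_t *) windowBuffer.bits;
    uint8_t *src = (uint8_t *) buffer;
    // ANativeWindow_Buffer.stride is in pixels; RGBA_8888 uses 4 bytes per pixel
    int dstStride = windowBuffer.stride * 4;
    // the RGBA rows were produced by sws_scale, so use the frame's linesize
    int srcStride = frameRGBA->linesize[0];
    int row;
    for (row = 0; row < height; row++) {
        // copy one row at a time so the padding at the end of each window row is skipped
        memcpy(dst + row * dstStride, src + row * srcStride, width * 4);
    }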

  • Android ExoPlayer stream mp3 over HTTP

    8 July 2016, by kevintcoughlin

    I’m trying to get a grasp on the new ExoPlayer library introduced this year at Google I/O 2014 so that I can incorporate it into my application.

    I’m attempting to stream an mp3 over HTTP, but so far have been unsuccessful. I’m not sure if it’s possible, but I’m trying to accomplish this without extending any of the base Source/Sample classes. My code is as follows:

    In my Activity

    SampleSource s = new FrameworkSampleSource(this, Uri.parse("http://traffic.libsyn.com/joeroganexp/p518.mp3"), null, 1);

    // Since I only have 1 audio renderer
    ExoPlayer player = ExoPlayer.Factory.newInstance(1);

    MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(s);
    player.prepare(audioRenderer);
    player.setPlayWhenReady(true);

    Logcat

    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ SniffFFMPEG
    07-06 15:52:34.080    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ android-source:0xb7c53e00
    07-06 15:52:34.084    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source begin open
    07-06 15:52:34.084    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android open, url: android-source:0xb7c53e00
    07-06 15:52:34.084    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ ffmpeg open android data source success, source ptr: 0xb7c53e00
    07-06 15:52:34.088    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source open success
    07-06 15:52:34.108    3363-3363/com.kevintcoughlin.smodr W/EGL_genymotion﹕ eglSurfaceAttrib not implemented
    07-06 15:52:34.116    3363-3363/com.kevintcoughlin.smodr E/OpenGLRenderer﹕ Getting MAX_TEXTURE_SIZE from GradienCache
    07-06 15:52:34.120    3363-3363/com.kevintcoughlin.smodr E/OpenGLRenderer﹕ MAX_TEXTURE_SIZE: 16384
    07-06 15:52:34.128    3363-3363/com.kevintcoughlin.smodr E/OpenGLRenderer﹕ Getting MAX_TEXTURE_SIZE from Caches::initConstraints()
    07-06 15:52:34.128    3363-3363/com.kevintcoughlin.smodr E/OpenGLRenderer﹕ MAX_TEXTURE_SIZE: 16384
    07-06 15:52:34.128    3363-3363/com.kevintcoughlin.smodr D/OpenGLRenderer﹕ Enabling debug mode 0
    07-06 15:52:34.640    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ [mp3 @ 0xb7c57040] Estimating duration from bitrate, this may be inaccurate
    07-06 15:52:34.640    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Input #0, mp3, from 'android-source:0xb7c53e00':
    07-06 15:52:34.640    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Metadata:
    07-06 15:52:34.640    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ TSS             : Logic Pro 9.1.8
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ title           : #518 - Matt Fulchiron
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ artist          : Joe Rogan Experience
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ album_artist    : Joe Rogan Experience
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Duration: 02:57:10.21, start: 0.000000, bitrate: 127 kb/s
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Stream #0:0: Audio: mp3, 44100 Hz, stereo, s16p, 128 kb/s
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ FFmpegExtrator, url: android-source:0xb7c53e00, format_name: mp3, format_long_name: MP2/3 (MPEG audio layer 2/3)
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ list the formats suppoted by ffmpeg:
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ ========================================
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[00]: mpeg
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[01]: mpegts
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[02]: mov,mp4,m4a,3gp,3g2,mj2
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[03]: matroska,webm
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[04]: asf
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[05]: rm
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[06]: flv
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[07]: swf
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[08]: avi
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[09]: ape
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[10]: dts
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[11]: flac
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[12]: ac3
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[13]: wav
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[14]: ogg
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[15]: vc1
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[16]: hevc
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ ========================================
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source close
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr W/FFmpegExtractor﹕ sniff through BetterSniffFFMPEG failed, try LegacySniffFFMPEG
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ source url:http://traffic.libsyn.com/joeroganexp/p518.mp3
    07-06 15:52:34.644    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source begin open
    07-06 15:52:34.648    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android open, url: android-source:0xb7c53e00|file:http://traffic.libsyn.com/joeroganexp/p518.mp3
    07-06 15:52:34.648    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ ffmpeg open android data source success, source ptr: 0xb7c53e00
    07-06 15:52:34.648    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source open success
    07-06 15:52:34.708    3363-3394/com.kevintcoughlin.smodr D/dalvikvm﹕ GC_FOR_ALLOC freed 361K, 4% free 10852K/11288K, paused 75ms, total 75ms
    07-06 15:52:34.844    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ [mp3 @ 0xb7ca7700] Estimating duration from bitrate, this may be inaccurate
    07-06 15:52:34.844    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Input #0, mp3, from 'android-source:0xb7c53e00|file:http://traffic.libsyn.com/joeroganexp/p518.mp3':
    07-06 15:52:34.844    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Metadata:
    07-06 15:52:34.844    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ TSS             : Logic Pro 9.1.8
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ title           : #518 - Matt Fulchiron
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ artist          : Joe Rogan Experience
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ album_artist    : Joe Rogan Experience
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Duration: 02:57:10.21, start: 0.000000, bitrate: 127 kb/s
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFMPEG﹕ Stream #0:0: Audio: mp3, 44100 Hz, stereo, s16p, 128 kb/s
    07-06 15:52:34.848    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ FFmpegExtrator, url: android-source:0xb7c53e00|file:http://traffic.libsyn.com/joeroganexp/p518.mp3, format_name: mp3, format_long_name: MP2/3 (MPEG audio layer 2/3)
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ list the formats suppoted by ffmpeg:
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ ========================================
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[00]: mpeg
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[01]: mpegts
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[02]: mov,mp4,m4a,3gp,3g2,mj2
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[03]: matroska,webm
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[04]: asf
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[05]: rm
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[06]: flv
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[07]: swf
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[08]: avi
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[09]: ape
    07-06 15:52:34.852    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[10]: dts
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[11]: flac
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[12]: ac3
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[13]: wav
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[14]: ogg
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[15]: vc1
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr V/FFmpegExtractor﹕ format_names[16]: hevc
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr I/FFmpegExtractor﹕ ========================================
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr D/FFMPEG﹕ android source close
    07-06 15:52:34.856    3363-3376/com.kevintcoughlin.smodr D/FFmpegExtractor﹕ SniffFFMPEG failed to sniff this source
    07-06 15:52:34.856    3363-3397/com.kevintcoughlin.smodr D/dalvikvm﹕ GC_FOR_ALLOC freed 120K, 3% free 12342K/12696K, paused 29ms, total 60ms
    07-06 15:52:34.860    3363-3397/com.kevintcoughlin.smodr I/dalvikvm-heap﹕ Grow heap (frag case) to 13.981MB for 1960012-byte allocation
    07-06 15:52:34.872    3363-3387/com.kevintcoughlin.smodr D/dalvikvm﹕ GC_FOR_ALLOC freed <1K, 3% free 14256K/14612K, paused 10ms, total 10ms
    07-06 15:52:34.880    3363-3376/com.kevintcoughlin.smodr I/OMXClient﹕ Using client-side OMX mux.
    07-06 15:52:34.976    3363-3400/com.kevintcoughlin.smodr I/OMXClient﹕ Using client-side OMX mux.
    07-06 15:52:35.144    3363-3393/com.kevintcoughlin.smodr D/dalvikvm﹕ GC_FOR_ALLOC freed 1512K, 12% free 13705K/15532K, paused 12ms, total 36ms
    07-06 15:52:35.152    3363-3393/com.kevintcoughlin.smodr I/dalvikvm-heap﹕ Grow heap (frag case) to 15.315MB for 1962812-byte allocation
    07-06 15:52:35.200    3363-3388/com.kevintcoughlin.smodr D/dalvikvm﹕ GC_FOR_ALLOC freed <1K, 11% free 15621K/17452K, paused 39ms, total 39ms
    07-06 15:52:35.356    3363-3400/com.kevintcoughlin.smodr E/ACodec﹕ OMX/mediaserver died, signalling error!
    07-06 15:52:35.356    3363-3400/com.kevintcoughlin.smodr E/MediaCodec﹕ Codec reported an error. (omx error 0x8000100d, internalError -32)
    07-06 15:52:35.376    3363-3400/com.kevintcoughlin.smodr A/ACodec﹕ frameworks/av/media/libstagefright/ACodec.cpp:499 CHECK(mem.get() != NULL) failed.
    07-06 15:52:35.376    3363-3400/com.kevintcoughlin.smodr A/libc﹕ Fatal signal 4 (SIGILL) at 0xb76d563d (code=2), thread 3400 (MediaCodec_loop)

    To my untrained eye it seems like I can get the TrackInfo correctly, but I am having a problem reading the data over HTTP. It also seems that the framework’s mimetype sniffing is failing, even though I do receive the type ’audio/mpeg’.

    Again, I appreciate any direction I receive. I realize that these APIs are very new.

    Thanks!

  • Decode a video file from memory using libav

    21 October 2015, by Dídac Pérez

    Suppose I have an entire video file in memory and I want to use libav to decode all of its frames. How should I do it? I can already do it by reading directly from a file with the avformat_open_input() function, but I need to do it from a file that is stored in memory.

    My AVIOContext implementation:

    class AVIOMemContext
    {

    public:

       AVIOMemContext(char* videoData, const int videoLen)
       {
           // Output buffer
           bufferSize = 32768;
           buffer = static_cast<char*>(av_malloc(bufferSize));

           // Internal buffer
           pos = 0;
           this->videoData = videoData;
           this->videoLen = videoLen;

           ctx_ = avio_alloc_context((unsigned char*) buffer, bufferSize, AVIO_FLAG_READ, this, &AVIOMemContext::read, &AVIOMemContext::write, &AVIOMemContext::seek);
       }

       ~AVIOMemContext()
       {
           av_free(videoData);
       }

       static int read(void *opaque, unsigned char *buf, int buf_size)
       {
           AVIOMemContext* This = static_cast<AVIOMemContext*>(opaque);

           // Read from pos to pos + buf_size
           if (This->pos + buf_size > This->videoLen)
           {
               int len = This->videoLen - This->pos;
               memcpy(buf, This->videoData + This->pos, len);
               return len;
           }
           else
           {
               memcpy(buf, This->videoData + This->pos, buf_size);
               return buf_size;
           }
       }

       static int write(void *opaque, unsigned char *buf, int buf_size)
       {
           /*
           AVIOMemContext* This = static_cast<AVIOMemContext*>(opaque);
           return fwrite(buf, 1, buf_size, This->f_);
           */

           return 0;
       }

       static int64_t seek(void *opaque, int64_t offset, int whence)
       {
           AVIOMemContext* This = static_cast<AVIOMemContext*>(opaque);

           if (offset + whence > This->videoLen)
           {
               This->pos = This->videoLen;

               return -1;
           }
           else
           {
               This->pos = offset + whence;

               return 0;
           }
       }

       AVIOContext *get_avio()
       {
           return ctx_;
       }

    private:

       // Output buffer
       int bufferSize;
       char* buffer;

       // Internal buffer
       int pos;
       char* videoData;
       int videoLen;

       AVIOContext* ctx_;

    };

    My current code:

    [...]

    av_register_all();
    avcodec_register_all();

    AVFormatContext* context;
    AVCodec* pCodec;
    AVPacket packet;
    AVCodecContext* pCodecCtx;
    int video_stream_index;
    int res;
    int got_picture;

    // Init variables and objects
    context = avformat_alloc_context();

    AVIOMemContext priv_ctx(videoData, videoLen);
    context->pb = priv_ctx.get_avio();

    res = avformat_find_stream_info(context, NULL);

    if (res < 0)
    {
       // Error
       avformat_free_context(context);

       return 0;
    }

    // Obtain the video stream of the total set of streams
    for (unsigned int k = 0; k < context->nb_streams; ++k)
    {
       if (context->streams[k]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
           video_stream_index = k;
       context->streams[k]->codec->time_base.den = 90000;
    }

    pCodecCtx = context->streams[video_stream_index]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);

    avcodec_open(pCodecCtx, pCodec);

    //allocate video frame
    AVFrame *pFrame = avcodec_alloc_frame();

    unsigned int nFrame = 0;

    while (av_read_frame(context, &packet) >= 0)

    [...]

    Thanks in advance,

    Dídac Pérez
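
    For reference, a minimal sketch of the usual FFmpeg custom-I/O pattern with avio_alloc_context(); the struct and helper names below (MemInput, mem_read, open_from_memory) are illustrative and not part of the code above. Note in particular that avformat_open_input() still has to be called on the context, even when context->pb is set, before avformat_find_stream_info() can be used:

    #include <libavformat/avformat.h>
    #include <libavutil/error.h>
    #include <libavutil/mem.h>
    #include <string.h>

    // Illustrative in-memory input; names are hypothetical.
    struct MemInput { const uint8_t *data; int size; int pos; };

    static int mem_read(void *opaque, uint8_t *buf, int buf_size)
    {
        struct MemInput *in = (struct MemInput *) opaque;
        int n = in->size - in->pos;
        if (n <= 0)
            return AVERROR_EOF;              // signal end of stream to libavformat
        if (n > buf_size)
            n = buf_size;
        memcpy(buf, in->data + in->pos, n);
        in->pos += n;
        return n;
    }

    static AVFormatContext *open_from_memory(const uint8_t *data, int size)
    {
        struct MemInput *in = (struct MemInput *) av_mallocz(sizeof(*in));
        in->data = data;
        in->size = size;

        int io_buf_size = 32768;
        unsigned char *io_buf = (unsigned char *) av_malloc(io_buf_size);
        // write_flag = 0: this AVIOContext is read-only
        AVIOContext *avio = avio_alloc_context(io_buf, io_buf_size, 0, in,
                                               mem_read, NULL, NULL);

        AVFormatContext *ctx = avformat_alloc_context();
        ctx->pb = avio;
        // With a custom pb the filename can be empty, but avformat_open_input()
        // must still be called before avformat_find_stream_info().
        if (avformat_open_input(&ctx, "", NULL, NULL) < 0)
            return NULL;
        return ctx;
    }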