Advanced search

Media (0)


No media matching your criteria is available on this site.

Other articles (85)

  • MediaSPIP v0.2

    21 June 2013, by

    MediaSPIP 0.2 is the first stable version of MediaSPIP.
    Its official release date is 21 June 2013 and it is announced here.
    The zip file provided here contains only the MediaSPIP sources in standalone version.
    As with the previous version, all of the software dependencies must be installed manually on the server.
    If you wish to use this archive for a farm-mode installation, you will also need to make further modifications (...)

  • Use, discuss, criticize

    13 April 2011, by

    Talk to people directly involved in MediaSPIP’s development, or to people around you who could use MediaSPIP to share, enhance or develop their creative projects.
    The bigger the community, the more MediaSPIP’s potential will be explored and the faster the software will evolve.
    A discussion list is available for all exchanges between users.

  • MediaSPIP version 0.1 Beta

    16 April 2011, by

    MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
    The zip file provided here contains only the MediaSPIP sources in standalone version.
    To get a working installation, all of the software dependencies must be installed manually on the server.
    If you wish to use this archive for a farm-mode installation, you will also need to make further modifications (...)

On other sites (15689)

  • android ffmpeg bad video output

    20 August 2014, by Sujith Manjavana

    I’m following this tutorial to create my first ffmpeg app. I have successfully built the shared libs and compiled the project without any errors. But when I run the app on my Nexus 5, the video output is bad.

    Here is the native code

    #include <libavcodec/avcodec.h>
    #include <libavformat/avformat.h>
    #include <libswscale/swscale.h>
    #include <libavutil/pixfmt.h>

    #include <stdio.h>
    #include <string.h>
    #include <stdlib.h>
    #include <wchar.h>

    #include <jni.h>
    #include <pthread.h>
    #include <android/log.h>
    #include <android/bitmap.h>
    #include <android/native_window.h>
    #include <android/native_window_jni.h>

    #define LOG_TAG "android-ffmpeg-tutorial02"
    #define LOGI(...) __android_log_print(4, LOG_TAG, __VA_ARGS__);
    #define LOGE(...) __android_log_print(6, LOG_TAG, __VA_ARGS__);

    ANativeWindow*      window;
    char                *videoFileName;
    AVFormatContext     *formatCtx = NULL;
    int                 videoStream;
    AVCodecContext      *codecCtx = NULL;
    AVFrame             *decodedFrame = NULL;
    AVFrame             *frameRGBA = NULL;
    jobject             bitmap;
    void*               buffer;
    struct SwsContext   *sws_ctx = NULL;
    int                 width;
    int                 height;
    int                 stop;

    jint naInit(JNIEnv *pEnv, jobject pObj, jstring pFileName) {
       AVCodec         *pCodec = NULL;
       int             i;
       AVDictionary    *optionsDict = NULL;

       videoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, pFileName, NULL);
       LOGI("video file name is %s", videoFileName);
       // Register all formats and codecs
       av_register_all();
       // Open video file
       if(avformat_open_input(&formatCtx, videoFileName, NULL, NULL)!=0)
           return -1; // Couldn't open file
       // Retrieve stream information
       if(avformat_find_stream_info(formatCtx, NULL)<0)
           return -1; // Couldn't find stream information
       // Dump information about file onto standard error
       av_dump_format(formatCtx, 0, videoFileName, 0);
       // Find the first video stream
       videoStream=-1;
       for(i=0; i < formatCtx->nb_streams; i++) {
           if(formatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
               videoStream=i;
               break;
           }
       }
       if(videoStream==-1)
           return -1; // Didn't find a video stream
       // Get a pointer to the codec context for the video stream
       codecCtx=formatCtx->streams[videoStream]->codec;
       // Find the decoder for the video stream
       pCodec=avcodec_find_decoder(codecCtx->codec_id);
       if(pCodec==NULL) {
           fprintf(stderr, "Unsupported codec!\n");
           return -1; // Codec not found
       }
       // Open codec
       if(avcodec_open2(codecCtx, pCodec, &optionsDict)<0)
           return -1; // Could not open codec
       // Allocate video frame
       decodedFrame=avcodec_alloc_frame();
       // Allocate an AVFrame structure
       frameRGBA=avcodec_alloc_frame();
       if(frameRGBA==NULL)
           return -1;
       return 0;
    }

    jobject createBitmap(JNIEnv *pEnv, int pWidth, int pHeight) {
       int i;
       //get Bitmap class and createBitmap method ID
       jclass javaBitmapClass = (jclass)(*pEnv)->FindClass(pEnv, "android/graphics/Bitmap");
       jmethodID mid = (*pEnv)->GetStaticMethodID(pEnv, javaBitmapClass, "createBitmap", "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
       //create Bitmap.Config
       //reference: https://forums.oracle.com/thread/1548728
       const wchar_t* configName = L"ARGB_8888";
       int len = wcslen(configName);
       jstring jConfigName;
       if (sizeof(wchar_t) != sizeof(jchar)) {
           //wchar_t is defined as different length than jchar(2 bytes)
           jchar* str = (jchar*)malloc((len+1)*sizeof(jchar));
           for (i = 0; i < len; ++i) {
               str[i] = (jchar)configName[i];
           }
           str[len] = 0;
           jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)str, len);
       } else {
           //wchar_t is defined same length as jchar(2 bytes)
           jConfigName = (*pEnv)->NewString(pEnv, (const jchar*)configName, len);
       }
       jclass bitmapConfigClass = (*pEnv)->FindClass(pEnv, "android/graphics/Bitmap$Config");
       jobject javaBitmapConfig = (*pEnv)->CallStaticObjectMethod(pEnv, bitmapConfigClass,
               (*pEnv)->GetStaticMethodID(pEnv, bitmapConfigClass, "valueOf", "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;"), jConfigName);
       //create the bitmap
       return (*pEnv)->CallStaticObjectMethod(pEnv, javaBitmapClass, mid, pWidth, pHeight, javaBitmapConfig);
    }

    jintArray naGetVideoRes(JNIEnv *pEnv, jobject pObj) {
       jintArray lRes;
       if (NULL == codecCtx) {
           return NULL;
       }
       lRes = (*pEnv)->NewIntArray(pEnv, 2);
       if (lRes == NULL) {
           LOGI("cannot allocate memory for video size");
           return NULL;
       }
       jint lVideoRes[2];
       lVideoRes[0] = codecCtx->width;
       lVideoRes[1] = codecCtx->height;
       (*pEnv)->SetIntArrayRegion(pEnv, lRes, 0, 2, lVideoRes);
       return lRes;
    }

    void naSetSurface(JNIEnv *pEnv, jobject pObj, jobject pSurface) {
       if (0 != pSurface) {
           // get the native window reference
           window = ANativeWindow_fromSurface(pEnv, pSurface);
           // set format and size of window buffer
           ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
       } else {
           // release the native window
           ANativeWindow_release(window);
       }
    }

    jint naSetup(JNIEnv *pEnv, jobject pObj, int pWidth, int pHeight) {
       width = pWidth;
       height = pHeight;
       //create a bitmap as the buffer for frameRGBA
       bitmap = createBitmap(pEnv, pWidth, pHeight);
       if (AndroidBitmap_lockPixels(pEnv, bitmap, &buffer) < 0)
           return -1;
       //get the scaling context
       sws_ctx = sws_getContext (
               codecCtx->width,
               codecCtx->height,
               codecCtx->pix_fmt,
               pWidth,
               pHeight,
               AV_PIX_FMT_RGBA,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL
       );
       // Assign appropriate parts of bitmap to image planes in pFrameRGBA
       // Note that pFrameRGBA is an AVFrame, but AVFrame is a superset
       // of AVPicture
       avpicture_fill((AVPicture *)frameRGBA, buffer, AV_PIX_FMT_RGBA,
               pWidth, pHeight);
       return 0;
    }

    void finish(JNIEnv *pEnv) {
       //unlock the bitmap
       AndroidBitmap_unlockPixels(pEnv, bitmap);
       av_free(buffer);
       // Free the RGB image
       av_free(frameRGBA);
       // Free the YUV frame
       av_free(decodedFrame);
       // Close the codec
       avcodec_close(codecCtx);
       // Close the video file
       avformat_close_input(&amp;formatCtx);
    }

    void decodeAndRender(JNIEnv *pEnv) {
       ANativeWindow_Buffer    windowBuffer;
       AVPacket                packet;
       int                     i=0;
       int                     frameFinished;
       int                     lineCnt;
       while(av_read_frame(formatCtx, &packet)>=0 && !stop) {
           // Is this a packet from the video stream?
           if(packet.stream_index==videoStream) {
               // Decode video frame
               avcodec_decode_video2(codecCtx, decodedFrame, &frameFinished,
                  &packet);
               // Did we get a video frame?
               if(frameFinished) {
                   // Convert the image from its native format to RGBA
                   sws_scale
                   (
                       sws_ctx,
                       (uint8_t const * const *)decodedFrame->data,
                       decodedFrame->linesize,
                       0,
                       codecCtx->height,
                       frameRGBA->data,
                       frameRGBA->linesize
                   );
                   // lock the window buffer
                   if (ANativeWindow_lock(window, &windowBuffer, NULL) < 0) {
                       LOGE("cannot lock window");
                   } else {
                       // draw the frame on buffer
                       LOGI("copy buffer %d:%d:%d", width, height, width*height*4);
                       LOGI("window buffer: %d:%d:%d", windowBuffer.width,
                               windowBuffer.height, windowBuffer.stride);
                       memcpy(windowBuffer.bits, buffer,  width * height * 4);
                       // unlock the window buffer and post it to display
                       ANativeWindow_unlockAndPost(window);
                       // count number of frames
                       ++i;
                   }
               }
           }
           // Free the packet that was allocated by av_read_frame
           av_free_packet(&packet);
       }
       LOGI("total No. of frames decoded and rendered %d", i);
       finish(pEnv);
    }

    /**
    * start the video playback
    */
    void naPlay(JNIEnv *pEnv, jobject pObj) {
       //create a new thread for video decode and render
       pthread_t decodeThread;
       stop = 0;
       pthread_create(&decodeThread, NULL, decodeAndRender, NULL);
    }

    /**
    * stop the video playback
    */
    void naStop(JNIEnv *pEnv, jobject pObj) {
       stop = 1;
    }

    jint JNI_OnLoad(JavaVM* pVm, void* reserved) {
       JNIEnv* env;
       if ((*pVm)->GetEnv(pVm, (void **)&env, JNI_VERSION_1_6) != JNI_OK) {
            return -1;
       }
       JNINativeMethod nm[8];
       nm[0].name = "naInit";
       nm[0].signature = "(Ljava/lang/String;)I";
       nm[0].fnPtr = (void*)naInit;

       nm[1].name = "naSetSurface";
       nm[1].signature = "(Landroid/view/Surface;)V";
       nm[1].fnPtr = (void*)naSetSurface;

       nm[2].name = "naGetVideoRes";
       nm[2].signature = "()[I";
       nm[2].fnPtr = (void*)naGetVideoRes;

       nm[3].name = "naSetup";
       nm[3].signature = "(II)I";
       nm[3].fnPtr = (void*)naSetup;

       nm[4].name = "naPlay";
       nm[4].signature = "()V";
       nm[4].fnPtr = (void*)naPlay;

       nm[5].name = "naStop";
       nm[5].signature = "()V";
       nm[5].fnPtr = (void*)naStop;

       jclass cls = (*env)->FindClass(env, "roman10/tutorial/android_ffmpeg_tutorial02/MainActivity");
       //Register methods with env->RegisterNatives.
       (*env)->RegisterNatives(env, cls, nm, 6);
       return JNI_VERSION_1_6;
    }

    Here is the build.sh

    #!/bin/bash
    NDK=$HOME/Desktop/adt/android-ndk-r9
    SYSROOT=$NDK/platforms/android-9/arch-arm/
    TOOLCHAIN=$NDK/toolchains/arm-linux-androideabi-4.8/prebuilt/linux-x86_64
    function build_one
    {
    ./configure \
       --prefix=$PREFIX \
       --enable-shared \
       --disable-static \
       --disable-doc \
       --disable-ffmpeg \
       --disable-ffplay \
       --disable-ffprobe \
       --disable-ffserver \
       --disable-avdevice \
       --disable-doc \
       --disable-symver \
       --cross-prefix=$TOOLCHAIN/bin/arm-linux-androideabi- \
       --target-os=linux \
       --arch=arm \
       --enable-cross-compile \
       --sysroot=$SYSROOT \
       --extra-cflags="-Os -fpic $ADDI_CFLAGS" \
       --extra-ldflags="$ADDI_LDFLAGS" \
       $ADDITIONAL_CONFIGURE_FLAG
    make clean
    make
    make install
    }
    CPU=arm
    PREFIX=$(pwd)/android/$CPU
    ADDI_CFLAGS="-marm"
    build_one

    It works on the Galaxy Tab 2. What can I do to make it work on all devices? Please help me.
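
    A likely reason the output is correct on one device and broken on another is the line memcpy(windowBuffer.bits, buffer, width * height * 4) in decodeAndRender(): it assumes the window buffer rows are packed at exactly width pixels, but ANativeWindow_lock() can return a buffer whose stride (in pixels) is larger than its width, in which case the frame must be copied row by row. Below is a minimal, hedged sketch of a stride-aware copy that could replace that memcpy, assuming buffer still holds the tightly packed RGBA image produced by sws_scale(); it is one possible fix, not a guaranteed one.

    /* Copy the RGBA frame row by row, honouring the window buffer's stride.
     * windowBuffer.stride is expressed in pixels, hence the "* 4" for RGBA. */
    uint8_t *dst = (uint8_t *) windowBuffer.bits;
    const uint8_t *src = (const uint8_t *) buffer;
    int copyHeight = height < windowBuffer.height ? height : windowBuffer.height;
    int rowBytes = (width < windowBuffer.width ? width : windowBuffer.width) * 4;
    int y;
    for (y = 0; y < copyHeight; y++) {
        memcpy(dst + (size_t) y * windowBuffer.stride * 4,
               src + (size_t) y * width * 4,
               rowBytes);
    }

    The existing log line that prints windowBuffer.width, windowBuffer.height and windowBuffer.stride should show whether the stride actually differs from width on the Nexus 5.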

  • native memory and two thread

    26 February 2013, by user1978722

    I have two threads, in each of which I call a native function that allocates memory,
    then a function that works with that memory, and then a function that releases it.
    Here is the Java code:

    thread1 :

     @Override
       protected Void doInBackground(Void...params) {
       width_=FFMpegWrapper.getWidth(src);
       height_=FFMpegWrapper.getHeight(src);
       handle = FFMpegWrapper.openFile(src);


       for (int i=f1+1;i15*ost)-1000000);
       ByteBuffer my_buffer2 = FFMpegWrapper.allocNative2(bufferSize);
       FFMpegWrapper.getFrame2(handle, kadr, width_, height_, my_buffer2);
       Bitmap dest = Bitmap.createBitmap(width_, height_, Bitmap.Config.ARGB_8888);
       dest.copyPixelsFromBuffer(my_buffer2);
       OutputStream outStream = null;
       File file = new File(extStorageDirectory, "file"+toString().valueOf(i)+".png");
       Log.v("ttag",extStorageDirectory+"/file"+toString().valueOf(i)+".png");
       try {
       outStream = new FileOutputStream(file);
       dest.compress(Bitmap.CompressFormat.PNG, 100, outStream);
       outStream.flush();
       outStream.close();
       }
       catch(Exception e)
       {}


       FFMpegWrapper.freeNative2();
       mProgressStatus =i;  
       allProgress=allProgress+1;
       this.publishProgress(mProgressStatus);
       }
     // TODO Auto-generated method stub

           return null;

           }

    thread2 :

    @Override
       protected Void doInBackground(Void...params) {
           width_=FFMpegWrapper.getWidth(src);
           height_=FFMpegWrapper.getHeight(src);
           handle = FFMpegWrapper.openFile(src);

           for (int i=0;i15*ost)-1000000);
                ByteBuffer my_buffer = FFMpegWrapper.allocNative(bufferSize);
                   FFMpegWrapper.getFrame(handle, kadr, width_, height_, my_buffer);
                   Log.v("ttag",toString().valueOf(kadr));
                   Bitmap dest = Bitmap.createBitmap(width_, height_, Bitmap.Config.ARGB_8888);
                   dest.copyPixelsFromBuffer(my_buffer);

                   OutputStream outStream = null;
                   File file = new File(extStorageDirectory, "file"+toString().valueOf(i)+".png");
                   Log.v("ttag",extStorageDirectory+"/file"+toString().valueOf(i)+".png");
                   try {
                    outStream = new FileOutputStream(file);
                    dest.compress(Bitmap.CompressFormat.PNG, 100, outStream);
                    outStream.flush();
                    outStream.close();
                    //dest.
                   }
                   catch(Exception e)
                   {}


               FFMpegWrapper.freeNative();
               mProgressStatus =i;  
               allProgress=allProgress+1;
            this.publishProgress(mProgressStatus);
       }

       // TODO Auto-generated method stub

       return null;

       }

    And the functions on the JNI side:

    jint Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_getFrame(JNIEnv *env, jobject thiz, thandle_file handleFile, jlong timeUS, jint width, jint height, jobject buffer) {

    //  LOGI("file= %d",handleFile);
       AVFormatContext* ctx = ((struct thandle*)handleFile)->ctx;
       AVCodecContext* codecCtx = ((struct thandle*)handleFile)->codecCtx;
       AVPacket* packet =  ((struct thandle*)handleFile)->packet;
       int videoStream = ((struct thandle*)handleFile)->videoStream;
    jshort* buff = (jshort*) (*env)->GetDirectBufferAddress(env, buffer);

    AVFrame* frame = avcodec_alloc_frame(); //YUV frame
    avcodec_get_frame_defaults(frame);

    int frameNumber = timeUS;
    //LOGI("avtb= %d",AV_TIME_BASE);
    int64_t pos = frameNumber * AV_TIME_BASE / 1000000;
    int64_t seek_target= av_rescale_q(pos, AV_TIME_BASE_Q, ctx->streams[videoStream]->time_base);

    int res = avformat_seek_file(ctx
    , videoStream
    , INT64_MIN
    , seek_target//* AV_TIME_BASE
    , INT64_MAX
    , 0);
    //LOGI("seek: %d f=%ld pos=%lld st=%lld", res, frameNumber, (int64_t)pos, seek_target);
    if (res >= 0) {
    avcodec_flush_buffers(codecCtx);
    // LOGI("flushed");
    }
    av_init_packet(packet);

    AVFrame* frameRGB = avcodec_alloc_frame();
    avcodec_get_frame_defaults(frameRGB);

    enum PixelFormat pixel_format = PIX_FMT_RGBA;
    avpicture_fill((AVPicture*) frameRGB
    , (uint8_t*)buff
    , pixel_format
    , codecCtx->width
    , codecCtx->height
    );

    while (av_read_frame(ctx, packet) == 0) {
    LOGI("pts1=%lld st1=%lld", packet->pts, seek_target);
    if (packet->stream_index == videoStream) {
     int gotPicture = 0;
     int bytesDecompressed = avcodec_decode_video2(codecCtx, frame, &gotPicture, packet);
       if (gotPicture && packet->pts >= seek_target) {
     //    LOGI("opana");
       // convert the data from YUV to RGB24 format
       struct SwsContext* scaleCtx = sws_getContext(frame->width,
         frame->height,
         (enum PixelFormat)frame->format
         , width
         , height
         , pixel_format
         , SWS_BICUBIC
         , 0, 0, 0);

       int height = sws_scale(scaleCtx
         , frame->data
         , frame->linesize
         , 0
         , frame->height
         , frameRGB->data
         , frameRGB->linesize);
      break;
      }
     av_free_packet(packet);
     }
    }
    //LOGI("ended");
    av_free(frameRGB);
    av_free(frame);
    return 0;
    }


    static jobject globalRef;

    jobject Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_allocNative(JNIEnv* env, jobject thiz, jlong size)
    {
       void* buffer = malloc(size);
           jobject directBuffer = (*env)->NewDirectByteBuffer(env,buffer, size);
           globalRef = (*env)->NewGlobalRef(env,directBuffer);

           return globalRef;

    }
    void Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_freeNative(JNIEnv* env, jobject thiz)
    {
        void *buffer = (*env)->GetDirectBufferAddress(env,globalRef);

           (*env)->DeleteGlobalRef(env,globalRef);
           free(buffer);
           LOGI("free1");
    }
    ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////

    jint Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_getFrame2(JNIEnv *env, jobject thiz, thandle_file handleFile, jlong timeUS, jint width, jint height, jobject buffer) {

       //LOGI("file= %d",handleFile);
       AVFormatContext* ctx = ((struct thandle*)handleFile)->ctx;
       AVCodecContext* codecCtx = ((struct thandle*)handleFile)->codecCtx;
       AVPacket* packet =  ((struct thandle*)handleFile)->packet;
       int videoStream = ((struct thandle*)handleFile)->videoStream;
    jshort* buff = (jshort*) (*env)->GetDirectBufferAddress(env, buffer);

    AVFrame* frame = avcodec_alloc_frame(); //YUV frame
    avcodec_get_frame_defaults(frame);

    int frameNumber = timeUS;
    //LOGI("avtb= %d",AV_TIME_BASE);
    int64_t pos = frameNumber * AV_TIME_BASE / 1000000;
    int64_t seek_target= av_rescale_q(pos, AV_TIME_BASE_Q, ctx->streams[videoStream]->time_base);

    int res = avformat_seek_file(ctx
    , videoStream
    , INT64_MIN
    , seek_target//* AV_TIME_BASE
    , INT64_MAX
    , 0);
    //LOGI("seek: %d f=%ld pos=%lld st=%lld", res, frameNumber, (int64_t)pos, seek_target);
    if (res >= 0) {
    avcodec_flush_buffers(codecCtx);
    //LOGI("flushed");
    }
    av_init_packet(packet);

    AVFrame* frameRGB = avcodec_alloc_frame();
    avcodec_get_frame_defaults(frameRGB);

    enum PixelFormat pixel_format = PIX_FMT_RGBA;
    avpicture_fill((AVPicture*) frameRGB
    , (uint8_t*)buff
    , pixel_format
    , codecCtx->width
    , codecCtx->height
    );

    while (av_read_frame(ctx, packet) == 0) {

    LOGI("pts2=%lld st2=%lld", packet->pts, seek_target);
    if (packet->stream_index == videoStream) {
     int gotPicture = 0;
     int bytesDecompressed = avcodec_decode_video2(codecCtx, frame, &gotPicture, packet);
     //LOGI("pred_opana");
       if (gotPicture && packet->pts >= seek_target) {
           //LOGI("opana");
       // convert the data from YUV to RGB24 format
       struct SwsContext* scaleCtx = sws_getContext(frame->width,
         frame->height,
         (enum PixelFormat)frame->format
         , width
         , height
         , pixel_format
         , SWS_BICUBIC
         , 0, 0, 0);

       int height = sws_scale(scaleCtx
         , frame->data
         , frame->linesize
         , 0
         , frame->height
         , frameRGB->data
         , frameRGB->linesize);
      break;
      }
     av_free_packet(packet);
     }

    }
    //LOGI("ended");
    av_free(frameRGB);
    av_free(frame);
    return 0;
    }






    static jobject globalRef2;

    jobject Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_allocNative2(JNIEnv* env, jobject thiz, jlong size)
    {
       void* buffer = malloc(size);
           jobject directBuffer = (*env)->NewDirectByteBuffer(env,buffer, size);
           globalRef2 = (*env)->NewGlobalRef(env,directBuffer);

           return globalRef2;

    }
    void Java_artemxxl_projects_livewallpapercreator_FFMpegWrapper_freeNative2(JNIEnv* env, jobject thiz)
    {
        void *buffer = (*env)->GetDirectBufferAddress(env,globalRef2);

           (*env)->DeleteGlobalRef(env,globalRef2);
           free(buffer);
           LOGI("free2");

    }

    When I release the memory in one of the threads,
    I get an error from the second one.

    Here are the logs:

    02-26 17:44:21.680: I/com.domain.tag(855): initialize_passed
    02-26 17:44:21.810: D/dalvikvm(855): GC_CONCURRENT freed 208K, 6% free 13183K/14023K, paused 14ms+8ms, total 37ms
    02-26 17:44:21.810: D/AbsListView(855): [unregisterDoubleTapMotionListener]
    02-26 17:44:21.810: I/MotionRecognitionManager(855):   .unregisterListener : / listener count = 0->0, listener=android.widget.AbsListView$4@42cdcda0
    02-26 17:44:21.810: W/CursorWrapperInner(855): Cursor finalized without prior close()
    02-26 17:44:21.930: I/com.domain.tag(855): pts2=234133 st2=235160
    02-26 17:44:21.945: V/ost(855): 0
    02-26 17:44:21.945: V/sec(855): 1
    02-26 17:44:21.945: V/start(855): 234627000
    02-26 17:44:21.945: V/start(855): 234627
    02-26 17:44:21.945: I/com.domain.tag(855): pts1=232098 st1=234627
    02-26 17:44:21.960: I/com.domain.tag(855): pts2=233344 st2=235160
    02-26 17:44:21.960: I/com.domain.tag(855): pts2=234166 st2=235160
    02-26 17:44:21.975: I/com.domain.tag(855): pts1=231125 st1=234627
    02-26 17:44:21.975: I/com.domain.tag(855): pts1=232131 st1=234627
    02-26 17:44:21.985: I/com.domain.tag(855): pts2=234200 st2=235160
    02-26 17:44:21.995: I/com.domain.tag(855): pts1=232164 st1=234627
    02-26 17:44:22.010: I/com.domain.tag(855): pts2=234233 st2=235160
    02-26 17:44:22.015: I/com.domain.tag(855): pts1=232198 st1=234627
    02-26 17:44:22.030: I/com.domain.tag(855): pts2=234266 st2=235160
    02-26 17:44:22.040: I/com.domain.tag(855): pts1=232231 st1=234627
    02-26 17:44:22.055: I/com.domain.tag(855): pts2=234300 st2=235160
    02-26 17:44:22.065: I/com.domain.tag(855): pts1=232264 st1=234627
    02-26 17:44:22.075: I/com.domain.tag(855): pts2=234333 st2=235160
    02-26 17:44:22.085: I/com.domain.tag(855): pts1=232298 st1=234627
    02-26 17:44:22.100: I/com.domain.tag(855): pts2=234366 st2=235160
    02-26 17:44:22.105: I/com.domain.tag(855): pts1=232331 st1=234627
    02-26 17:44:22.120: I/com.domain.tag(855): pts2=234400 st2=235160
    02-26 17:44:22.130: I/com.domain.tag(855): pts1=232365 st1=234627
    02-26 17:44:22.145: I/com.domain.tag(855): pts2=233685 st2=235160
    02-26 17:44:22.145: I/com.domain.tag(855): pts2=234433 st2=235160
    02-26 17:44:22.150: I/com.domain.tag(855): pts1=232398 st1=234627
    02-26 17:44:22.165: I/com.domain.tag(855): pts2=234467 st2=235160
    02-26 17:44:22.175: I/com.domain.tag(855): pts1=231509 st1=234627
    02-26 17:44:22.175: I/com.domain.tag(855): pts1=232431 st1=234627
    02-26 17:44:22.185: I/com.domain.tag(855): pts2=234500 st2=235160
    02-26 17:44:22.195: I/com.domain.tag(855): pts1=232465 st1=234627
    02-26 17:44:22.210: I/com.domain.tag(855): pts2=234533 st2=235160
    02-26 17:44:22.220: I/com.domain.tag(855): pts1=232498 st1=234627
    02-26 17:44:22.230: I/com.domain.tag(855): pts2=234567 st2=235160
    02-26 17:44:22.245: I/com.domain.tag(855): pts1=232531 st1=234627
    02-26 17:44:22.250: I/com.domain.tag(855): pts2=234600 st2=235160
    02-26 17:44:22.265: I/com.domain.tag(855): pts1=232565 st1=234627
    02-26 17:44:22.275: I/com.domain.tag(855): pts2=234633 st2=235160
    02-26 17:44:22.290: I/com.domain.tag(855): pts1=232598 st1=234627
    02-26 17:44:22.295: I/com.domain.tag(855): pts2=234667 st2=235160
    02-26 17:44:22.310: I/com.domain.tag(855): pts1=232631 st1=234627
    02-26 17:44:22.325: I/com.domain.tag(855): pts2=234700 st2=235160
    02-26 17:44:22.335: I/com.domain.tag(855): pts1=232665 st1=234627
    02-26 17:44:22.345: I/com.domain.tag(855): pts2=234734 st2=235160
    02-26 17:44:22.355: I/com.domain.tag(855): pts1=232698 st1=234627
    02-26 17:44:22.365: I/com.domain.tag(855): pts2=234767 st2=235160
    02-26 17:44:22.380: I/com.domain.tag(855): pts1=232732 st1=234627
    02-26 17:44:22.390: I/com.domain.tag(855): pts2=234800 st2=235160
    02-26 17:44:22.400: I/com.domain.tag(855): pts1=232765 st1=234627
    02-26 17:44:22.415: I/com.domain.tag(855): pts2=234834 st2=235160
    02-26 17:44:22.425: I/com.domain.tag(855): pts1=232798 st1=234627
    02-26 17:44:22.440: I/com.domain.tag(855): pts2=234069 st2=235160
    02-26 17:44:22.440: I/com.domain.tag(855): pts2=234867 st2=235160
    02-26 17:44:22.450: I/com.domain.tag(855): pts1=231893 st1=234627
    02-26 17:44:22.450: I/com.domain.tag(855): pts1=232832 st1=234627
    02-26 17:44:22.460: I/com.domain.tag(855): pts2=234900 st2=235160
    02-26 17:44:22.475: I/com.domain.tag(855): pts1=232865 st1=234627
    02-26 17:44:22.485: I/com.domain.tag(855): pts2=234934 st2=235160
    02-26 17:44:22.500: I/com.domain.tag(855): pts1=232898 st1=234627
    02-26 17:44:22.510: I/com.domain.tag(855): pts2=234967 st2=235160
    02-26 17:44:22.525: I/com.domain.tag(855): pts1=232932 st1=234627
    02-26 17:44:22.530: I/com.domain.tag(855): pts2=235000 st2=235160
    02-26 17:44:22.555: I/com.domain.tag(855): pts1=232965 st1=234627
    02-26 17:44:22.555: I/com.domain.tag(855): pts2=235034 st2=235160
    02-26 17:44:22.580: I/com.domain.tag(855): pts2=235067 st2=235160
    02-26 17:44:22.580: I/com.domain.tag(855): pts1=232998 st1=234627
    02-26 17:44:22.605: I/com.domain.tag(855): pts2=235101 st2=235160
    02-26 17:44:22.610: I/com.domain.tag(855): pts1=233032 st1=234627
    02-26 17:44:22.630: I/com.domain.tag(855): pts2=235134 st2=235160
    02-26 17:44:22.635: I/com.domain.tag(855): pts1=233065 st1=234627
    02-26 17:44:22.655: I/com.domain.tag(855): pts2=235167 st2=235160
    02-26 17:44:22.660: I/com.domain.tag(855): pts1=233099 st1=234627
    02-26 17:44:22.690: I/com.domain.tag(855): pts1=233132 st1=234627
    02-26 17:44:22.705: D/dalvikvm(855): GC_FOR_ALLOC freed 434K, 10% free 12753K/14023K, paused 17ms, total 17ms
    02-26 17:44:22.710: I/dalvikvm-heap(855): Grow heap (frag case) to 13.759MB for 786448-byte allocation
    02-26 17:44:22.715: I/com.domain.tag(855): pts1=232234 st1=234627
    02-26 17:44:22.715: I/com.domain.tag(855): pts1=233165 st1=234627
    02-26 17:44:22.735: D/dalvikvm(855): GC_FOR_ALLOC freed 0K, 9% free 13521K/14855K, paused 25ms, total 25ms
    02-26 17:44:22.735: I/com.domain.tag(855): pts1=233199 st1=234627
    02-26 17:44:22.745: V/ttag(855): /storage/sdcard0/temp1/file8.png
    02-26 17:44:22.755: D/dalvikvm(855): GC_CONCURRENT freed 2K, 9% free 13527K/14855K, paused 2ms+2ms, total 20ms
    02-26 17:44:22.760: I/com.domain.tag(855): pts1=233232 st1=234627
    02-26 17:44:22.785: I/com.domain.tag(855): pts1=233265 st1=234627
    02-26 17:44:22.810: I/com.domain.tag(855): pts1=233299 st1=234627
    02-26 17:44:22.835: I/com.domain.tag(855): pts1=233332 st1=234627
    02-26 17:44:22.855: I/com.domain.tag(855): pts1=233366 st1=234627
    02-26 17:44:22.880: I/com.domain.tag(855): pts1=233399 st1=234627
    02-26 17:44:22.905: I/com.domain.tag(855): pts1=233432 st1=234627
    02-26 17:44:22.955: I/com.domain.tag(855): pts1=233466 st1=234627
    02-26 17:44:23.000: I/com.domain.tag(855): pts1=233499 st1=234627
    02-26 17:44:23.030: I/com.domain.tag(855): pts1=232637 st1=234627
    02-26 17:44:23.030: I/com.domain.tag(855): pts1=233532 st1=234627
    02-26 17:44:23.060: I/com.domain.tag(855): pts1=233566 st1=234627
    02-26 17:44:23.080: I/com.domain.tag(855): pts1=233599 st1=234627
    02-26 17:44:23.105: I/com.domain.tag(855): free2
    02-26 17:44:23.105: I/com.domain.tag(855): pts2=234133 st2=235227
    02-26 17:44:23.115: I/com.domain.tag(855): pts1=233344 st1=234627
    02-26 17:44:23.115: I/com.domain.tag(855): pts1=234166 st1=234627
    02-26 17:44:23.140: I/com.domain.tag(855): pts2=234200 st2=235227
    02-26 17:44:23.140: I/com.domain.tag(855): pts1=234233 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234266 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234300 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234333 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234366 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234400 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=233685 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234433 st1=234627
    02-26 17:44:24.035: I/com.domain.tag(855): pts1=234467 st1=234627
    02-26 17:44:24.040: I/com.domain.tag(855): pts1=234500 st1=234627
    02-26 17:44:24.040: I/com.domain.tag(855): pts1=234533 st1=234627
    02-26 17:44:24.040: I/com.domain.tag(855): pts1=234567 st1=234627
    02-26 17:44:24.040: I/com.domain.tag(855): pts1=234600 st1=234627
    02-26 17:44:24.040: I/com.domain.tag(855): pts1=234633 st1=234627
    02-26 17:44:24.050: I/com.domain.tag(855): pts2=234667 st2=235227
    02-26 17:44:24.055: V/ttag(855): 234627000
    02-26 17:44:24.075: D/dalvikvm(855): GC_FOR_ALLOC freed 776K, 15% free 12755K/14855K, paused 21ms, total 21ms
    02-26 17:44:24.075: I/com.domain.tag(855): pts2=234700 st2=235227
    02-26 17:44:24.080: I/dalvikvm-heap(855): Grow heap (frag case) to 13.761MB for 786448-byte allocation
    02-26 17:44:24.100: I/com.domain.tag(855): pts2=234734 st2=235227
    02-26 17:44:24.110: D/dalvikvm(855): GC_CONCURRENT freed <1K, 9% free 13523K/14855K, paused 13ms+2ms, total 30ms
    02-26 17:44:24.110: D/dalvikvm(855): WAIT_FOR_CONCURRENT_GC blocked 17ms
    02-26 17:44:24.110: D/dalvikvm(855): WAIT_FOR_CONCURRENT_GC blocked 18ms
    02-26 17:44:24.110: V/ttag(855): /storage/sdcard0/temp1/file0.png
    02-26 17:44:24.135: I/com.domain.tag(855): pts2=234767 st2=235227
    02-26 17:44:24.170: I/com.domain.tag(855): pts2=234800 st2=235227
    02-26 17:44:24.195: I/com.domain.tag(855): pts2=234834 st2=235227
    02-26 17:44:24.220: I/com.domain.tag(855): pts2=234069 st2=235227
    02-26 17:44:24.220: I/com.domain.tag(855): pts2=234867 st2=235227
    02-26 17:44:24.245: I/com.domain.tag(855): pts2=234900 st2=235227
    02-26 17:44:24.275: I/com.domain.tag(855): pts2=234934 st2=235227
    02-26 17:44:24.300: I/com.domain.tag(855): pts2=234967 st2=235227
    02-26 17:44:24.325: I/com.domain.tag(855): pts2=235000 st2=235227
    02-26 17:44:24.350: I/com.domain.tag(855): pts2=235034 st2=235227
    02-26 17:44:24.380: I/com.domain.tag(855): pts2=235067 st2=235227
    02-26 17:44:24.405: I/com.domain.tag(855): pts2=235101 st2=235227
    02-26 17:44:24.430: I/com.domain.tag(855): pts2=235134 st2=235227
    02-26 17:44:24.445: I/com.domain.tag(855): free1
    02-26 17:44:24.445: V/ost(855): 1
    02-26 17:44:24.445: V/sec(855): 1
    02-26 17:44:24.445: V/start(855): 234627000
    02-26 17:44:24.445: V/start(855): 234627
    02-26 17:44:24.445: I/com.domain.tag(855): pts1=232098 st1=234694
    02-26 17:44:24.445: I/com.domain.tag(855): pts1=231125 st1=234694
    02-26 17:44:24.445: I/com.domain.tag(855): pts1=232131 st1=234694
    02-26 17:44:24.455: I/com.domain.tag(855): pts1=232164 st1=234694
    02-26 17:44:24.485: I/com.domain.tag(855): pts2=232198 st2=235227
    02-26 17:44:24.715: I/com.domain.tag(855): pts1=232231 st1=234694
    02-26 17:44:24.725: I/com.domain.tag(855): pts1=232264 st1=234694
    02-26 17:44:24.725: I/com.domain.tag(855): pts1=232298 st1=234694
    02-26 17:44:24.725: I/com.domain.tag(855): pts1=232331 st1=234694
    02-26 17:44:24.725: I/com.domain.tag(855): pts1=232365 st1=234694
    02-26 17:44:24.725: I/com.domain.tag(855): pts1=232398 st1=234694
    02-26 17:44:24.730: I/com.domain.tag(855): pts1=231509 st1=234694
    02-26 17:44:24.730: I/com.domain.tag(855): pts1=232431 st1=234694
    02-26 17:44:24.730: I/com.domain.tag(855): pts1=232465 st1=234694
    02-26 17:44:24.730: I/com.domain.tag(855): pts1=232498 st1=234694
    02-26 17:44:24.750: I/com.domain.tag(855): pts2=232531 st2=235227
    02-26 17:44:24.960: I/com.domain.tag(855): pts1=232565 st1=234694
    02-26 17:44:24.960: I/com.domain.tag(855): pts1=232598 st1=234694
    02-26 17:44:24.960: I/com.domain.tag(855): pts1=232631 st1=234694
    02-26 17:44:24.960: I/com.domain.tag(855): pts1=232665 st1=234694
    02-26 17:44:24.965: I/com.domain.tag(855): pts1=232698 st1=234694
    02-26 17:44:24.965: I/com.domain.tag(855): pts1=232732 st1=234694
    02-26 17:44:24.965: I/com.domain.tag(855): pts1=232765 st1=234694
    02-26 17:44:24.965: I/com.domain.tag(855): pts1=232798 st1=234694
    02-26 17:44:24.980: I/com.domain.tag(855): pts2=231893 st2=235227
    02-26 17:44:24.980: I/com.domain.tag(855): pts2=232832 st2=235227
    02-26 17:44:25.210: I/com.domain.tag(855): pts1=232865 st1=234694
    02-26 17:44:25.210: I/com.domain.tag(855): pts1=232898 st1=234694
    02-26 17:44:25.210: I/com.domain.tag(855): pts1=232932 st1=234694
    02-26 17:44:25.210: I/com.domain.tag(855): pts1=232965 st1=234694
    02-26 17:44:25.210: I/com.domain.tag(855): pts1=232998 st1=234694
    02-26 17:44:25.230: I/com.domain.tag(855): pts2=233032 st2=235227
    02-26 17:44:25.245: I/com.domain.tag(855): pts1=233065 st1=234694
    02-26 17:44:25.260: I/com.domain.tag(855): pts2=233099 st2=235227
    02-26 17:44:25.715: I/com.domain.tag(855): pts1=233132 st1=234694
    02-26 17:44:25.725: I/com.domain.tag(855): pts1=232234 st1=234694
    02-26 17:44:25.725: I/com.domain.tag(855): pts1=233165 st1=234694
    02-26 17:44:25.730: I/com.domain.tag(855): pts1=233199 st1=234694
    02-26 17:44:25.735: I/com.domain.tag(855): pts1=233232 st1=234694
    02-26 17:44:25.740: I/com.domain.tag(855): pts1=233265 st1=234694
    02-26 17:44:25.740: I/com.domain.tag(855): pts1=233299 st1=234694
    02-26 17:44:25.745: I/com.domain.tag(855): pts1=233332 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233366 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233399 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233432 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233466 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233499 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=232637 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233532 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233566 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233599 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233632 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233666 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233699 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233733 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233766 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233799 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=232970 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233833 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233866 st1=234694
    02-26 17:44:25.755: I/com.domain.tag(855): pts1=233899 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=233933 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=233966 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=233999 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234033 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234066 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234100 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234133 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=233344 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234166 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234200 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234233 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234266 st1=234694
    02-26 17:44:25.760: I/com.domain.tag(855): pts1=234300 st1=234694
    02-26 17:44:25.775: I/com.domain.tag(855): pts2=234333 st2=235227
    02-26 17:44:26.115: I/com.domain.tag(855): pts1=234366 st1=234694
    02-26 17:44:26.120: I/com.domain.tag(855): pts1=234400 st1=234694
    02-26 17:44:26.120: I/com.domain.tag(855): pts1=233685 st1=234694
    02-26 17:44:26.120: I/com.domain.tag(855): pts1=234433 st1=234694
    02-26 17:44:26.120: I/com.domain.tag(855): pts1=234467 st1=234694
    02-26 17:44:26.125: I/com.domain.tag(855): pts1=234500 st1=234694
    02-26 17:44:26.125: I/com.domain.tag(855): pts1=234533 st1=234694
    02-26 17:44:26.125: I/com.domain.tag(855): pts1=234567 st1=234694
    02-26 17:44:26.125: I/com.domain.tag(855): pts1=234600 st1=234694
    02-26 17:44:26.130: I/com.domain.tag(855): pts1=234633 st1=234694
    02-26 17:44:26.130: I/com.domain.tag(855): pts1=234667 st1=234694
    02-26 17:44:26.130: I/com.domain.tag(855): pts1=234700 st1=234694
    02-26 17:44:26.135: I/com.domain.tag(855): pts2=234734 st2=235227
    02-26 17:44:26.135: V/ttag(855): 234693666
    02-26 17:44:26.160: D/dalvikvm(855): GC_FOR_ALLOC freed 781K, 15% free 12756K/14855K, paused 21ms, total 21ms
    02-26 17:44:26.160: I/dalvikvm-heap(855): Grow heap (frag case) to 13.761MB for 786448-byte allocation
    02-26 17:44:26.165: I/com.domain.tag(855): pts2=234767 st2=235227
    02-26 17:44:26.175: D/dalvikvm(855): GC_CONCURRENT freed <1K, 9% free 13523K/14855K, paused 2ms+2ms, total 16ms
    02-26 17:44:26.175: D/dalvikvm(855): WAIT_FOR_CONCURRENT_GC blocked 14ms
    02-26 17:44:26.175: D/dalvikvm(855): WAIT_FOR_CONCURRENT_GC blocked 14ms
    02-26 17:44:26.180: V/ttag(855): /storage/sdcard0/temp1/file1.png
    02-26 17:44:26.185: A/libc(855): @@@ ABORTING: HEAP MEMORY CORRUPTION IN tmalloc_large addr=0x0008fffb
    02-26 17:44:26.185: A/libc(855): Fatal signal 11 (SIGSEGV) at 0xdeadbaad (code=1), thread 2047 (AsyncTask #1)

    What is my mistake?
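
    It is hard to pin down the exact cause from these snippets alone, but one thing worth checking is that getFrame()/getFrame2() never verify that the direct ByteBuffer passed in is large enough for the RGBA picture written through frameRGB, which avpicture_fill() lays out for codecCtx->width x codecCtx->height pixels. If bufferSize computed on the Java side is even slightly smaller, sws_scale() writes past the end of the malloc()'d block, and the corruption then surfaces in whichever thread allocates next, which is consistent with the HEAP MEMORY CORRUPTION IN tmalloc_large abort in the log. The following defensive check, placed right after GetDirectBufferAddress(), is a hedged sketch using the names from the question; it is an assumption about the crash, not a confirmed diagnosis.

    /* Reject buffers that are too small for a full RGBA picture before
     * avpicture_fill()/sws_scale() get a chance to overflow them. */
    int required = avpicture_get_size(PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
    jlong capacity = (*env)->GetDirectBufferCapacity(env, buffer);
    if (capacity < (jlong) required) {
        LOGI("buffer too small: capacity=%lld required=%d", (long long) capacity, required);
        return -1;
    }

    GetDirectBufferCapacity() simply returns the size that was passed to NewDirectByteBuffer() in allocNative()/allocNative2(), so the check costs nothing per frame.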

  • ffmpeg frame to android bitmap

    19 February 2015, by grunk

    I’m using ffmpeg (as a library) to decode an h264 packet.
    The decoding part seems OK (at least I think so), but I don’t understand how to convert the decoded frame to a format that Android can understand and display in an ImageView.

    jbyteArray Java_com_exemple_ffmpeg_decodeImage(JNIEnv* env,jobject obj, jbyteArray h264Datas)
    {
       jbyte* bufferPtr = (*env)->GetByteArrayElements(env, h264Datas, NULL);
       jsize lengthOfDatas = (*env)->GetArrayLength(env, h264Datas);
       (*env)->ReleaseByteArrayElements(env, h264Datas, bufferPtr, JNI_ABORT);

       AVPacket packet = {0};
       packet.data = bufferPtr;
       packet.size  = lengthOfDatas;

       int frameFinished = 0;

       if(packet.stream_index == 0)
       {
            int res = avcodec_decode_video2(codecCtx, srcFrame, &frameFinished, &packet);
            if(res < 0)
           {
               LOGI("Failed to decode frame\n");
           }
       }

       if (!isOutputInit)
       {
            if(codecCtx->width > 0 && codecCtx->height > 0)
           {
               outputBufLen = avpicture_get_size(PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
               if(outputBuf != NULL) {
                   av_free(outputBuf);
               }
               outputBuf = av_malloc(outputBufLen);
               avpicture_fill((AVPicture*)dstFrame, outputBuf, PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
               convertCtx = sws_getContext(codecCtx->width, codecCtx->height, codecCtx->pix_fmt,  codecCtx->width,
                                           codecCtx->height, PIX_FMT_RGBA, SWS_FAST_BILINEAR, NULL, NULL, NULL);

               isOutputInit = 1;

           }

       }

       if(frameFinished)
           isFrameReady = 1;

       //Creating data for android


    }

    What I want from there is to get a jbyteArray that I can feed to BitmapFactory, or even better an Android Bitmap if that is possible.

    How can I convert the decoded frame to something usable by Android?
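
    One way to finish the function (hedged sketch, reusing the globals declared above: codecCtx, srcFrame, dstFrame, convertCtx, outputBuf, outputBufLen) is to scale the decoded frame into the RGBA buffer prepared by avpicture_fill() and hand the raw bytes back as a jbyteArray. Note that BitmapFactory only decodes encoded images such as PNG or JPEG; raw RGBA bytes are instead wrapped in a ByteBuffer and pushed into a Bitmap created with Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888) via copyPixelsFromBuffer().

    // Hedged sketch of the missing "Creating data for android" tail.
    // PIX_FMT_RGBA matches the format used when convertCtx/outputBuf were set up.
    if (frameFinished && isOutputInit)
    {
        // Convert srcFrame (codec pixel format) into dstFrame, which points at outputBuf
        sws_scale(convertCtx,
                  (const uint8_t * const *) srcFrame->data, srcFrame->linesize,
                  0, codecCtx->height,
                  dstFrame->data, dstFrame->linesize);

        // Copy the RGBA pixels into a Java byte[] for the caller
        jbyteArray result = (*env)->NewByteArray(env, outputBufLen);
        if (result != NULL)
        {
            (*env)->SetByteArrayRegion(env, result, 0, outputBufLen,
                                       (const jbyte *) outputBuf);
        }
        return result;
    }
    return NULL;

    On the Java side, ByteBuffer.wrap(bytes) followed by bitmap.copyPixelsFromBuffer(...) then displays the frame in an ImageView without going through BitmapFactory. It would also be safer to call ReleaseByteArrayElements() after avcodec_decode_video2() rather than before it, since packet.data still points at bufferPtr.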