

Other articles (104)

  • MediaSPIP version 0.1 Beta

    16 April 2011

    MediaSPIP 0.1 beta is the first version of MediaSPIP deemed "usable".
    The zip file provided here contains only the MediaSPIP sources, in the standalone version.
    To get a working installation, all of the software dependencies must be installed manually on the server.
    If you want to use this archive for a farm-mode installation, further modifications are also required (...)

  • The plugin: Podcasts

    14 July 2010

    The problem of podcasting is, once again, a problem that reveals the state of standardization of data transport on the Internet.
    Two interesting formats exist: the one developed by Apple, heavily geared toward iTunes, whose SPEC is here; and the "Media RSS Module" format, which is more "open" and is notably backed by Yahoo and the Miro software.
    File types supported in the feeds
    Apple's format only allows the following formats in its feeds: .mp3 audio/mpeg .m4a audio/x-m4a .mp4 (...)

  • Encoding and processing into web-friendly formats

    13 April 2011

    MediaSPIP automatically converts uploaded files to internet-compatible formats.
    Video files are encoded in Ogv and WebM (supported by HTML5) and in MP4 (supported by Flash).
    Audio files are encoded in Ogg (supported by HTML5) and in MP3 (supported by Flash).
    Where possible, text is analyzed in order to retrieve the data needed for search engine indexing, and the document is then exported as a series of image files.
    All uploaded files are stored online in their original format, so you can (...)

On other sites (35317)

  • Multithreaded decoding of A/V streams via FFmpeg + JNI + Android NDK causes artifacts

    26 April 2024, by tretdm

    It would seem that if I mute the callback that reads the audio buffer, the artifacts disappear.

    [Screenshot: FFmpeg decoding artifacts in the picture]

    I'll attach the code below to make it easier to understand the problem:

    ffwrap.cpp

    static void *audioDecoderThread(void *arg) {
    AudioDecoder *audioDec = (AudioDecoder*) arg;
    audioDec->prepare();
    audioDec->start();
    return NULL;
}

static void *videoDecoderThread(void *arg) {
    VideoDecoder *videoDec = (VideoDecoder*) arg;
    videoDec->prepare();
    videoDec->start();
    return NULL;
}

// ...
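// Note: the audio and video decoder threads created below share the same
// AVFormatContext (gFormatCtx); each calls av_read_frame() on it concurrently
// and drops the packets that belong to the other stream. AVFormatContext is
// not thread-safe for concurrent reads from multiple threads.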

void FFmpegWrapper::startDecoding(int pStreamIndex) {
    if(pStreamIndex == gAudioStreamIndex) {
        AudioDecoder *audioDec = new AudioDecoder(
            gFormatCtx,
            gAudioCodecCtx,
            getStream(gAudioStreamIndex),
            gAudioStreamIndex,
            gInterface
        );

        pthread_t audioDecThread;
        pthread_create(&audioDecThread, NULL, &audioDecoderThread, (void*)audioDec);
    } else if(pStreamIndex == gVideoStreamIndex) {
        VideoDecoder *videoDec = new VideoDecoder(
            gFormatCtx,
            gVideoCodecCtx,
            getStream(gVideoStreamIndex),
            gVideoStreamIndex,
            gInterface
        );

        pthread_t videoDecThread;
        pthread_create(&videoDecThread, NULL, &videoDecoderThread, (void*)videoDec);
    }
}

    audiodec.cpp

    AudioDecoder::AudioDecoder(AVFormatContext *pFormatCtx,
                           AVCodecContext *pCodecCtx,
                           AVStream* pStream,
                           int pStreamIndex,
                           IFFmpegWrapper *pInterface) {
    gFormatCtx = pFormatCtx;
    gCodecCtx = pCodecCtx;
    gStream = pStream;
    gStreamIndex = pStreamIndex;
    gInterface = pInterface;
}

bool AudioDecoder::prepare() {
    gBufferSize = AV_MAX_AUDIO_FRAME_SIZE; 
    // ^ this constant is equal to 192000 and replaces the deprecated 
    //   AVCODEC_MAX_AUDIO_FRAME_SIZE

    gBuffer = (short*) av_malloc(gBufferSize);
    gSwrCtx = swr_alloc();
    gSwrCtx = swr_alloc_set_opts(
        gSwrCtx, (int64_t) gCodecCtx->channel_layout, AV_SAMPLE_FMT_S16,
        gCodecCtx->sample_rate, gCodecCtx->channel_layout,
        gCodecCtx->sample_fmt, gCodecCtx->sample_rate, 0, NULL
    );
    swr_init(gSwrCtx);
    return gBuffer != NULL;
}

void *AudioDecoder::decodeInThread() {
    int         status, dataSize, len;
    AVPacket    avPkt;
    AVFrame     *pFrame     = av_frame_alloc();

    while(av_read_frame(gFormatCtx, &avPkt)>=0) {
        // Is this packet from the audio stream?
        if(avPkt.stream_index == gStreamIndex) {
            len = avcodec_decode_audio4(gStream->codec, pFrame, &status, &avPkt);
            if(len < 0) {
                break;
            }
            if (status) {
                dataSize = av_samples_get_buffer_size(
                    NULL, gCodecCtx->channels, pFrame->nb_samples,
                    gCodecCtx->sample_fmt, 1
                );
                uint8_t* buffer = (uint8_t*)av_malloc(sizeof(uint8_t) * dataSize);
                swr_convert(
                    gSwrCtx, &buffer, dataSize,
                    (const uint8_t **) pFrame->data,
                    pFrame->nb_samples
                );
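                // Note: swr_convert() takes out_count as the number of samples
                // per channel, but dataSize here is a byte count.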
                memcpy(gBuffer, buffer, dataSize);
                av_free(buffer);

                // Calling callback for JNI bridging
                gInterface->onStreamDecoding((uint8_t*)gBuffer, dataSize, gStreamIndex);
            }
        }

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&avPkt);
        av_packet_unref(&avPkt);
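        // (av_free_packet() is the deprecated equivalent of av_packet_unref(),
        // so the packet is unreferenced twice here; one call is redundant.)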
    }
    av_free(pFrame);
    stop();
    return NULL;
}

bool AudioDecoder::start() {
    //  ^ Problematic function, using pthread the problem remains 
    //    except for commenting out this line
    decodeInThread();
    return true;
}

bool AudioDecoder::stop() {
    av_free(gBuffer); // gBuffer was allocated with av_malloc(), so use av_free()
    swr_free(&gSwrCtx);
    avcodec_close(gCodecCtx);
    return true;
}

    videodec.cpp

    VideoDecoder::VideoDecoder(AVFormatContext *pFormatCtx,
                           AVCodecContext *pCodecCtx,
                           AVStream* pStream,
                           int pStreamIndex,
                           IFFmpegWrapper *pInterface) {
    gFormatCtx = pFormatCtx;
    gCodecCtx = pCodecCtx;
    gStream = pStream;
    gStreamIndex = pStreamIndex;
    gInterface = pInterface;
}

bool VideoDecoder::prepare() {
    return true; // dummy function yet
}

void *VideoDecoder::decodeInThread() {
    AVPacket            avPkt;
    int                 vWidth = gCodecCtx->width,
                        vHeight = gCodecCtx->height,
                        status, len,
                        dataSize = avpicture_get_size(AV_PIX_FMT_RGB32, vWidth, vHeight),
                        packetSize, tVideoFrames = 0;
    struct SwsContext   *img_convert_ctx = NULL;

    gBuffer = (short*) av_mallocz((size_t)dataSize);

    while(av_read_frame(gFormatCtx, &avPkt)>=0) {
        gFrame = avcodec_alloc_frame();
        // Is this packet from the video stream?
        if(avPkt.stream_index == gStreamIndex) {
            packetSize = avPkt.size;
            avpicture_fill((AVPicture*) gFrame,
                (const uint8_t*) gBuffer,
                gCodecCtx->pix_fmt,
                gCodecCtx->width,
                gCodecCtx->height
            );
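            // Note: avpicture_fill() points gFrame's planes at gBuffer using
            // the codec's pixel format, but avcodec_decode_video2() below
            // replaces the frame's data pointers with the decoder's own
            // buffers, so this fill has no lasting effect.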

            avcodec_decode_video2(gCodecCtx, gFrame, &status, &avPkt);
            if(!status || gFrame == NULL || packetSize == 0) {
                tVideoFrames++;
                continue;
            }
            AVPixelFormat pxf;

            pxf = AV_PIX_FMT_BGR32;

            convertYuv2Rgb(pxf, gFrame, dataSize);
            tVideoFrames++;
            gInterface->onStreamDecoding((uint8_t*)gBuffer, dataSize, gStreamIndex);
        }
        av_free(gFrame);

        // Free the packet that was allocated by av_read_frame
        av_free_packet(&avPkt);
        av_packet_unref(&avPkt);
    }

    stop();
    return NULL;
}

short* VideoDecoder::convertYuv2Rgb(AVPixelFormat pxf, AVFrame* frame, int length) {
    // Convert video frame from YUV to RGB24
    AVFrame         *frameRGB   = av_frame_alloc();
    AVPixelFormat   output_pxf  = pxf;

    avpicture_fill((AVPicture *)frameRGB, (uint8_t*)gBuffer, output_pxf,
                   gCodecCtx->width, gCodecCtx->height);
    const int width = gCodecCtx->width, height = gCodecCtx->height;
    SwsContext* img_convert_ctx = sws_getContext(width, height,
                                     gCodecCtx->pix_fmt,
                                     width, height, output_pxf, SWS_BICUBIC,
                                     NULL, NULL, NULL);


    if(img_convert_ctx == NULL) {
        LOGE(10, "[ERROR] Cannot initialize the conversion context!");
        sws_freeContext(img_convert_ctx);
        return NULL;
    }

    int ret = sws_scale(img_convert_ctx, (const uint8_t* const*)frame->data, frame->linesize, 0,
                        gCodecCtx->height, frameRGB->data, frameRGB->linesize);
    if(frameRGB->data[0] == NULL) {
        LOGE(10, "[ERROR] SWS_Scale failed");
    }
    av_frame_unref(frameRGB); // unref before freeing the frame itself
    av_free(frameRGB);
    sws_freeContext(img_convert_ctx);
    return gBuffer;
}

bool VideoDecoder::start() {
    decodeInThread(); 
    //  ^ Problematic function, using pthread the problem remains 
    //    except for commenting out this line
    return true;
}

bool VideoDecoder::stop() {
    av_free(gFrame);
    avcodec_close(gCodecCtx);
    av_free(gBuffer);
    return true;
}

    mplayer.cpp

    void IPlayerWrapper::onResult(int cmdId, int resultCode) {
    JNIEnv* env;
    int attachResult = attachEnv(&env);
    if(attachResult < 2) {
        jclass jmPlay = env->GetObjectClass(instance);
        if(cmdId == FFMPEG_COMMAND_FIND_STREAMS) {
            gWrapper->openCodecs();
        } else if(cmdId == FFMPEG_COMMAND_OPEN_CODECS) {
            jmethodID onResultMid = env->GetMethodID(jmPlay, "onResult", "(II)V");
            env->CallVoidMethod(instance, onResultMid, (jint)cmdId, (jint)resultCode);
        }
        if(attachResult == 1) {
            gVM->DetachCurrentThread();
        }
    }
}

void IPlayerWrapper::onStreamDecoding(uint8_t* buffer, int bufferLen, int streamIndex) {
    JNIEnv* env;
    int attachResult = attachEnv(&env);
    if(attachResult < 2) {
        jclass jmPlay = env->GetObjectClass(instance);
        jBuffer = env->NewByteArray((jsize) bufferLen);
        env->SetByteArrayRegion(jBuffer, 0, (jsize) bufferLen, (jbyte *) buffer);
        if(streamIndex == gWrapper->gAudioStreamIndex) {
            jmethodID renderAudioMid = env->GetMethodID(jmPlay, "renderAudio", "([BI)V");
            env->CallVoidMethod(instance, renderAudioMid, jBuffer, bufferLen);
        } else if(streamIndex == gWrapper->gVideoStreamIndex) {
            jmethodID renderVideoMid = env->GetMethodID(jmPlay, "renderVideo", "([BI)V");
            env->CallVoidMethod(instance, renderVideoMid, jBuffer, bufferLen);
        }
        env->ReleaseByteArrayElements(jBuffer, (jbyte *)env->GetByteArrayElements(jBuffer, NULL), JNI_ABORT);
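        // Note: this Get/Release pair fetches a copy of the array only to
        // discard it (JNI_ABORT); with SetByteArrayRegion already used above,
        // it is unnecessary.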
        env->DeleteLocalRef(jBuffer);
        env->DeleteLocalRef(jmPlay);
        if(attachResult == 1) {
            gVM->DetachCurrentThread();
        }
    }
}


JNIEXPORT void JNICALL naInit(JNIEnv *env, jobject instance) {
    gInterface = new IPlayerWrapper();
    gInterface->instance = env->NewGlobalRef(instance);
    gWrapper = new FFmpegWrapper(gDebugMode, gInterface);
}

JNIEXPORT void JNICALL naPlay(JNIEnv *env, jobject instance, int streamType) {
    gVMArgs.version = JNI_VERSION_1_6;
    gVMArgs.name = NULL;
    gVMArgs.group = NULL;
    gWrapper->setPlaybackState(FFMPEG_PLAYBACK_PLAYING);
    //gWrapper->startDecoding();
}

JNIEXPORT void JNICALL naStartAudioDecoding(JNIEnv *env, jobject instance) {
    gWrapper->startDecoding(gWrapper->gAudioStreamIndex);
}

JNIEXPORT void JNICALL naStartVideoDecoding(JNIEnv *env, jobject instance) {
    gWrapper->startDecoding(gWrapper->gVideoStreamIndex);
}

    FFmpegPlayer.java

    @Override
    public void start() throws IllegalStateException {
        if(tracks != null) {
            naPlay();
            Log.d(MPLAY_TAG, "Playing...");
            FFAudioTrack audio_track = null;
            FFVideoTrack video_track = null;
            for(int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
                if(tracks.get(tracks_index) instanceof FFAudioTrack) {
                    audio_track = (FFAudioTrack) tracks.get(tracks_index);
                } else if(tracks.get(tracks_index) instanceof FFVideoTrack) {
                    video_track = (FFVideoTrack) tracks.get(tracks_index);
                }
            }

            naStartAudioDecoding();
            naStartVideoDecoding();
        }
    }

    private void renderAudio(final byte[] buffer, final int length) {
        FFAudioTrack track = null;
        if (buffer == null) {
            Log.e(MPLAY_TAG, "Audio buffer is empty");
            return;
        }

        if (!prepared_audio_buffer) {
            for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
                if (tracks.get(tracks_index) instanceof FFAudioTrack) {
                    track = (FFAudioTrack) tracks.get(tracks_index);
                }
            }
            if (track == null) {
                Log.e(MPLAY_TAG, "Audio track not found");
                return;
            }
            int ch_config = track.channels == 2 ?
                    AudioFormat.CHANNEL_CONFIGURATION_STEREO :
                    AudioFormat.CHANNEL_CONFIGURATION_MONO;

            audio_track = new AudioTrack(AudioManager.STREAM_MUSIC, (int) track.sample_rate,
                    ch_config,
                    AudioFormat.ENCODING_PCM_16BIT, length, AudioTrack.MODE_STREAM);

            minAudioBufferSize = AudioRecord.getMinBufferSize(
                    (int) (track.sample_rate),
                    ch_config,
                    AudioFormat.ENCODING_PCM_16BIT);
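            // Note: minAudioBufferSize is computed with AudioRecord.getMinBufferSize()
            // (the capture-side API) and is never applied; audio_track above is
            // created with the incoming packet length as its buffer size instead.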

            audio_track.play();
            prepared_audio_buffer = true;
        }

        try {
            audio_track.write(buffer, 0, length);
        } catch (Exception ignored) {
        }
    }

    private void renderVideo(final byte[] buffer, final int length) {
        Canvas c;
        videoBuffer = buffer;
        FFVideoTrack track = null;
        for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
            if (tracks.get(tracks_index) instanceof FFVideoTrack) {
                track = (FFVideoTrack) tracks.get(tracks_index);
            }
        }
        if (track != null) {
            int frame_width = track.frame_size[0];
            int frame_height = track.frame_size[1];
            if (frame_width > 0 && frame_height > 0) {
                minVideoBufferSize = frame_width * frame_height * 4;
                try {
                    // RGB_565  == 65K colours (16 bit)
                    // RGB_8888 == 16.7M colours (24 bit w/ alpha ch.)
                    int bpp = Build.VERSION.SDK_INT > 9 ? 16 : 24;
                    Bitmap.Config bmp_config =
                            bpp == 24 ? Bitmap.Config.RGB_565 : Bitmap.Config.ARGB_8888;
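                    // Note: this maps 24 bpp to RGB_565, the reverse of the
                    // comment above; the native side fills a 4-bytes-per-pixel
                    // RGB32 buffer, which matches ARGB_8888.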
                    if(videoBuffer != null && holder != null) {
                        holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
                        if((c = holder.lockCanvas()) == null) {
                            Log.d(MPLAY_TAG, "Lock canvas failed");
                            return;
                        }
                        ByteBuffer bbuf =
                                ByteBuffer.allocateDirect(minVideoBufferSize);
                        bbuf.rewind();
                        for(int i = 0; i < videoBuffer.length; i++) {
                            bbuf.put(i, videoBuffer[i]);
                        }
                        bbuf.rewind();
                        Bitmap bmp = Bitmap.createBitmap(frame_width, frame_height, bmp_config);
                        bmp.copyPixelsFromBuffer(bbuf);
                        float aspect_ratio = (float) frame_width / (float) frame_height;
                        int scaled_width = (int)(aspect_ratio * (c.getHeight()));
                        videoBuffer = null;
                        c.drawBitmap(bmp,
                                null,
                                new RectF(
                                        ((c.getWidth() - scaled_width) / 2), 0,
                                        ((c.getWidth() - scaled_width) / 2) + scaled_width,
                                        c.getHeight()),
                                null);
                        holder.unlockCanvasAndPost(c);
                        bmp.recycle();
                        bbuf.clear();
                    } else {
                        Log.d(MPLAY_TAG, "Video frame buffer is null");
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                } catch (OutOfMemoryError oom) {
                    oom.printStackTrace();
                    stop();
                }
            }
        }
    }

    I used FFmpeg version 2.8.11 and wrote a wrapper specifically for an application that supports Android 2.1 and higher. After switching to an OOP, multithreaded design, I started noticing these decoder artifacts, and the audio buffer often times out, as reported by Android's AudioFlinger in the logs. It happens on both an AVD and real devices.

    04-26 20:56:51.215 1333-1372/? I/AudioFlinger: BUFFER TIMEOUT: remove(4097) from active list on thread 0xaf183540

    Playing files encoded with other codecs does not solve the problem, so the issue lies in the implementation of the wrapper itself, and I cannot figure out where.
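
    One thing to check on the Java side: renderAudio() above creates the AudioTrack with the incoming packet length as its buffer size, and the minAudioBufferSize it computes comes from AudioRecord.getMinBufferSize() (the capture-side API) and is never used. A minimal sketch of the usual playback-side setup (illustrative names, not code from the question):

    import android.media.AudioFormat;
    import android.media.AudioManager;
    import android.media.AudioTrack;

    public final class PcmPlayer {
        private final AudioTrack track;

        public PcmPlayer(int sampleRate, boolean stereo) {
            int channelConfig = stereo
                    ? AudioFormat.CHANNEL_OUT_STEREO
                    : AudioFormat.CHANNEL_OUT_MONO;
            // Size the playback buffer from the playback-side minimum, with
            // headroom, so AudioFlinger is less likely to report BUFFER TIMEOUT.
            int minSize = AudioTrack.getMinBufferSize(
                    sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
            track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                    channelConfig, AudioFormat.ENCODING_PCM_16BIT,
                    minSize * 4, AudioTrack.MODE_STREAM);
            track.play();
        }

        public void write(byte[] pcm, int length) {
            track.write(pcm, 0, length); // blocks until the data is buffered
        }
    }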

    


  • Tap to record like in Vine using javacv

    8 December 2015, by human123

    I am trying to implement a tap-to-record feature like in Vine. A sample for handling recording (not touch-to-record) provided with javacv is https://github.com/bytedeco/javacv/blob/master/samples/RecordActivity.java. I am trying to modify it so that in the onPreviewFrame method, frames are added to a buffer only while the user has a finger placed on the screen. These frames are then combined into the final video in the stopRecording method.

    The issue is that if I set the timestamp as in the code snippet below (in the stopRecording method),

    if (t > recorder.getTimestamp())
    {
       recorder.setTimestamp(t);
    }

    the behavior is as follows.

    Case 1

    If I tap the screen to record for 2 seconds, take my finger off the screen for 3 seconds, and then place it back on the screen to record for another 4 seconds, the resulting video behaves like this:

    For the first 2 seconds the video has recorded content. For the next 3 seconds (while the finger was off the screen), the video just shows the last frame recorded while the finger was still on the screen. Then the video has recorded content for the next 4 seconds. So there seems to be an issue in handling the recording while the finger is off the screen.

    Case 2

    Next, I removed the code that sets the timestamp on the recorder (the snippet above) from the stopRecording method.

    Now the resulting video (for the same steps as in case 1) does not contain the middle 3 seconds when the finger was off the screen, which is what is required. But the video plays at a faster rate, so it seems the timestamp must be set for the video to play at the normal rate.
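
    In other words, the recorder timestamp should advance only with unpaused time. A minimal sketch of that bookkeeping (illustrative names; javacv's FFmpegFrameRecorder timestamps are in microseconds):

    // Bookkeeping sketch: exclude paused wall-clock time from the timestamp.
    static final class PauseAwareClock {
        long recordStartMs;  // set when recording starts
        long pausedTotalMs;  // incremented by the length of each finished pause

        long frameTimestampUs(long nowMs) {
            // Elapsed time minus everything spent paused, in microseconds,
            // which is what FFmpegFrameRecorder.setTimestamp() expects.
            return 1000L * (nowMs - recordStartMs - pausedTotalMs);
        }
    }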

    Full code of my activity is given below. (Please note that video recording is mainly handled in the onPreviewFrame and stopRecording methods.)

    public class TouchToRecordActivity extends Activity implements OnClickListener, View.OnTouchListener {

    private final static String CLASS_LABEL = "TouchToRecordActivity";
    private final static String LOG_TAG = CLASS_LABEL;

    private String ffmpeg_link = "/mnt/sdcard/stream.mp4";

    long startTime = 0;
    boolean recording = false;
    boolean rec = false;

    private FFmpegFrameRecorder recorder;

    private boolean isPreviewOn = false;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 640;
    private int imageHeight = 480;
    private int destWidth = 480;
    private int frameRate = 30;

    /* audio data getting thread */
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;
    private Thread audioThread;
    volatile boolean runAudioThread = true;

    /* video data getting thread */
    private Camera cameraDevice;
    private CameraView cameraView;

    private Frame yuvImage = null;

    /* layout setting */
    private final int bg_screen_bx = 232;
    private final int bg_screen_by = 128;
    private final int bg_screen_width = 700;
    private final int bg_screen_height = 500;
    private final int bg_width = 1123;
    private final int bg_height = 715;
    private final int live_width = 640;
    private final int live_height = 480;
    private int screenWidth, screenHeight;
    private Button btnRecorderControl;

    /* The number of seconds in the continuous record loop (or 0 to disable loop). */
    final int RECORD_LENGTH = 20;
    Frame[] images;
    long[] timestamps;
    ShortBuffer[] samples;
    int imagesIndex, samplesIndex;

    long firstTime = 0;
    long startPauseTime = 0;
    long totalPauseTime = 0;
    long pausedTime = 0;
    long stopPauseTime = 0;
    long totalTime = 0;

    long totalRecordedTS = 0;

    private TextView txtTimer;
    private Handler mHandler = new Handler();

    @Override
    public void onCreate(Bundle savedInstanceState) {
       super.onCreate(savedInstanceState);
       setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

       setContentView(R.layout.touch_main);

       initLayout();
    }

    @Override
    protected void onDestroy() {
       super.onDestroy();

       recording = false;

       if (cameraView != null) {
           cameraView.stopPreview();
       }

       if (cameraDevice != null) {
           cameraDevice.stopPreview();
           cameraDevice.release();
           cameraDevice = null;
       }
    }


    private void initLayout() {

       /* get size of screen */
       Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
       screenWidth = display.getWidth();
       screenHeight = display.getHeight();
       RelativeLayout.LayoutParams layoutParam = null;
       LayoutInflater myInflate = null;
       myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
       RelativeLayout topLayout = new RelativeLayout(this);
       setContentView(topLayout);
       LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.touch_main, null);
       layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
       topLayout.addView(preViewLayout, layoutParam);

       txtTimer = (TextView) preViewLayout.findViewById(R.id.txtTimer);

       /* add control button: start and stop */
       btnRecorderControl = (Button) findViewById(R.id.recorder_control);
       btnRecorderControl.setText("Start");
       btnRecorderControl.setOnClickListener(this);

       /* add camera view */
       int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
       int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
       int prev_rw, prev_rh;
       if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
           prev_rh = display_height_d;
           prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
       } else {
           prev_rw = display_width_d;
           prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
       }
       layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
       layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
       layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);

       cameraDevice = Camera.open();
       Log.i(LOG_TAG, "cameara open");
       cameraView = new CameraView(this, cameraDevice);
       topLayout.addView(cameraView, layoutParam);
       topLayout.setOnTouchListener(this);
       Log.i(LOG_TAG, "cameara preview start: OK");
    }

    //---------------------------------------
    // initialize ffmpeg_recorder
    //---------------------------------------
    private void initRecorder() {

       Log.w(LOG_TAG, "init recorder");

       if (RECORD_LENGTH > 0) {
           imagesIndex = 0;
           images = new Frame[RECORD_LENGTH * frameRate];
           timestamps = new long[images.length];
           for (int i = 0; i < images.length; i++) {
               images[i] = new Frame(destWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
               timestamps[i] = -1;
           }
       } else if (yuvImage == null) {
           yuvImage = new Frame(destWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
           Log.i(LOG_TAG, "create yuvImage");
       }
       Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
       recorder = new FFmpegFrameRecorder(ffmpeg_link, destWidth, imageHeight, 1);
       recorder.setFormat("mp4");
       recorder.setVideoCodecName("libx264");
       recorder.setSampleRate(sampleAudioRateInHz);
       // Set in the surface changed method
       recorder.setFrameRate(frameRate);

       Log.i(LOG_TAG, "recorder initialize success");

       audioRecordRunnable = new AudioRecordRunnable();
       audioThread = new Thread(audioRecordRunnable);
       runAudioThread = true;
    }

    public void startRecording() {

       initRecorder();

       mHandler.removeCallbacks(mUpdateTimeTask);
       mHandler.postDelayed(mUpdateTimeTask, 100);

       try {
           recorder.start();
           startTime = System.currentTimeMillis();
           recording = true;
           audioThread.start();

       } catch (FFmpegFrameRecorder.Exception e) {
           e.printStackTrace();
       }
    }

    public void stopRecording() {

       runAudioThread = false;
       try {
           audioThread.join();
       } catch (InterruptedException e) {
           e.printStackTrace();
       }
       audioRecordRunnable = null;
       audioThread = null;

       if (recorder != null && recording) {
           if (RECORD_LENGTH > 0) {
               Log.v(LOG_TAG, "Writing frames");
               try {
                   int firstIndex = imagesIndex % samples.length;
                   int lastIndex = (imagesIndex - 1) % images.length;
                   if (imagesIndex <= images.length) {
                       firstIndex = 0;
                       lastIndex = imagesIndex - 1;
                   }
                   if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
                       startTime = 0;
                   }
                   if (lastIndex < firstIndex) {
                       lastIndex += images.length;
                   }
                   int videoCounter = 0;
                   for (int i = firstIndex; i <= lastIndex; i++) {
                       if (timestamps[i] == -1) {
                           Log.v(LOG_TAG, "frame not recorded");
                       }
                       if (timestamps[i] != -1) {
                           long t = timestamps[i % timestamps.length] - startTime;
                           if (t >= 0) {

                               videoCounter++;

                               /*if (((i % images.length) != 0) && images[i % images.length] != images[(i % images.length) - 1]) {
                                   if (t > recorder.getTimestamp()) {
                                       recorder.setTimestamp(t);
                                   }*/
                                   Log.v(LOG_TAG, "imageIndex=" + (i % images.length));
                                   recorder.record(images[i % images.length]);
                           /*    }*/
                               Log.v(LOG_TAG, "videoCounter=" + videoCounter);
                           }
                       }
                   }

                   firstIndex = samplesIndex % samples.length;
                   lastIndex = (samplesIndex - 1) % samples.length;
                   if (samplesIndex <= samples.length) {
                       firstIndex = 0;
                       lastIndex = samplesIndex - 1;
                   }
                   if (lastIndex < firstIndex) {
                       lastIndex += samples.length;
                   }
                   for (int i = firstIndex; i <= lastIndex; i++) {
                       if (timestamps[i] != -1) {
                           recorder.recordSamples(samples[i % samples.length]);
                       }
                   }
               } catch (FFmpegFrameRecorder.Exception e) {
                   Log.v(LOG_TAG, e.getMessage());
                   e.printStackTrace();
               }
           }

           recording = false;
           Log.v(LOG_TAG, "Finishing recording, calling stop and release on recorder");
           try {
               recorder.stop();
               recorder.release();
           } catch (FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
           recorder = null;

       }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {

       if (keyCode == KeyEvent.KEYCODE_BACK) {
           if (recording) {
               stopRecording();
           }

           finish();

           return true;
       }

       return super.onKeyDown(keyCode, event);
    }

    @Override
    public boolean onTouch(View view, MotionEvent motionEvent) {
       switch (motionEvent.getAction()) {
           case MotionEvent.ACTION_DOWN:
               Log.v(LOG_TAG, "ACTION_DOWN" + recording);

               if (!recording) {
                   startRecording();
               } else {
                   stopPauseTime = System.currentTimeMillis();
                   totalPauseTime = stopPauseTime - startPauseTime - ((long) (1.0 / (double) frameRate) * 1000);
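                // Note: (long) (1.0 / (double) frameRate) truncates to 0, so this
                // term subtracts nothing (the same applies in setTotalVideoTime()).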
                   pausedTime += totalPauseTime;
               }
               rec = true;
               setTotalVideoTime();
               btnRecorderControl.setText(getResources().getString(R.string.stop));
               break;
           case MotionEvent.ACTION_MOVE:
               rec = true;
               setTotalVideoTime();
               break;
           case MotionEvent.ACTION_UP:
               Log.v(LOG_TAG, "ACTION_UP");
               rec = false;
               startPauseTime = System.currentTimeMillis();
               break;
       }
       return true;
    }

    private Runnable mUpdateTimeTask = new Runnable() {
       public void run() {
           if (recording) {
               setTotalVideoTime();
           }
           mHandler.postDelayed(this, 500);
       }
    };

    private synchronized void setTotalVideoTime() {
       totalTime = System.currentTimeMillis() - firstTime - pausedTime - ((long) (1.0 / (double) frameRate) * 1000);
       if (totalTime > 0)
           txtTimer.setText(getRecordingTimeFromMillis(totalTime));
    }

    private String getRecordingTimeFromMillis(long millis) {
       String strRecordingTime = null;
       int seconds = (int) (millis / 1000);
       int minutes = seconds / 60;
       int hours = minutes / 60;

       if (hours >= 0 && hours < 10)
           strRecordingTime = "0" + hours + ":";
       else
           strRecordingTime = hours + ":";

       if (hours > 0)
           minutes = minutes % 60;

       if (minutes >= 0 && minutes < 10)
           strRecordingTime += "0" + minutes + ":";
       else
           strRecordingTime += minutes + ":";

       seconds = seconds % 60;

       if (seconds >= 0 && seconds < 10)
           strRecordingTime += "0" + seconds;
       else
           strRecordingTime += seconds;

       return strRecordingTime;

    }


    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

       @Override
       public void run() {
           android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

           // Audio
           int bufferSize;
           ShortBuffer audioData;
           int bufferReadResult;

           bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                   AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
           audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                   AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

           if (RECORD_LENGTH > 0) {
               samplesIndex = 0;
               samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
               for (int i = 0; i < samples.length; i++) {
                   samples[i] = ShortBuffer.allocate(bufferSize);
               }
           } else {
               audioData = ShortBuffer.allocate(bufferSize);
           }

           Log.d(LOG_TAG, "audioRecord.startRecording()");
           audioRecord.startRecording();

           /* ffmpeg_audio encoding loop */
           while (runAudioThread) {
               if (RECORD_LENGTH > 0) {
                   audioData = samples[samplesIndex++ % samples.length];
                   audioData.position(0).limit(0);
               }
               //Log.v(LOG_TAG,"recording? " + recording);
               bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
               audioData.limit(bufferReadResult);
               if (bufferReadResult > 0) {
                   Log.v(LOG_TAG, "bufferReadResult: " + bufferReadResult);
                   // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                   // Why?  Good question...
                   if (recording && rec) {
                       Log.v(LOG_TAG, "Recording audio");
                       if (RECORD_LENGTH <= 0) try {
                           recorder.recordSamples(audioData);
                           //Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                       } catch (FFmpegFrameRecorder.Exception e) {
                           Log.v(LOG_TAG, e.getMessage());
                           e.printStackTrace();
                       }
                   }
               }
           }
           Log.v(LOG_TAG, "AudioThread Finished, release audioRecord");

           /* encoding finish, release recorder */
           if (audioRecord != null) {
               audioRecord.stop();
               audioRecord.release();
               audioRecord = null;
               Log.v(LOG_TAG, "audioRecord released");
           }
       }
    }

    //---------------------------------------------
    // camera thread, gets and encodes video data
    //---------------------------------------------
    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

       private SurfaceHolder mHolder;
       private Camera mCamera;

       public CameraView(Context context, Camera camera) {
           super(context);
           Log.w("camera", "camera view");
           mCamera = camera;
           mHolder = getHolder();
           mHolder.addCallback(CameraView.this);
           mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
           mCamera.setPreviewCallback(CameraView.this);
       }

       @Override
       public void surfaceCreated(SurfaceHolder holder) {
           try {
               stopPreview();
               mCamera.setPreviewDisplay(holder);
           } catch (IOException exception) {
               mCamera.release();
               mCamera = null;
           }
       }

       public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
           stopPreview();

           Camera.Parameters camParams = mCamera.getParameters();
           List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
           // Sort the list in ascending order
           Collections.sort(sizes, new Comparator<Camera.Size>() {

               public int compare(final Camera.Size a, final Camera.Size b) {
                   return a.width * a.height - b.width * b.height;
               }
           });

           camParams.setPreviewSize(imageWidth, imageHeight);

           Log.v(LOG_TAG, "Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);

           camParams.setPreviewFrameRate(frameRate);
           Log.v(LOG_TAG, "Preview Framerate: " + camParams.getPreviewFrameRate());

           mCamera.setParameters(camParams);

           List<Camera.Size> videoSizes = mCamera.getParameters().getSupportedVideoSizes();

           // Set the holder (which might have changed) again
           try {
               mCamera.setPreviewDisplay(holder);
               mCamera.setPreviewCallback(CameraView.this);
               startPreview();
           } catch (Exception e) {
               Log.e(LOG_TAG, "Could not set preview display in surfaceChanged");
           }
       }

       @Override
       public void surfaceDestroyed(SurfaceHolder holder) {
           try {
               mHolder.addCallback(null);
               mCamera.setPreviewCallback(null);
           } catch (RuntimeException e) {
               // The camera has probably just been released, ignore.
           }
       }

       public void startPreview() {
           if (!isPreviewOn && mCamera != null) {
               isPreviewOn = true;
               mCamera.startPreview();
           }
       }

       public void stopPreview() {
           if (isPreviewOn && mCamera != null) {
               isPreviewOn = false;
               mCamera.stopPreview();
           }
       }

       @Override
       public void onPreviewFrame(byte[] data, Camera camera) {
           if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
               startTime = System.currentTimeMillis();
               return;
           }
           if (RECORD_LENGTH > 0) {
               int i = imagesIndex++ % images.length;
               Log.v(LOG_TAG, "recording:" + recording + "rec:" + rec);
               if (recording && rec) {
                   yuvImage = images[i];
                   timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
                   totalRecordedTS++;
               } else {
                   Log.v(LOG_TAG, "recording is paused");
                   yuvImage = null;
                   timestamps[i] = -1;
               }
           }

           /* get video data */
           if (yuvImage != null && recording && rec) {
               if (data.length != imageWidth * imageHeight) {
                   Camera.Size sz = camera.getParameters().getPreviewSize();
                   imageWidth = sz.width;
                   imageHeight = sz.height;
                   destWidth = imageHeight;
                   Log.v(LOG_TAG, "data length:" + data.length);
               }

               ByteBuffer bb = (ByteBuffer) yuvImage.image[0].position(0); // resets the buffer
               int start = 2 * ((imageWidth - destWidth) / 4); // this must be even
               for (int row = 0; row < imageHeight * 3 / 2; row++) {
                   bb.put(data, start, destWidth);
                   start += imageWidth;
               }

           }
       }
    }

    @Override
    public void onClick(View v) {
       if (!recording) {
           startRecording();
           Log.w(LOG_TAG, "Start Button Pushed");
           btnRecorderControl.setText("Stop");
       } else {
           // This will trigger the audio recording loop to stop and then set isRecorderStart = false;
           stopRecording();
           Log.w(LOG_TAG, "Stop Button Pushed");
           btnRecorderControl.setText("Start");
       }
    }}

    Changes made as per Alex Cohn’s suggestions

    Suggestion 1 - Estimate average frame rate

       public void stopRecording() {

      ..............................

                               if (((i % images.length) != 0) && images[i % images.length] != images[(i % images.length) - 1]) {
                                   if (t > recorder.getTimestamp()) {
                                       t += 1000000 / frameRate;
                                       recorder.setTimestamp(t);
                                   }

                                   recorder.record(images[i % images.length]);
                               }
                ..........................................


    }

    The change made was adding t += 1000000 / frameRate; but this caused the video to freeze (as in case 1 described above) in the portions where the finger was off the screen.

    Suggestion 2 - Modification in onPreviewFrame()

    long[] timestampsForRecorder;
    private void initRecorder() {

       Log.w(LOG_TAG, "init recorder");

       if (RECORD_LENGTH > 0) {
          .......................................................
           timestampsForRecorder = new long[images.length];
           for (int i = 0; i < images.length; i++) {
               images[i] = new Frame(destWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
               timestamps[i] = -1;
               timestampsForRecorder[i] = -1;
           }
       } else if (yuvImage == null) {
           yuvImage = new Frame(destWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
           Log.i(LOG_TAG, "create yuvImage");
       }
       ...................................................
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
           if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
               startTime = SystemClock.elapsedRealtime();
               return;
           }
           if (RECORD_LENGTH > 0) {
               int i = imagesIndex++ % images.length;
               Log.v(LOG_TAG, "recording:" + recording + "rec:" + rec);
               if (recording && rec) {
                   yuvImage = images[i];
                   long thisFrameTime = SystemClock.elapsedRealtime();
                   timestamps[i] = thisFrameTime;
                   long lastFrameTime = timestamps[(int) (imagesIndex == 0 ? startTime : ((imagesIndex-1) % images.length))];
                   Log.v(LOG_TAG, "lastFrameTime:" + lastFrameTime+",stopPauseTime:" + stopPauseTime);
                   if (lastFrameTime > stopPauseTime) {
                       timestampsForRecorder[i] = 1000 * (thisFrameTime - Math.max(stopPauseTime, lastFrameTime));
                   }
               }
           }

          .....................................................
       }

    public void stopRecording() {

       .......................................................

       if (recorder != null && recording) {
           if (RECORD_LENGTH > 0) {
               Log.v(LOG_TAG, "Writing frames");
               try {
                   int firstIndex = imagesIndex % samples.length;
                   int lastIndex = (imagesIndex - 1) % images.length;
                   if (imagesIndex <= images.length) {
                       firstIndex = 0;
                       lastIndex = imagesIndex - 1;
                   }
                   if ((startTime = timestampsForRecorder[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
                       startTime = 0;
                   }
                   if (lastIndex < firstIndex) {
                       lastIndex += images.length;
                   }
                   for (int i = firstIndex; i <= lastIndex; i++) {

                       if (timestampsForRecorder[i] != -1) {
                           long t = timestampsForRecorder[i % timestampsForRecorder.length] - startTime;
                           if (t >= 0) {

                               if (((i % images.length) != 0) && images[i % images.length] != images[(i % images.length) - 1]) {
                                   if (t > recorder.getTimestamp()) {
                                       recorder.setTimestamp(t);
                                   }
                                   Log.v(LOG_TAG, "imageIndex=" + (i % images.length));
                                   recorder.record(images[i % images.length]);
                               }
                           }
                       }
                   }
                   .............................................
               } catch (FFmpegFrameRecorder.Exception e) {
                  .................................
               }
           }

           ...........................................

       }
    }

    The video recorded with this change had the issue from case 2 mentioned above, i.e. it played at a faster rate.

  • Decoding an H.264 stream with MediaCodec, dequeueOutputBuffer always returns -1

    20 September 2016, by bitto bitta

    I am trying to use the MediaCodec API to decode a live screen-capture stream sent from a PC by ffmpeg.

    For the sender (PC, ffmpeg)

    I use this command:

    ffmpeg -re -f gdigrab -s 1920x1080 -threads 4 -i desktop -vcodec libx264 -pix_fmt yuv420p -tune zerolatency -profile:v baseline -flags global_header  -s 1280x720  -an -f rtp rtp://192.168.1.6:1234

    and the output looks like this:

    Output #0, rtp, to 'rtp://192.168.1.6:1234':
     Metadata:
       encoder         : Lavf56.15.104
       Stream #0:0: Video: h264 (libx264), yuv420p, 1280x720, q=-1--1, 29.97 fps, 90k tbn, 29.97 tbc
    Metadata:
     encoder         : Lavc56.14.100 libx264
    Stream mapping:
     Stream #0:0 -> #0:0 (bmp (native) -> h264 (libx264))
    SDP:
    v=0
    o=- 0 0 IN IP4 127.0.0.1
    s=No Name
    c=IN IP4 192.168.1.6
    t=0 0
    a=tool:libavformat 56.15.104
    m=video 1234 RTP/AVP 96
    a=rtpmap:96 H264/90000
    a=fmtp:96 packetization-mode=1; sprop-parameter-sets=Z0LAH9kAUAW6EAAAPpAADqYI8YMkgA==,aMuDyyA=; profile-level-id=42C01F

    Press [q] to stop, [?] for help
    frame=   19 fps=0.0 q=17.0 size=     141kB time=00:00:00.63 bitrate=1826.0kbits/
    frame=   34 fps= 32 q=17.0 size=     164kB time=00:00:01.13 bitrate=1181.5kbits/
    frame=   50 fps= 32 q=18.0 size=     173kB time=00:00:01.66 bitrate= 850.9kbits/

    For the receiver (Android MediaCodec)

    I created an activity with a surface and implemented SurfaceHolder.Callback.

    In surfaceChanged

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
       Log.i("sss", "surfaceChanged");
       if( playerThread == null ) {
           playerThread = new PlayerThread(holder.getSurface());
           playerThread.start();
       }

    }

    For PlayerThread

    class PlayerThread extends Thread {

       MediaCodec decoder;
       Surface surface;

       public PlayerThread(Surface surface) {
           this.surface = surface;
       }

       @Override
       public void run() {
           running = true;
           try {
               MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
               byte[] header = new byte[] {0,0,0,1};
               byte[] sps = Base64.decode("Z0LAH9kAUAW6EAAAPpAADqYI8YMkgA==", Base64.DEFAULT);
               byte[] pps = Base64.decode("aMuDyyA=", Base64.DEFAULT);

               byte[] header_sps = new byte[sps.length + header.length];
               System.arraycopy(header,0,header_sps,0,header.length);
               System.arraycopy(sps,0,header_sps,header.length, sps.length);

               byte[] header_pps = new byte[pps.length + header.length];
               System.arraycopy(header,0, header_pps, 0, header.length);
               System.arraycopy(pps, 0, header_pps, header.length, pps.length);

               format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
               format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
               format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1280 * 720);
    //          format.setInteger("durationUs", 63446722);
    //          format.setByteBuffer("csd-2", ByteBuffer.wrap((hexStringToByteArray("42C01E"))));                      
    //          format.setInteger(MediaFormat.KEY_COLOR_FORMAT ,MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
               Log.i("sss", "Format = " + format);

               try {
                   decoder = MediaCodec.createDecoderByType("video/avc");
                   decoder.configure(format, surface, null, 0);
                   decoder.start();

               } catch (IOException ioEx) {
                   ioEx.printStackTrace();
               }

               DatagramSocket socket = new DatagramSocket(1234);
               byte[] bytes = new byte[4096];
               DatagramPacket packet = new DatagramPacket(bytes, bytes.length);

               byte[] data;

               ByteBuffer[] inputBuffers;
               ByteBuffer[] outputBuffers;

               ByteBuffer inputBuffer;
               ByteBuffer outputBuffer;

               MediaCodec.BufferInfo bufferInfo;

               bufferInfo = new MediaCodec.BufferInfo();
               int inputBufferIndex;
               int outputBufferIndex;
               byte[] outData;

               inputBuffers = decoder.getInputBuffers();
               outputBuffers = decoder.getOutputBuffers();

               int minusCount = 0;
               byte[] prevData = new byte[65535];
                List<byte[]> playLoads = new ArrayList<>();
               int playloadSize = 0;
               while (true) {
                   try {
                       socket.receive(packet);
                       data = new byte[packet.getLength()];
                       System.arraycopy(packet.getData(), packet.getOffset(), data, 0, packet.getLength());

                    inputBufferIndex = decoder.dequeueInputBuffer(-1);
                    Log.i("sss", "inputBufferIndex = " + inputBufferIndex);
                   if (inputBufferIndex >= 0)
                   {
                       inputBuffer = inputBuffers[inputBufferIndex];
                       inputBuffer.clear();

                       inputBuffer.put(data);


                       decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
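                        // Note: `data` here is the raw UDP payload and still begins
                        // with the 12-byte RTP header, so the decoder is not being
                        // fed plain Annex-B H.264 at this point.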
    //                  decoder.flush();
                   }

                   outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000);
                   Log.i("sss", "outputBufferIndex = " + outputBufferIndex);

                   while (outputBufferIndex >= 0)
                   {
                       outputBuffer = outputBuffers[outputBufferIndex];

                       outputBuffer.position(bufferInfo.offset);
                       outputBuffer.limit(bufferInfo.offset + bufferInfo.size);

                       outData = new byte[bufferInfo.size];
                       outputBuffer.get(outData);


                       decoder.releaseOutputBuffer(outputBufferIndex, false);
                       outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0);

                   }

                   } catch (SocketTimeoutException e) {
                       Log.d("thread", "timeout");
                   }
               }
           } catch (Exception e) {
               e.printStackTrace();
           }
       }
    }

    I think the stream from ffmpeg is not the problem, because I can open it in MX Player via an SDP file.
    And if I pass this stream to a local RTSP server (via VLC) and then use MediaPlayer to fetch the RTSP stream, it works, but quite slowly.

    After I looked into the packets, I realized that:

    • the first four bytes are the header and sequence number
    • the next four bytes are the timestamp
    • the next four bytes are the source identifier

    So I cut the first 12 bytes off, combine packets with the same timestamp, and then put the result in the buffer like this.
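
    For reference, the fixed 12-byte RTP header is defined in RFC 3550; a minimal parsing sketch (illustrative, reusing the data array from the loop below, and assuming no CSRC entries so the payload starts at offset 12):

    int version     = (data[0] >> 6) & 0x03;                       // RTP version (2)
    int payloadType = data[1] & 0x7f;                              // 96 in the SDP above
    int seq         = ((data[2] & 0xff) << 8) | (data[3] & 0xff);  // sequence number
    long timestamp  = ((data[4] & 0xffL) << 24) | ((data[5] & 0xffL) << 16)
                    | ((data[6] & 0xffL) << 8)  |  (data[7] & 0xffL);
    long ssrc       = ((data[8] & 0xffL) << 24) | ((data[9] & 0xffL) << 16)
                    | ((data[10] & 0xffL) << 8) |  (data[11] & 0xffL);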

    In while(true) after received packet

                   Log.i("sss", "Received = " + data.length + " bytes");
                   Log.i("sss","prev " + prevData.length + " bytes = " + getBytesStr(prevData));
                   Log.i("sss","data " + data.length + " bytes = " + getBytesStr(data));

                           if(data[4] == prevData[4] && data[5] == prevData[5] && data[6] == prevData[6] && data[7] == prevData[7]){
                               byte[] playload = new byte[prevData.length -12];
                               System.arraycopy(prevData,12,playload, 0, prevData.length-12);
                               playLoads.add(playload);
                               playloadSize += playload.length;
                               Log.i("sss", "Same timeStamp playload " + playload.length + " bytes = " + getBytesStr(playload));
                           } else {
                               if(playLoads.size() > 0){
                                   byte[] playload = new byte[prevData.length -12];
                                   System.arraycopy(prevData,12,playload, 0, prevData.length-12);
                                   playLoads.add(playload);
                                   playloadSize += playload.length;
                                   Log.i("sss", "last playload " + playload.length + " bytes = " + getBytesStr(playload));

                                   inputBufferIndex = decoder.dequeueInputBuffer(-1);
                                   if (inputBufferIndex >= 0){
                                       inputBuffer = inputBuffers[inputBufferIndex];
                                       inputBuffer.clear();
                                       byte[] allPlayload = new byte[playloadSize];
                                       int curLength = 0;
                                       for(byte[] playLoad:playLoads){
                                           System.arraycopy(playLoad,0,allPlayload, curLength, playLoad.length);
                                           curLength += playLoad.length;
                                       }
                                       Log.i("sss", "diff timeStamp AlllayLoad " + allPlayload.length + "bytes = " + getBytesStr(allPlayload));
                                       inputBuffer.put(allPlayload);

                                       decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
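                                        // Note: allPlayload.length bytes were written to the
                                        // input buffer, but only data.length (the size of the
                                        // last packet) is passed to queueInputBuffer() here.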
                                       decoder.flush();
                                   }

                                   bufferInfo = new MediaCodec.BufferInfo();
                                   outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000);
                                   if(outputBufferIndex!= -1)
                                       Log.i("sss", "outputBufferIndex = " + outputBufferIndex);

                                   playLoads = new ArrayList<>();
                                   prevData = new byte[65535];
                                   playloadSize = 0;
                               }

                           }

                       prevData = data.clone();

    The outputBufferIndex still returns -1.

    If I change timeoutUs from 10000 to -1, it never proceeds to the next line.

    I’ve searched for a week but still no luck T_T

    Why does dequeueOutputBuffer always return -1?

    What is the problem in my code?

    Could you point out how to fix my code so it works correctly?

    Thanks for your help.

    Edit#1

    Thanks to @mstorsjo for guiding me to packetization, and I found useful information:

    How to process raw UDP packets so that they can be decoded by a decoder filter in a directshow source filter

    Then I edited my code as below:

    if((data[12] & 0x1f) == 28){
      if((data[13] & 0x80) == 0x80){ //found start bit
         inputBufferIndex = decoder.dequeueInputBuffer(-1);
         if (inputBufferIndex >= 0){
            inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
         byte result = (byte)((data[12] & 0xe0) + (data[13] & 0x1f)); // rebuild NAL header from FU indicator + FU header
            inputBuffer.put(new byte[] {0,0,1});
            inputBuffer.put(result);
            inputBuffer.put(data,14, data.length-14);
         }

      } else if((data[13] &0x40) == 0x40){ //found stop bit
         inputBuffer.put(data, 14, data.length -14);
         decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0);
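         // Note: the input buffer now holds the start code plus every fragment,
         // but only data.length (the size of this last packet) is queued here.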
         bufferInfo = new MediaCodec.BufferInfo();
         outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000);

         switch(outputBufferIndex)
         {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
               outputBuffers = decoder.getOutputBuffers();
               Log.w("sss", "Output Buffers Changed");
               break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
               Log.w("sss", "Output Format Changed");
               MediaFormat newFormat = decoder.getOutputFormat();
               Log.i("sss","New format : " + newFormat);

               break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
               Log.w("sss", "Try Again Later");
               break;
            default:
               outputBuffer = outputBuffers[outputBufferIndex];
               outputBuffer.position(bufferInfo.offset);
               outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
               decoder.releaseOutputBuffer(outputBufferIndex, true);

         }
      } else {
         inputBuffer.put(data, 14, data.length -14);
      }
    }
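
    For what it's worth, FU-A (type 28) is only one packetization mode defined by RFC 6184: single NAL unit packets (types 1–23) arrive whole and only need an Annex-B start code prepended before being queued. A minimal sketch of that extra branch (illustrative, reusing the variables above):

    int nalType = data[12] & 0x1f;
    if (nalType >= 1 && nalType <= 23) { // single NAL unit packet
       inputBufferIndex = decoder.dequeueInputBuffer(-1);
       if (inputBufferIndex >= 0) {
          inputBuffer = inputBuffers[inputBufferIndex];
          inputBuffer.clear();
          inputBuffer.put(new byte[] {0, 0, 0, 1});    // Annex-B start code
          inputBuffer.put(data, 12, data.length - 12); // NAL header + payload
          decoder.queueInputBuffer(inputBufferIndex, 0,
                4 + data.length - 12, 0, 0);
       }
    }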

    Now I can see some of the picture, but most of the screen is gray.

    What should I do next??

    Thank you.