
Other articles (103)
-
MediaSPIP 0.1 Beta version
25 April 2011
MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...) -
Supporting all media types
13 April 2011
Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)
-
HTML5 audio and video support
13 April 2011
MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
For older browsers the Flowplayer flash fallback is used.
MediaSPIP allows for media playback on major mobile platforms with the above (...)
On other sites (10015)
-
FFmpeg player backporting to Android 2.1 - one more problem
22 April 2024, by tretdm
I looked for a lot of information about how to build and use FFmpeg in early versions of Android, looked at the source code of players from 2011-2014, and was able to easily build FFmpeg 4.0.4 and 3.1.4 on the NDKv5 platform. I have highlighted the main things for this purpose:


- <android/bitmap.h> and <android/native_window.h> did not exist before Android 2.2 (API Level 8); this requires some effort to implement buffer management for A/V streams, since in practice, when playing video, the application silently crashed after a few seconds due to overflow (code example in C++ and Java below)
- FFmpeg: imho, the only way to support a sufficient number of codecs that are not officially included in Android 2.1 and above








void decodeVideoFromPacket(JNIEnv *env, jobject instance,
 jclass mplayer_class, AVPacket avpkt, 
 int total_frames, int length) {
 AVFrame *pFrame = NULL;
 AVFrame *pFrameRGB = NULL;
 pFrame = avcodec_alloc_frame();
 pFrameRGB = avcodec_alloc_frame();
 int frame_size = avpicture_get_size(PIX_FMT_RGB32, gVideoCodecCtx->width, gVideoCodecCtx->height);
 unsigned char* buffer = (unsigned char*)av_malloc((size_t)frame_size * 3);
 if (!buffer) {
 av_free(pFrame);
 av_free(pFrameRGB);
 return;
 }
 jbyteArray buffer2;
 jmethodID renderVideoFrames = env->GetMethodID(mplayer_class, "renderVideoFrames", "([BI)V");
 int frameDecoded;
 avpicture_fill((AVPicture*) pFrame,
 buffer,
 gVideoCodecCtx->pix_fmt,
 gVideoCodecCtx->width,
 gVideoCodecCtx->height
 );

 if (avpkt.stream_index == gVideoStreamIndex) { // If video stream found
 int size = avpkt.size;
 total_frames++;
 struct SwsContext *img_convert_ctx = NULL;
 avcodec_decode_video2(gVideoCodecCtx, pFrame, &frameDecoded, &avpkt);
 if (!frameDecoded || pFrame == NULL) {
 return;
 }

 try {
 PixelFormat pxf;
 // RGB565 by default for Android Canvas in pre-Gingerbread devices.
 if(android::get_android_api_version(env) >= ANDROID_API_CODENAME_GINGERBREAD) {
 pxf = PIX_FMT_BGR32;
 } else {
 pxf = PIX_FMT_RGB565;
 }

 int rgbBytes = avpicture_get_size(pxf, gVideoCodecCtx->width,
 gVideoCodecCtx->height);

 // Converting YUV to RGB frame & RGB frame to char* buffer 
 
 buffer = convertYuv2Rgb(pxf, pFrame, rgbBytes); // result of av_image_copy_to_buffer()

 if(buffer == NULL) {
 return;
 }

 buffer2 = env->NewByteArray((jsize) rgbBytes);
 env->SetByteArrayRegion(buffer2, 0, (jsize) rgbBytes,
 (jbyte *) buffer);
 env->CallVoidMethod(instance, renderVideoFrames, buffer2, rgbBytes);
 env->DeleteLocalRef(buffer2);
 free(buffer);
 } catch (...) {
 if (debug_mode) {
 LOGE(10, "[ERROR] Render video frames failed");
 return;
 }
 }
 }
}
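
A note on the allocation pattern above: every call allocates fresh AVFrames and a new RGB buffer, and the frames are never released, so memory pressure grows with each decoded packet. Purely as an illustration of the "buffer management" point from the list above (a sketch with hypothetical helper names and the same old FFmpeg API, not the original player code), the reusable resources can be allocated once and kept for the life of the stream:

static AVFrame *sFrame = NULL;          // reused decoded frame
static uint8_t *sRgbBuffer = NULL;      // reused RGB conversion buffer
static int sRgbBufferSize = 0;

// Allocate the per-stream buffers once, after the codec context is opened.
static bool initVideoBuffers(PixelFormat pxf) {
    sFrame = avcodec_alloc_frame();
    if (sFrame == NULL) return false;
    sRgbBufferSize = avpicture_get_size(pxf, gVideoCodecCtx->width,
                                        gVideoCodecCtx->height);
    sRgbBuffer = (uint8_t *) av_malloc((size_t) sRgbBufferSize);
    return sRgbBuffer != NULL;
}

// Release them once when the player is torn down, not per packet.
static void freeVideoBuffers() {
    av_free(sRgbBuffer); sRgbBuffer = NULL;
    av_free(sFrame);     sFrame = NULL;
}

With this structure, decodeVideoFromPacket() would only fill and convert into the preallocated buffers, and freeVideoBuffers() would run once when playback stops.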



private void renderVideoFrames(final byte[] buffer, final int length) {
 new Thread(new Runnable() {
 @Override
 public void run() {
 Canvas c;
 VideoTrack track = null;
 for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
 if (tracks.get(tracks_index) instanceof VideoTrack) {
 track = (VideoTrack) tracks.get(tracks_index);
 }
 }
 if (track != null) {
 int frame_width = track.frame_size[0];
 int frame_height = track.frame_size[1];
 if (frame_width > 0 && frame_height > 0) {
 try {
 // RGB_565 == 65K colours (16 bit)
 // RGB_8888 == 16.7M colours (24 bit w/ alpha ch.)
 int bpp = Build.VERSION.SDK_INT > 9 ? 16 : 24;
 Bitmap.Config bmp_config =
 bpp == 24 ? Bitmap.Config.RGB_565 : Bitmap.Config.ARGB_8888;
 Paint paint = new Paint();
 if(buffer != null && holder != null) {
 holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
 if((c = holder.lockCanvas()) == null) {
 Log.d(MPLAY_TAG, "Lock canvas failed");
 return;
 }
 ByteBuffer bbuf =
 ByteBuffer.allocateDirect(minVideoBufferSize);
 bbuf.rewind();
 for(int i = 0; i < buffer.length; i++) {
 bbuf.put(i, buffer[i]);
 }
 bbuf.rewind();

 // The approximate location where the application crashed.
 Bitmap bmp = Bitmap.createBitmap(frame_width, frame_height, bmp_config);
 bmp.copyPixelsFromBuffer(bbuf);
 
 float aspect_ratio = (float) frame_width / (float) frame_height;
 int scaled_width = (int)(aspect_ratio * (c.getHeight()));
 c.drawBitmap(bmp,
 null,
 new RectF(
 ((c.getWidth() - scaled_width) / 2), 0,
 ((c.getWidth() - scaled_width) / 2) + scaled_width,
 c.getHeight()),
 null);
 holder.unlockCanvasAndPost(c);
 bmp.recycle();
 bbuf.clear();
 } else {
 Log.d(MPLAY_TAG, "Video frame buffer is null");
 }
 } catch (Exception ex) {
 ex.printStackTrace();
 } catch (OutOfMemoryError oom) {
 oom.printStackTrace();
 stop();
 }
 }
 }
 }
 }).start();
 }



Exception (tested in an Android 4.1.2 emulator):


E/dalvikvm-heap: Out of memory on a 1228812-byte allocation
I/dalvikvm: "Thread-495" prio=5 tid=21 RUNNABLE
 ................................................
 at android.graphics.Bitmap.nativeCreate(Native Method)
 at android.graphics.Bitmap.createBitmap(Bitmap.java:640)
 at android.graphics.Bitmap.createBitmap(Bitmap.java:620)
 at [app_package_name].MediaPlayer$5.run(MediaPlayer.java:406)
 at java.lang.Thread.run(Thread.java:856)



For clarification: I first compiled FFmpeg 0.11.x on an Ubuntu 12.04 LTS virtual machine using a build script I wrote, then looked for player examples suitable for Android below 2.2 (there is unfortunately little information about them). When I opened a file in the player, it crashed with a stack or buffer overflow after showing the first frames, so I put off developing the player for some time.


Is there anything ready-made that, as a rule, fits into one C++ file and takes into account all the nuances of backporting? Thanks in advance.


-
Multithreading decoding A/V streams via FFmpeg + JNI + Android NDK causes artifacts
26 April 2024, by tretdm
It would seem that if I mute the callback that reads the audio buffer, the artifacts disappear.


(Screenshot: FFmpeg decoding issues visible in the picture)


I'll attach the code below to make it easier for you to understand what the problem is:


ffwrap.cpp


static void *audioDecoderThread(void *arg) {
 AudioDecoder *audioDec = (AudioDecoder*) arg;
 audioDec->prepare();
 audioDec->start();
}

static void *videoDecoderThread(void *arg) {
 VideoDecoder *videoDec = (VideoDecoder*) arg;
 videoDec->prepare();
 videoDec->start();
}

// ...

void FFmpegWrapper::startDecoding(int pStreamIndex) {
 if(pStreamIndex == gAudioStreamIndex) {
 AudioDecoder *audioDec = new AudioDecoder(
 gFormatCtx,
 gAudioCodecCtx,
 getStream(gAudioStreamIndex),
 gAudioStreamIndex,
 gInterface
 );

 pthread_t audioDecThread;
 pthread_create(&audioDecThread, NULL, &audioDecoderThread, (void*)audioDec);
 } else if(pStreamIndex == gVideoStreamIndex) {
 VideoDecoder *videoDec = new VideoDecoder(
 gFormatCtx,
 gVideoCodecCtx,
 getStream(gVideoStreamIndex),
 gVideoStreamIndex,
 gInterface
 );

 pthread_t videoDecThread;
 pthread_create(&videoDecThread, NULL, &videoDecoderThread, (void*)videoDec);
 }
}
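
A side observation on object lifetime in startDecoding() (not part of the original wrapper): the pthread_t handles and the heap-allocated decoders are not stored anywhere, so they can never be joined or deleted. Keeping them as members makes an orderly shutdown possible; a minimal sketch, with the member fields and the requestStop() flag assumed purely for illustration:

// Sketch: gAudioDecThread/gVideoDecThread and gAudioDec/gVideoDec are
// assumed member fields filled in by startDecoding(); requestStop() is a
// hypothetical flag that makes the decode loop exit.
void FFmpegWrapper::stopDecoding() {
    if (gAudioDec != NULL) {
        gAudioDec->requestStop();
        pthread_join(gAudioDecThread, NULL);   // wait for the thread to finish
        delete gAudioDec;
        gAudioDec = NULL;
    }
    if (gVideoDec != NULL) {
        gVideoDec->requestStop();
        pthread_join(gVideoDecThread, NULL);
        delete gVideoDec;
        gVideoDec = NULL;
    }
}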



audiodec.cpp


AudioDecoder::AudioDecoder(AVFormatContext *pFormatCtx,
 AVCodecContext *pCodecCtx,
 AVStream* pStream,
 int pStreamIndex,
 IFFmpegWrapper *pInterface) {
 gFormatCtx = pFormatCtx;
 gCodecCtx = pCodecCtx;
 gStream = pStream;
 gStreamIndex = pStreamIndex;
 gInterface = pInterface;
}

bool AudioDecoder::prepare() {
 gBufferSize = AV_MAX_AUDIO_FRAME_SIZE; 
 // ^ this constant is equal to 192000 and replaces the deprecated 
 // AVCODEC_MAX_AUDIO_FRAME_SIZE

 gBuffer = (short*) av_malloc(gBufferSize);
 gSwrCtx = swr_alloc();
 gSwrCtx = swr_alloc_set_opts(
 gSwrCtx, (int64_t) gCodecCtx->channel_layout, AV_SAMPLE_FMT_S16,
 gCodecCtx->sample_rate, gCodecCtx->channel_layout,
 gCodecCtx->sample_fmt, gCodecCtx->sample_rate, 0, NULL
 );
 swr_init(gSwrCtx);
 return gBuffer != NULL;
}

void *AudioDecoder::decodeInThread() {
 int status, dataSize, len;
 AVPacket avPkt;
 AVFrame *pFrame = av_frame_alloc();

 while(av_read_frame(gFormatCtx, &avPkt)>=0) {
 // Is it from the audio stream?
 if(avPkt.stream_index == gStreamIndex) {
 len = avcodec_decode_audio4(gStream->codec, pFrame, &status, &avPkt);
 if(len < 0) {
 break;
 }
 if (status) {
 dataSize = av_samples_get_buffer_size(
 NULL, gCodecCtx->channels, pFrame->nb_samples,
 gCodecCtx->sample_fmt, 1
 );
 uint8_t* buffer = (uint8_t*)av_malloc(sizeof(uint8_t) * dataSize);
 swr_convert(
 gSwrCtx, &buffer, dataSize,
 (const uint8_t **) pFrame->data,
 pFrame->nb_samples
 );
 memcpy(gBuffer, buffer, dataSize);
 av_free(buffer);

 // Calling callback for JNI bridging
 gInterface->onStreamDecoding((uint8_t*)gBuffer, dataSize, gStreamIndex);
 }
 }

 // Free the packet that was allocated by av_read_frame
 av_free_packet(&avPkt);
 av_packet_unref(&avPkt);
 }
 av_free(pFrame);
 stop();
}

bool AudioDecoder::start() {
 // ^ Problematic function, using pthread the problem remains 
 // except for commenting out this line
 decodeInThread();
 return true;
}

bool AudioDecoder::stop() {
 free(gBuffer);
 swr_free(&gSwrCtx);
 avcodec_close(gCodecCtx);
 return true;
}
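
For reference, swr_convert() takes its output size in samples per channel, not in bytes, and the converted byte count comes back from av_samples_get_buffer_size(). A minimal sketch of that call pattern (with hypothetical local names, not the code above):

// Sketch: convert one decoded frame to interleaved S16.
uint8_t *outBuf = NULL;
int outLinesize = 0;
av_samples_alloc(&outBuf, &outLinesize, gCodecCtx->channels,
                 pFrame->nb_samples, AV_SAMPLE_FMT_S16, 0);
int outSamples = swr_convert(gSwrCtx, &outBuf, pFrame->nb_samples,
                             (const uint8_t **) pFrame->data,
                             pFrame->nb_samples);
int outBytes = av_samples_get_buffer_size(NULL, gCodecCtx->channels,
                                          outSamples, AV_SAMPLE_FMT_S16, 1);
// hand outBytes bytes of outBuf to the consumer, then release it
av_freep(&outBuf);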



videodec.cpp


VideoDecoder::VideoDecoder(AVFormatContext *pFormatCtx,
 AVCodecContext *pCodecCtx,
 AVStream* pStream,
 int pStreamIndex,
 IFFmpegWrapper *pInterface) {
 gFormatCtx = pFormatCtx;
 gCodecCtx = pCodecCtx;
 gStream = pStream;
 gStreamIndex = pStreamIndex;
 gInterface = pInterface;
}

bool VideoDecoder::prepare() {
 return true; // dummy function yet
}

void *VideoDecoder::decodeInThread() {
 AVPacket avPkt;
 int vWidth = gCodecCtx->width,
 vHeight = gCodecCtx->height,
 status, len,
 dataSize = avpicture_get_size(AV_PIX_FMT_RGB32, vWidth, vHeight),
 packetSize, tVideoFrames;
 struct SwsContext *img_convert_ctx = NULL;

 gBuffer = (short*) av_mallocz((size_t)dataSize);

 while(av_read_frame(gFormatCtx, &avPkt)>=0) {
 gFrame = avcodec_alloc_frame();
 // Is it from the video stream?
 if(avPkt.stream_index == gStreamIndex) {
 packetSize = avPkt.size;
 struct SwsContext *img_convert_ctx = NULL;
 avpicture_fill((AVPicture*) gFrame,
 (const uint8_t*) gBuffer,
 gCodecCtx->pix_fmt,
 gCodecCtx->width,
 gCodecCtx->height
 );

 avcodec_decode_video2(gCodecCtx, gFrame, &status, &avPkt);
 if(!status || gFrame == NULL || packetSize == 0) {
 tVideoFrames++;
 continue;
 }
 AVPixelFormat pxf;

 pxf = AV_PIX_FMT_BGR32;

 convertYuv2Rgb(pxf, gFrame, dataSize);
 tVideoFrames++;
 gInterface->onStreamDecoding((uint8_t*)gBuffer, dataSize, gStreamIndex);
 }
 av_free(gFrame);

 // Free the packet that was allocated by av_read_frame
 av_free_packet(&avPkt);
 av_packet_unref(&avPkt);
 }

 stop();
}

short* VideoDecoder::convertYuv2Rgb(AVPixelFormat pxf, AVFrame* frame, int length) {
 // Convert video frame from YUV to RGB24
 AVFrame *frameRGB = av_frame_alloc();
 AVPixelFormat output_pxf = pxf;

 avpicture_fill((AVPicture *)frameRGB, (uint8_t*)gBuffer, output_pxf,
 gCodecCtx->width, gCodecCtx->height);
 const int width = gCodecCtx->width, height = gCodecCtx->height;
 SwsContext* img_convert_ctx = sws_getContext(width, height,
 gCodecCtx->pix_fmt,
 width, height, output_pxf, SWS_BICUBIC,
 NULL, NULL, NULL);


 if(img_convert_ctx == NULL) {
 LOGE(10, "[ERROR] Cannot initialize the conversion context!");
 sws_freeContext(img_convert_ctx);
 return NULL;
 }

 int ret = sws_scale(img_convert_ctx, (const uint8_t* const*)frame->data, frame->linesize, 0,
 gCodecCtx->height, frameRGB->data, frameRGB->linesize);
 if(frameRGB->data[0] == NULL) {
 LOGE(10, "[ERROR] SWS_Scale failed");
 }
 av_free(frameRGB);
 av_frame_unref(frameRGB);
 sws_freeContext(img_convert_ctx);
 return gBuffer;
}

bool VideoDecoder::start() {
 decodeInThread(); 
 // ^ Problematic function, using pthread the problem remains 
 // except for commenting out this line
 return true;
}

bool VideoDecoder::stop() {
 av_free(gFrame);
 avcodec_close(gCodecCtx);
 av_free(gBuffer);
 return true;
}
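
convertYuv2Rgb() above creates and frees a SwsContext on every frame; libswscale provides sws_getCachedContext() precisely so the context can be reused while the parameters stay the same. A sketch (assuming a gSwsCtx member added for illustration and freed once in stop()):

// Sketch: reuse the scaler across frames instead of rebuilding it per call.
gSwsCtx = sws_getCachedContext(gSwsCtx,
                               gCodecCtx->width, gCodecCtx->height,
                               gCodecCtx->pix_fmt,
                               gCodecCtx->width, gCodecCtx->height,
                               output_pxf, SWS_BICUBIC,
                               NULL, NULL, NULL);
if (gSwsCtx != NULL) {
    sws_scale(gSwsCtx, (const uint8_t * const *) frame->data,
              frame->linesize, 0, gCodecCtx->height,
              frameRGB->data, frameRGB->linesize);
}
// sws_freeContext(gSwsCtx) is then called exactly once, during shutdown.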



mplayer.cpp


void IPlayerWrapper::onResult(int cmdId, int resultCode) {
 JNIEnv* env;
 int attachResult = attachEnv(&env);
 if(attachResult < 2) {
 jclass jmPlay = env->GetObjectClass(instance);
 if(cmdId == FFMPEG_COMMAND_FIND_STREAMS) {
 gWrapper->openCodecs();
 } else if(cmdId == FFMPEG_COMMAND_OPEN_CODECS) {
 jmethodID onResultMid = env->GetMethodID(jmPlay, "onResult", "(II)V");
 env->CallVoidMethod(instance, onResultMid, (jint)cmdId, (jint)resultCode);
 }
 if(attachResult == 1) {
 gVM->DetachCurrentThread();
 }
 }
}

void IPlayerWrapper::onStreamDecoding(uint8_t* buffer, int bufferLen, int streamIndex) {
 JNIEnv* env;
 int attachResult = attachEnv(&env);
 if(attachResult < 2) {
 jclass jmPlay = env->GetObjectClass(instance);
 jBuffer = env->NewByteArray((jsize) bufferLen);
 env->SetByteArrayRegion(jBuffer, 0, (jsize) bufferLen, (jbyte *) buffer);
 if(streamIndex == gWrapper->gAudioStreamIndex) {
 jmethodID renderAudioMid = env->GetMethodID(jmPlay, "renderAudio", "([BI)V");
 env->CallVoidMethod(instance, renderAudioMid, jBuffer, bufferLen);
 } else if(streamIndex == gWrapper->gVideoStreamIndex) {
 jmethodID renderVideoMid = env->GetMethodID(jmPlay, "renderVideo", "([BI)V");
 env->CallVoidMethod(instance, renderVideoMid, jBuffer, bufferLen);
 }
 env->ReleaseByteArrayElements(jBuffer, (jbyte *)env->GetByteArrayElements(jBuffer, NULL), JNI_ABORT);
 env->DeleteLocalRef(jBuffer);
 env->DeleteLocalRef(jmPlay);
 if(attachResult == 1) {
 gVM->DetachCurrentThread();
 }
 }
}


JNIEXPORT void JNICALL naInit(JNIEnv *env, jobject instance) {
 gInterface = new IPlayerWrapper();
 gInterface->instance = env->NewGlobalRef(instance);
 gWrapper = new FFmpegWrapper(gDebugMode, gInterface);
}

JNIEXPORT void JNICALL naPlay(JNIEnv *env, jobject instance, int streamType) {
 gVMArgs.version = JNI_VERSION_1_6;
 gVMArgs.name = NULL;
 gVMArgs.group = NULL;
 gWrapper->setPlaybackState(FFMPEG_PLAYBACK_PLAYING);
 //gWrapper->startDecoding();
}

JNIEXPORT void JNICALL naStartAudioDecoding(JNIEnv *env, jobject instance) {
 gWrapper->startDecoding(gWrapper->gAudioStreamIndex);
}

JNIEXPORT void JNICALL naStartVideoDecoding(JNIEnv *env, jobject instance) {
 gWrapper->startDecoding(gWrapper->gVideoStreamIndex);
}
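
The attachEnv() helper used by the callbacks is not shown. For context, a typical implementation (a sketch only; the return convention is inferred from how attachResult is checked above) attaches the calling native thread to the VM on demand:

// Sketch: returns 0 if the thread was already attached, 1 if it was
// attached here (caller detaches later), 2 if no JNIEnv could be obtained.
int attachEnv(JNIEnv **env) {
    int status = gVM->GetEnv((void **) env, JNI_VERSION_1_6);
    if (status == JNI_OK) {
        return 0;
    }
    if (status == JNI_EDETACHED &&
        gVM->AttachCurrentThread(env, &gVMArgs) == JNI_OK) {
        return 1;
    }
    return 2;
}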



FFmpegPlayer.java


@Override
 public void start() throws IllegalStateException {
 if(tracks != null) {
 naPlay();
 Log.d(MPLAY_TAG, "Playing...");
 FFAudioTrack audio_track = null;
 FFVideoTrack video_track = null;
 for(int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
 if(tracks.get(tracks_index) instanceof FFAudioTrack) {
 audio_track = (FFAudioTrack) tracks.get(tracks_index);
 } else if(tracks.get(tracks_index) instanceof FFVideoTrack) {
 video_track = (FFVideoTrack) tracks.get(tracks_index);
 }
 }

 naStartAudioDecoding();
 naStartVideoDecoding();
 }
 }

 private void renderAudio(final byte[] buffer, final int length) {
 AudioTrack track = null;
 if (buffer == null) {
 Log.e(MPLAY_TAG, "Audio buffer is empty");
 return;
 }

 if (!prepared_audio_buffer) {
 for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
 if (tracks.get(tracks_index) instanceof FFAudioTrack) {
 track = (FFAudioTrack) tracks.get(tracks_index);
 }
 }
 if (track == null) {
 Log.e(MPLAY_TAG, "Audio track not found");
 return;
 }
 int ch_config = track.channels == 2 ?
 AudioFormat.CHANNEL_CONFIGURATION_STEREO :
 AudioFormat.CHANNEL_CONFIGURATION_MONO;

 audio_track = new AudioTrack(AudioManager.STREAM_MUSIC, (int) track.sample_rate,
 ch_config,
 AudioFormat.ENCODING_PCM_16BIT, length, AudioTrack.MODE_STREAM);

 minAudioBufferSize = AudioRecord.getMinBufferSize(
 (int) (track.sample_rate),
 ch_config,
 AudioFormat.ENCODING_PCM_16BIT);

 audio_track.play();
 prepared_audio_buffer = true;
 }

 try {
 audio_track.write(buffer, 0, length);
 } catch (Exception ignored) {
 }
 }

 private void renderVideo(final byte[] buffer, final int length) {
 Canvas c;
 videoBuffer = buffer;
 OvkVideoTrack track = null;
 for (int tracks_index = 0; tracks_index < tracks.size(); tracks_index++) {
 if (tracks.get(tracks_index) instanceof FFVideoTrack) {
 track = (FFVideoTrack) tracks.get(tracks_index);
 }
 }
 if (track != null) {
 int frame_width = track.frame_size[0];
 int frame_height = track.frame_size[1];
 if (frame_width > 0 && frame_height > 0) {
 minVideoBufferSize = frame_width * frame_height * 4;
 try {
 // RGB_565 == 65K colours (16 bit)
 // RGB_8888 == 16.7M colours (24 bit w/ alpha ch.)
 int bpp = Build.VERSION.SDK_INT > 9 ? 16 : 24;
 Bitmap.Config bmp_config =
 bpp == 24 ? Bitmap.Config.RGB_565 : Bitmap.Config.ARGB_8888;
 if(videoBuffer != null && holder != null) {
 holder.setType(SurfaceHolder.SURFACE_TYPE_NORMAL);
 if((c = holder.lockCanvas()) == null) {
 Log.d(MPLAY_TAG, "Lock canvas failed");
 return;
 }
 ByteBuffer bbuf =
 ByteBuffer.allocateDirect(minVideoBufferSize);
 bbuf.rewind();
 for(int i = 0; i < videoBuffer.length; i++) {
 bbuf.put(i, videoBuffer[i]);
 }
 bbuf.rewind();
 Bitmap bmp = Bitmap.createBitmap(frame_width, frame_height, bmp_config);
 bmp.copyPixelsFromBuffer(bbuf);
 float aspect_ratio = (float) frame_width / (float) frame_height;
 int scaled_width = (int)(aspect_ratio * (c.getHeight()));
 videoBuffer = null;
 c.drawBitmap(bmp,
 null,
 new RectF(
 ((c.getWidth() - scaled_width) / 2), 0,
 ((c.getWidth() - scaled_width) / 2) + scaled_width,
 c.getHeight()),
 null);
 holder.unlockCanvasAndPost(c);
 bmp.recycle();
 bbuf.clear();
 } else {
 Log.d(MPLAY_TAG, "Video frame buffer is null");
 }
 } catch (Exception ex) {
 ex.printStackTrace();
 } catch (OutOfMemoryError oom) {
 oom.printStackTrace();
 stop();
 }
 }
 }
 }




I used FFmpeg version 2.8.11 and wrote a wrapper specifically for an application that supports Android 2.1 and higher. After switching to OOP and multithreading, I noticed these decoder artifacts, and the buffer often times out, as indicated by Android's AudioFlinger in the logs. It does not matter whether it is an AVD or a real device.


04-26 20:56:51.215 1333-1372/? I/AudioFlinger: BUFFER TIMEOUT: remove(4097) from active list on thread 0xaf183540



Trying files with other encodings does not solve this problem, since the problem lies in the implementation of the wrapper itself, which I cannot figure out.
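
One structural detail stands out in the code above (an observation, not a confirmed diagnosis): both decodeInThread() loops call av_read_frame() on the same AVFormatContext from two threads at once, and FFmpeg does not guarantee that demuxing a single context concurrently is safe. A common alternative is one demux thread that distributes packets into per-stream queues; a minimal sketch, where PacketQueue is a hypothetical thread-safe queue (mutex plus condition variable), not an FFmpeg type:

// Sketch: single demux thread feeding per-stream packet queues.
static void *demuxThread(void *arg) {
    FFmpegWrapper *w = (FFmpegWrapper *) arg;
    AVPacket pkt;
    while (av_read_frame(w->gFormatCtx, &pkt) >= 0) {
        if (pkt.stream_index == w->gAudioStreamIndex) {
            audioQueue.push(pkt);          // audio decoder pops from here
        } else if (pkt.stream_index == w->gVideoStreamIndex) {
            videoQueue.push(pkt);          // video decoder pops from here
        } else {
            av_free_packet(&pkt);          // stream we do not play
        }
    }
    audioQueue.signalEndOfStream();
    videoQueue.signalEndOfStream();
    return NULL;
}

The decoder threads would then only decode and convert; they would never touch the demuxer directly.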


-
H264 streamed video stutter and freeze with MediaCodec, Android 4.1.2
5 March 2015, by Wajih
I have been trying my heart out to remove the stutter from an Android RTSP client.
Here is my setup:
- An FFmpeg server streams a live video on Win7. The video is 1200x900 in size and is streamed in H264 format.
- I receive the video packets on an Android (4.1.2) client under JNI, which pushes the packets to Java. The device is a Samsung Tab 4.
- Packets are decoded using MediaCodec. Once the call from JNI pushes the packets into MediaCodec, another thread in Java tries to dequeue the data and display it on a SurfaceView (it is a GLSurfaceView).
Despite my efforts of using a queue to buffer the packets and changing wait times to 0, -1 and 1000000, I am unable to get a clean streamed video. I understand that there is some packet loss (1% to 10%), but I am getting a broken video with stutter (some even call it jitter): green patches, pink screens, gray slices. You name it, it is there, and the problem seems to be exaggerated when there is fast movement in the video.
At the moment I am not sure where the problem lies. I tried a Windows version of the client (with FFmpeg decoding) and it works smoothly despite the packet loss. What am I doing wrong? Any guidance is appreciated.
Below is the client-end code for Android and the server-end FFmpeg settings I read from a config file.

// Function called from JNI
public int decodeVideo(byte[] data, int size, long presentationTimeUs, boolean rtpMarker, int flag)
{
if(vdecoder == null)
return -1;
if(currVInbufIdx == -1) {
vdecoderInbufIdx = vdecoder.dequeueInputBuffer(1000000); //1000000/*1s*/
if(vdecoderInbufIdx < 0) {
Log.d("log","decodeVideo@1: frame dropped");
vdecoderRet = -1;
return vdecoderRet;
}
currVInbufIdx = vdecoderInbufIdx;
currVPts = presentationTimeUs;
currVFlag = flag;
inputVBuffers[currVInbufIdx].clear();
}
vdecoderPos = inputVBuffers[currVInbufIdx].position();
vdecoderRemaining = inputVBuffers[currVInbufIdx].remaining();
if(flag==currVFlag && vdecoderRemaining >= size && currVPts == presentationTimeUs
&& rtpMarker == false
/*&&(pos < vbufferLevel || vbufferLevel<=0)*/)
{
/* Queue without decoding */
inputVBuffers[currVInbufIdx].put(data, 0,size);
}
else
{
if(flag==currVFlag && vdecoderRemaining >= size && currVPts == presentationTimeUs
&& rtpMarker)
{
inputVBuffers[currVInbufIdx].put(data, 0, size);
queued = true;
}
Log.d("log", "decodeVideo: submit,"
+ " pts=" + Long.toString(currVPts)
+ " position="+inputVBuffers[currVInbufIdx].position()
+ " capacity="+inputVBuffers[currVInbufIdx].capacity()
+ " VBIndex="+currVInbufIdx
);
vdecoder.queueInputBuffer(currVInbufIdx, 0, inputVBuffers[currVInbufIdx].position(), currVPts, currVFlag);
//
vdecoderInbufIdx = vdecoder.dequeueInputBuffer(1000000);//1000000/*1s*/
if(vdecoderInbufIdx >= 0)
{
currVInbufIdx = vdecoderInbufIdx;
currVPts = presentationTimeUs;
currVFlag = flag;
inputVBuffers[currVInbufIdx].clear();
//if(queued == false)
{
inputVBuffers[vdecoderInbufIdx].put(data, 0, size);
}
}
else
{
currVInbufIdx = -1;
currVPts = -1;
vdecoderRet = -1;
Log.d("log","decodeVideo@2: frame dropped");
}
}
return vdecoderRet;
}

And here we have the thread that calls for a render:
// Function at android. Called by a separate thread.
private void videoRendererThreadProc() {
if(bufinfo == null)
bufinfo = new MediaCodec.BufferInfo();
videoRendered = false;
Log.d("log", "videoRenderer started.");
while(!Thread.interrupted() && !quitVideoRenderer)
{
Log.d("log", "videoRendererThreadProc");
outbufIdx = vdecoder.dequeueOutputBuffer(bufinfo,1000000);//500000
switch (outbufIdx)
{
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("log", "decodeVideo: output buffers changed.");
// outputBuffers = vdecoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("log", "decodeVideo: format changed - " + vdecoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// Log.d("log", "decodeVideo: try again later.");
break;
default:
// decoded or rendered
videoRendered = true;
vdecoder.releaseOutputBuffer(outbufIdx, true);
//Log.d("log", "decodeVideo: Rendering...!!!.");
}
}
// flush decoder
//vdecoder.queueInputBuffer(0, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
outbufIdx = vdecoder.dequeueOutputBuffer(bufinfo, 1000000);//10000
if(outbufIdx >= 0)
{
vdecoder.releaseOutputBuffer(outbufIdx, true);
}
bufinfo = null;
videoRendered = false;
//
Log.d("log", "videoRenderer terminated.");
}

And the FFmpeg settings at the server are as follows:
[slices] = 4 # --slices
[threads] = 4 # --threads
[profile] = high # --profile main|baseline
[preset] = faster # --preset faster|ultrafast
[tune] = zerolatency # --tune