
Trying to sync audio/visual using FFMpeg and openAL
22 August 2013, by user1379811
Hi, I have been studying the dranger FFmpeg tutorial, which explains how to sync audio and video once you have frames displaying and audio playing, which is where I'm at.
Unfortunately, the tutorial is out of date (Stephen Dranger explained that to me himself), and it also uses SDL, which I'm not using; this is for a BlackBerry 10 application.
I just cannot make the video frames display at the correct speed (they play far too fast), and I have been trying for over a week now, seriously!
I have 3 threads running: one reads from the stream into audio and video packet queues, and the other two consume those queues for audio and video respectively.
If somebody could explain what's happening after scanning my relevant code, you would be a lifesaver.
The delay (what I pass to usleep(testDelay)) seems to keep going up (incrementing), which doesn't seem right to me.
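For reference, the packet_queue_* helpers called below are not shown in my listing; here is roughly what they look like, as a sketch reconstructed from the dranger tutorial using Qt primitives (the exact field names and blocking semantics in my code may differ slightly):

extern "C" {
#include <libavformat/avformat.h>
}
#include <QMutex>
#include <QQueue>
#include <QWaitCondition>

// Blocking packet queue in the style of the dranger tutorial.
struct PacketQueue {
    QQueue<AVPacket> pkts;
    QMutex mutex;
    QWaitCondition cond;
};

void packet_queue_init(PacketQueue *q) {
    (void)q; // the Qt members construct themselves, nothing else to do
}

int packet_queue_put(PacketQueue *q, AVPacket *pkt) {
    if (av_dup_packet(pkt) < 0) // take ownership of the packet data
        return -1;
    QMutexLocker lock(&q->mutex);
    q->pkts.enqueue(*pkt);
    q->cond.wakeOne(); // wake one blocked consumer
    return 0;
}

int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block) {
    QMutexLocker lock(&q->mutex);
    while (q->pkts.isEmpty()) {
        if (!block)
            return -1;
        q->cond.wait(&q->mutex); // mutex is released while waiting
    }
    *pkt = q->pkts.dequeue();
    return 1;
}

My reader/decoder thread then starts like this: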
count = 1;
MyApp* inst = worker->app;//(VideoUploadFacebook*)arg;
qDebug() << "\n start loadstream";
w = new QWaitCondition();
w2 = new QWaitCondition();
context = avformat_alloc_context();
inst->threadStarted = true;
cout << "start of decoding thread";
cout.flush();
av_register_all();
avcodec_register_all();
avformat_network_init();
av_log_set_callback(&log_callback);
AVInputFormat *pFormat;
//const char device[] = "/dev/video0";
const char formatName[] = "mp4";
cout << "2start of decoding thread";
cout.flush();
if (!(pFormat = av_find_input_format(formatName))) {
printf("can't find input format %s\n", formatName);
//return void*;
}
//open rtsp
if(avformat_open_input(&context, inst->capturedUrl.data(), pFormat,NULL) != 0){
// return ;
cout << "error opening of decoding thread: " << inst->capturedUrl.data();
cout.flush();
}
cout << "3start of decoding thread";
cout.flush();
// av_dump_format(context, 0, inst->capturedUrl.data(), 0);
/* if(avformat_find_stream_info(context,NULL) < 0){
return EXIT_FAILURE;
}
*/
//search video stream
for(int i = 0; i < context->nb_streams; i++){
if(context->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
inst->video_stream_index = i;
}
cout << "3z start of decoding thread";
cout.flush();
AVFormatContext* oc = avformat_alloc_context();
av_read_play(context);//play RTSP
AVDictionary *optionsDict = NULL;
ccontext = context->streams[inst->video_stream_index]->codec;
inst->audioc = context->streams[1]->codec;
cout << "4start of decoding thread";
cout.flush();
codec = avcodec_find_decoder(ccontext->codec_id);
ccontext->pix_fmt = PIX_FMT_YUV420P;
AVCodec* audio_codec = avcodec_find_decoder(inst->audioc->codec_id);
inst->packet = new AVPacket();
if (!audio_codec) {
cout << "audio codec not found\n"; //fflush( stdout );
exit(1);
}
if (avcodec_open2(inst->audioc, audio_codec, NULL) < 0) {
cout << "could not open codec\n"; //fflush( stdout );
exit(1);
}
if (avcodec_open2(ccontext, codec, &optionsDict) < 0) exit(1);
cout << "5start of decoding thread";
cout.flush();
inst->pic = avcodec_alloc_frame();
av_init_packet(inst->packet);
while(av_read_frame(context,inst->packet) >= 0 && &inst->keepGoing)
{
if(inst->packet->stream_index == 0){//packet is video
int check = 0;
// av_init_packet(inst->packet);
int result = avcodec_decode_video2(ccontext, inst->pic, &check, inst->packet);
if(check)
break;
}
}
inst->originalVideoWidth = inst->pic->width;
inst->originalVideoHeight = inst->pic->height;
float aspect = (float)inst->originalVideoHeight / (float)inst->originalVideoWidth;
inst->newVideoWidth = inst->originalVideoWidth;
int newHeight = (int)(inst->newVideoWidth * aspect);
inst->newVideoHeight = newHeight;//(int)inst->originalVideoHeight / inst->originalVideoWidth * inst->newVideoWidth;// = new height
int size = avpicture_get_size(PIX_FMT_YUV420P, inst->originalVideoWidth, inst->originalVideoHeight);
uint8_t* picture_buf = (uint8_t*)(av_malloc(size));
avpicture_fill((AVPicture *) inst->pic, picture_buf, PIX_FMT_YUV420P, inst->originalVideoWidth, inst->originalVideoHeight);
picrgb = avcodec_alloc_frame();
int size2 = avpicture_get_size(PIX_FMT_YUV420P, inst->newVideoWidth, inst->newVideoHeight);
uint8_t* picture_buf2 = (uint8_t*)(av_malloc(size2));
avpicture_fill((AVPicture *) picrgb, picture_buf2, PIX_FMT_YUV420P, inst->newVideoWidth, inst->newVideoHeight);
if(ccontext->pix_fmt != PIX_FMT_YUV420P)
{
std::cout << "fmt != 420!!!: " << ccontext->pix_fmt << std::endl;//
// return (EXIT_SUCCESS);//-1;
}
if (inst->createForeignWindow(inst->myForeignWindow->windowGroup(),
"HelloForeignWindowAppIDqq", 0,
0, inst->newVideoWidth,
inst->newVideoHeight)) {
} else {
qDebug() << "The ForeginWindow was not properly initialized";
}
inst->keepGoing = true;
inst->img_convert_ctx = sws_getContext(inst->originalVideoWidth, inst->originalVideoHeight, PIX_FMT_YUV420P, inst->newVideoWidth, inst->newVideoHeight,
PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
is = (VideoState*)av_mallocz(sizeof(VideoState));
if (!is)
return NULL;
is->audioStream = 1;
is->audio_st = context->streams[1];
is->audio_buf_size = 0;
is->audio_buf_index = 0;
is->videoStream = 0;
is->video_st = context->streams[0];
is->frame_timer = (double)av_gettime() / 1000000.0;
is->frame_last_delay = 40e-3;
is->av_sync_type = DEFAULT_AV_SYNC_TYPE;
//av_strlcpy(is->filename, filename, sizeof(is->filename));
is->iformat = pFormat;
is->ytop = 0;
is->xleft = 0;
/* start video display */
is->pictq_mutex = new QMutex();
is->pictq_cond = new QWaitCondition();
is->subpq_mutex = new QMutex();
is->subpq_cond = new QWaitCondition();
is->video_current_pts_time = av_gettime();
packet_queue_init(&audioq);
packet_queue_init(&videoq);
is->audioq = audioq;
is->videoq = videoq;
AVPacket* packet2 = new AVPacket();
ccontext->get_buffer = our_get_buffer;
ccontext->release_buffer = our_release_buffer;
av_init_packet(packet2);
while(inst->keepGoing)
{
if(av_read_frame(context,packet2) < 0 && keepGoing)
{
printf("bufferframe Could not read a frame from stream.\n");
fflush( stdout );
}else {
if(packet2->stream_index == 0) {
packet_queue_put(&videoq, packet2);
} else if(packet2->stream_index == 1) {
packet_queue_put(&audioq, packet2);
} else {
av_free_packet(packet2);
}
if(!videoThreadStarted)
{
videoThreadStarted = true;
QThread* thread = new QThread;
videoThread = new VideoStreamWorker(this);
// Give QThread ownership of Worker Object
videoThread->moveToThread(thread);
connect(videoThread, SIGNAL(error(QString)), this, SLOT(errorHandler(QString)));
QObject::connect(videoThread, SIGNAL(refreshNeeded()), this, SLOT(refreshNeededSlot()));
connect(thread, SIGNAL(started()), videoThread, SLOT(doWork()));
connect(videoThread, SIGNAL(finished()), thread, SLOT(quit()));
connect(videoThread, SIGNAL(finished()), videoThread, SLOT(deleteLater()));
connect(thread, SIGNAL(finished()), thread, SLOT(deleteLater()));
thread->start();
}
if(!audioThreadStarted)
{
audioThreadStarted = true;
QThread* thread = new QThread;
AudioStreamWorker* videoThread = new AudioStreamWorker(this);
// Give QThread ownership of Worker Object
videoThread->moveToThread(thread);
// Connect videoThread error signal to this errorHandler SLOT.
connect(videoThread, SIGNAL(error(QString)), this, SLOT(errorHandler(QString)));
// Connects the thread’s started() signal to the process() slot in the videoThread, causing it to start.
connect(thread, SIGNAL(started()), videoThread, SLOT(doWork()));
connect(videoThread, SIGNAL(finished()), thread, SLOT(quit()));
connect(videoThread, SIGNAL(finished()), videoThread, SLOT(deleteLater()));
// Make sure the thread object is deleted after execution has finished.
connect(thread, SIGNAL(finished()), thread, SLOT(deleteLater()));
thread->start();
}
}
} //finished main loop
int MyApp::video_thread() {
//VideoState *is = (VideoState *)arg;
AVPacket pkt1, *packet = &pkt1;
int len1, frameFinished;
double pts;
pic = avcodec_alloc_frame();
for(;;) {
if(packet_queue_get(&videoq, packet, 1) < 0) {
// means we quit getting packets
break;
}
pts = 0;
global_video_pkt_pts2 = packet->pts;
// Decode video frame
len1 = avcodec_decode_video2(ccontext, pic, &frameFinished, packet);
if(packet->dts == AV_NOPTS_VALUE
&& pic->opaque && *(uint64_t*)pic->opaque != AV_NOPTS_VALUE) {
pts = *(uint64_t *)pic->opaque;
} else if(packet->dts != AV_NOPTS_VALUE) {
pts = packet->dts;
} else {
pts = 0;
}
pts *= av_q2d(is->video_st->time_base);
// Did we get a video frame?
if(frameFinished) {
pts = synchronize_video(is, pic, pts);
actualPts = pts;
refreshSlot();
}
av_free_packet(packet);
}
av_free(pic);
return 0;
}
int MyApp::audio_thread() {
//VideoState *is = (VideoState *)arg;
AVPacket pkt1, *packet = &pkt1;
int len1, frameFinished;
ALuint source;
ALenum format = 0;
// ALuint frequency;
ALenum alError;
ALint val2;
ALuint buffers[NUM_BUFFERS];
int dataSize;
ALCcontext *aContext;
ALCdevice *device;
if (!alutInit(NULL, NULL)) {
// printf(stderr, "init alut error\n");
}
device = alcOpenDevice(NULL);
if (device == NULL) {
// printf(stderr, "device error\n");
}
//Create a context
aContext = alcCreateContext(device, NULL);
alcMakeContextCurrent(aContext);
if(!(aContext)) {
printf("Could not create the OpenAL context!\n");
return 0;
}
alListener3f(AL_POSITION, 0.0f, 0.0f, 0.0f);
//ALenum alError;
if(alGetError() != AL_NO_ERROR) {
cout << "could not create buffers";
cout.flush();
fflush( stdout );
return 0;
}
alGenBuffers(NUM_BUFFERS, buffers);
alGenSources(1, &source);
if(alGetError() != AL_NO_ERROR) {
cout << "after Could not create buffers or the source.\n";
cout.flush( );
return 0;
}
int i;
int indexOfPacket;
double pts;
//double pts;
int n;
for(i = 0; i < NUM_BUFFERS; i++)
{
if(packet_queue_get(&audioq, packet, 1) < 0) {
// means we quit getting packets
break;
}
cout << "streamindex=audio \n";
cout.flush( );
//printf("before decode audio\n");
//fflush( stdout );
// AVPacket *packet = new AVPacket();//malloc(sizeof(AVPacket*));
AVFrame *decodedFrame = NULL;
int gotFrame = 0;
// AVFrame* decodedFrame;
if(!decodedFrame) {
if(!(decodedFrame = avcodec_alloc_frame())) {
cout << "Run out of memory, stop the streaming...\n";
fflush( stdout );
cout.flush();
return -2;
}
} else {
avcodec_get_frame_defaults(decodedFrame);
}
int len = avcodec_decode_audio4(audioc, decodedFrame, &gotFrame, packet);
if(len < 0) {
cout << "Error while decoding.\n";
cout.flush( );
return -3;
}
if(len < 0) {
/* if error, skip frame */
is->audio_pkt_size = 0;
//break;
}
is->audio_pkt_data += len;
is->audio_pkt_size -= len;
pts = is->audio_clock;
// *pts_ptr = pts;
n = 2 * is->audio_st->codec->channels;
is->audio_clock += (double)packet->size/
(double)(n * is->audio_st->codec->sample_rate);
if(gotFrame) {
cout << "got audio frame.\n";
cout.flush( );
// We have a buffer ready, send it
dataSize = av_samples_get_buffer_size(NULL, audioc->channels,
decodedFrame->nb_samples, audioc->sample_fmt, 1);
if(!format) {
if(audioc->sample_fmt == AV_SAMPLE_FMT_U8 ||
audioc->sample_fmt == AV_SAMPLE_FMT_U8P) {
if(audioc->channels == 1) {
format = AL_FORMAT_MONO8;
} else if(audioc->channels == 2) {
format = AL_FORMAT_STEREO8;
}
} else if(audioc->sample_fmt == AV_SAMPLE_FMT_S16 ||
audioc->sample_fmt == AV_SAMPLE_FMT_S16P) {
if(audioc->channels == 1) {
format = AL_FORMAT_MONO16;
} else if(audioc->channels == 2) {
format = AL_FORMAT_STEREO16;
}
}
if(!format) {
cout << "OpenAL can't open this format of sound.\n";
cout.flush( );
return -4;
}
}
printf("albufferdata audio b4.\n");
fflush( stdout );
alBufferData(buffers[i], format, *decodedFrame->data, dataSize, decodedFrame->sample_rate);
cout << "after albufferdata all buffers \n";
cout.flush( );
av_free_packet(packet);
//=av_free(packet);
av_free(decodedFrame);
if((alError = alGetError()) != AL_NO_ERROR) {
printf("Error while buffering.\n");
printAlError(alError);
return -6;
}
}
}
cout << "before quoe buffers \n";
cout.flush();
alSourceQueueBuffers(source, NUM_BUFFERS, buffers);
cout << "before play.\n";
cout.flush();
alSourcePlay(source);
cout << "after play.\n";
cout.flush();
if((alError = alGetError()) != AL_NO_ERROR) {
cout << "error strating stream.\n";
cout.flush();
printAlError(alError);
return 0;
}
// AVPacket *pkt = &is->audio_pkt;
while(keepGoing)
{
while(packet_queue_get(&audioq, packet, 1) >= 0) {
// means we quit getting packets
do {
alGetSourcei(source, AL_BUFFERS_PROCESSED, &val2);
usleep(SLEEP_BUFFERING);
} while(val2 <= 0);
if(alGetError() != AL_NO_ERROR)
{
fprintf(stderr, "Error gettingsource :(\n");
return 1;
}
while(val2--)
{
ALuint buffer;
alSourceUnqueueBuffers(source, 1, &buffer);
if(alGetError() != AL_NO_ERROR)
{
fprintf(stderr, "Error unqueue buffers :(\n");
// return 1;
}
AVFrame *decodedFrame = NULL;
int gotFrame = 0;
// AVFrame* decodedFrame;
if(!decodedFrame) {
if(!(decodedFrame = avcodec_alloc_frame())) {
cout << "Run out of memory, stop the streaming...\n";
//fflush( stdout );
cout.flush();
return -2;
}
} else {
avcodec_get_frame_defaults(decodedFrame);
}
int len = avcodec_decode_audio4(audioc, decodedFrame, &gotFrame, packet);
if(len < 0) {
cout << "Error while decoding.\n";
cout.flush( );
is->audio_pkt_size = 0;
return -3;
}
is->audio_pkt_data += len;
is->audio_pkt_size -= len;
if(packet->size <= 0) {
/* No data yet, get more frames */
//continue;
}
if(gotFrame) {
pts = is->audio_clock;
len = synchronize_audio(is, (int16_t *)is->audio_buf,
packet->size, pts);
is->audio_buf_size = packet->size;
pts = is->audio_clock;
// *pts_ptr = pts;
n = 2 * is->audio_st->codec->channels;
is->audio_clock += (double)packet->size /
(double)(n * is->audio_st->codec->sample_rate);
if(packet->pts != AV_NOPTS_VALUE) {
is->audio_clock = av_q2d(is->audio_st->time_base)*packet->pts;
}
len = av_samples_get_buffer_size(NULL, audioc->channels,
decodedFrame->nb_samples, audioc->sample_fmt, 1);
alBufferData(buffer, format, *decodedFrame->data, len, decodedFrame->sample_rate);
if(alGetError() != AL_NO_ERROR)
{
fprintf(stderr, "Error buffering :(\n");
return 1;
}
alSourceQueueBuffers(source, 1, &buffer);
if(alGetError() != AL_NO_ERROR)
{
fprintf(stderr, "Error queueing buffers :(\n");
return 1;
}
}
}
alGetSourcei(source, AL_SOURCE_STATE, &val2);
if(val2 != AL_PLAYING)
alSourcePlay(source);
}
//pic = avcodec_alloc_frame();
}
qDebug() << "end audiothread";
return 1;
}
void MyApp::refreshSlot()
{
if(true)
{
printf("got frame %d, %d\n", pic->width, ccontext->width);
fflush( stdout );
sws_scale(img_convert_ctx, (const uint8_t **)pic->data, pic->linesize,
0, originalVideoHeight, &picrgb->data[0], &picrgb->linesize[0]);
printf("rescaled frame %d, %d\n", newVideoWidth, newVideoHeight);
fflush( stdout );
//av_free_packet(packet);
//av_init_packet(packet);
qDebug() << "waking audio as video finished";
////mutex.unlock();
//mutex2.lock();
doingVideoFrame = false;
//doingAudioFrame = false;
////mutex2.unlock();
//mutex2.unlock();
//w2->wakeAll();
//w->wakeAll();
qDebug() << "now woke audio";
//pic = picrgb;
uint8_t *srcy = picrgb->data[0];
uint8_t *srcu = picrgb->data[1];
uint8_t *srcv = picrgb->data[2];
printf("got src yuv frame %d\n", &srcy);
fflush( stdout );
unsigned char *ptr = NULL;
screen_get_buffer_property_pv(mScreenPixelBuffer, SCREEN_PROPERTY_POINTER, (void**) &ptr);
unsigned char *y = ptr;
unsigned char *u = y + (newVideoHeight * mStride) ;
unsigned char *v = u + (newVideoHeight * mStride) / 4;
int i = 0;
printf("got buffer picrgbwidth= %d \n", newVideoWidth);
fflush( stdout );
for ( i = 0; i < newVideoHeight; i++)
{
int doff = i * mStride;
int soff = i * picrgb->linesize[0];
memcpy(&y[doff], &srcy[soff], newVideoWidth);
}
for ( i = 0; i < newVideoHeight / 2; i++)
{
int doff = i * mStride / 2;
int soff = i * picrgb->linesize[1];
memcpy(&u[doff], &srcu[soff], newVideoWidth / 2);
}
for ( i = 0; i < newVideoHeight / 2; i++)
{
int doff = i * mStride / 2;
int soff = i * picrgb->linesize[2];
memcpy(&v[doff], &srcv[soff], newVideoWidth / 2);
}
printf("before posttoscreen \n");
fflush( stdout );
video_refresh_timer();
qDebug() << "end refreshslot";
}
else
{
}
}
void MyApp::refreshNeededSlot2()
{
printf("blitting to buffer");
fflush(stdout);
screen_buffer_t screen_buffer;
screen_get_window_property_pv(mScreenWindow, SCREEN_PROPERTY_RENDER_BUFFERS, (void**) &screen_buffer);
int attribs[] = { SCREEN_BLIT_SOURCE_WIDTH, newVideoWidth, SCREEN_BLIT_SOURCE_HEIGHT, newVideoHeight, SCREEN_BLIT_END };
int res2 = screen_blit(mScreenCtx, screen_buffer, mScreenPixelBuffer, attribs);
printf("dirty rectangles");
fflush(stdout);
int dirty_rects[] = { 0, 0, newVideoWidth, newVideoHeight };
screen_post_window(mScreenWindow, screen_buffer, 1, dirty_rects, 0);
printf("done screneposdtwindow");
fflush(stdout);
}
void MyApp::video_refresh_timer() {
testDelay = 0;
// VideoState *is = ( VideoState* )userdata;
VideoPicture *vp;
//double pts = 0 ;
double actual_delay, delay, sync_threshold, ref_clock, diff;
if(is->video_st) {
if(false)////is->pictq_size == 0)
{
testDelay = 1;
schedule_refresh(is, 1);
} else {
// vp = &is->pictq[is->pictq_rindex];
delay = actualPts - is->frame_last_pts; /* the pts from last time */
if(delay <= 0 || delay >= 1.0) {
/* if incorrect delay, use previous one */
delay = is->frame_last_delay;
}
/* save for next time */
is->frame_last_delay = delay;
is->frame_last_pts = actualPts;
is->video_current_pts = actualPts;
is->video_current_pts_time = av_gettime();
/* update delay to sync to audio */
ref_clock = get_audio_clock(is);
diff = actualPts - ref_clock;
/* Skip or repeat the frame. Take delay into account
FFPlay still doesn't "know if this is the best guess." */
sync_threshold = (delay > AV_SYNC_THRESHOLD) ? delay : AV_SYNC_THRESHOLD;
if(fabs(diff) < AV_NOSYNC_THRESHOLD) {
if(diff <= -sync_threshold) {
delay = 0;
} else if(diff >= sync_threshold) {
delay = 2 * delay;
}
}
is->frame_timer += delay;
/* computer the REAL delay */
actual_delay = is->frame_timer - (av_gettime() / 1000000.0);
if(actual_delay < 0.010) {
/* Really it should skip the picture instead */
actual_delay = 0.010;
}
testDelay = (int)(actual_delay * 1000 + 0.5);
schedule_refresh(is, (int)(actual_delay * 1000 + 0.5));
/* show the picture! */
//video_display(is);
// SDL_CondSignal(is->pictq_cond);
// SDL_UnlockMutex(is->pictq_mutex);
}
} else {
testDelay = 100;
schedule_refresh(is, 100);
}
}
void MyApp::schedule_refresh(VideoState *is, int delay) {
qDebug() << "start schedule refresh timer" << delay;
typeOfEvent = FF_REFRESH_EVENT2;
w->wakeAll();
// SDL_AddTimer(delay,
}I am currently waiting on data in a loop in the following way
QMutex mutex;
mutex.lock();
while(keepGoing)
{
qDebug() << "MAINTHREAD" << testDelay;
w->wait(&mutex);
mutex.unlock();
qDebug() << "MAINTHREAD past wait";
if(!keepGoing)
{
break;
}
if(testDelay > 0 && typeOfEvent == FF_REFRESH_EVENT2)
{
usleep(testDelay);
refreshNeededSlot2();
}
else if(testDelay > 0 && typeOfEvent == FF_QUIT_EVENT2)
{
keepGoing = false;
exit(0);
break;
// usleep(testDelay);
// refreshNeededSlot2();
}
qDebug() << "MAINTHREADend";
mutex.lock();
}
mutex.unlock();

Please let me know if I need to provide any more relevant code. I'm sorry my code is untidy; I'm still learning C++ and, as previously mentioned, have been modifying this code for over a week.
I've just added a sample of the output I'm seeing from the print-outs I do to the console. I can't get my head around it (it's almost too complicated for my level of expertise), but when you see the frames being played and the audio playing, it's very difficult to give up, especially when it took me a couple of weeks to get to this stage.
Please give me a hand if you spot the problem.
MAINTHREAD past wait
pts after syncvideo= 1073394046
got frame 640, 640
start video_refresh_timer
actualpts = 1.66833
frame lastpts = 1.63497
start schedule refresh timer need to delay for 123
pts after syncvideo= 1073429033
got frame 640, 640
MAINTHREAD loop delay before refresh = 123
start video_refresh_timer
actualpts = 1.7017
frame lastpts = 1.66833
start schedule refresh timer need to delay for 115
MAINTHREAD past wait
pts after syncvideo= 1073464021
got frame 640, 640
start video_refresh_timer
actualpts = 1.73507
frame lastpts = 1.7017
start schedule refresh timer need to delay for 140
MAINTHREAD loop delay before refresh = 140
pts after syncvideo= 1073499008
got frame 640, 640
start video_refresh_timer
actualpts = 1.76843
frame lastpts = 1.73507
start schedule refresh timer need to delay for 163
MAINTHREAD past wait
pts after syncvideo= 1073533996
got frame 640, 640
start video_refresh_timer
actualpts = 1.8018
frame lastpts = 1.76843
start schedule refresh timer need to delay for 188
MAINTHREAD loop delay before refresh = 188
pts after syncvideo= 1073568983
got frame 640, 640
start video_refresh_timer
actualpts = 1.83517
frame lastpts = 1.8018
start schedule refresh timer need to delay for 246
MAINTHREAD past wait
pts after syncvideo= 1073603971
got frame 640, 640
start video_refresh_timer
actualpts = 1.86853
frame lastpts = 1.83517
start schedule refresh timer need to delay for 299
MAINTHREAD loop delay before refresh = 299
pts after syncvideo= 1073638958
got frame 640, 640
start video_refresh_timer
actualpts = 1.9019
frame lastpts = 1.86853
start schedule refresh timer need to delay for 358
MAINTHREAD past wait
pts after syncvideo= 1073673946
got frame 640, 640
start video_refresh_timer
actualpts = 1.93527
frame lastpts = 1.9019
start schedule refresh timer need to delay for 416
MAINTHREAD loop delay before refresh = 416
pts after syncvideo= 1073708933
got frame 640, 640
start video_refresh_timer
actualpts = 1.96863
frame lastpts = 1.93527
start schedule refresh timer need to delay for 474
MAINTHREAD past wait
pts after syncvideo= 1073742872
got frame 640, 640
MAINTHREAD loop delay before refresh = 474
start video_refresh_timer
actualpts = 2.002
frame lastpts = 1.96863
start schedule refresh timer need to delay for 518
MAINTHREAD past wait
pts after syncvideo= 1073760366
got frame 640, 640
start video_refresh_timer
actualpts = 2.03537
frame lastpts = 2.002
start schedule refresh timer need to delay for 575
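Update: to help illustrate the frame_timer arithmetic in video_refresh_timer with the numbers above, here is a tiny standalone sketch (the 10 ms of wall-clock time per decoded frame is an assumed figure for illustration, not something I measured):

#include <cstdio>

// The pts delta per frame in the log is 1.7017 - 1.66833 = ~0.0334 s.
// If each decode+display pass takes less wall-clock time than that,
// frame_timer outruns av_gettime() and the scheduled delay keeps growing.
int main() {
    double frame_timer = 0.0, now = 0.0;
    for (int f = 1; f <= 5; ++f) {
        frame_timer += 0.0334;  // is->frame_timer += delay;
        now += 0.010;           // assumed real time spent per frame
        printf("frame %d: actual_delay = %.0f ms\n",
               f, (frame_timer - now) * 1000);
    }
    // Prints 23, 47, 70, 94, 117: the same kind of climb as the
    // logged 123 -> 140 -> 163 -> ... -> 575 progression.
    return 0;
}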
Video created using H263 codec and ffmpeg does not play on android device [closed]
21 March 2013, by susheel tickoo
I have created a video using FFmpeg and the H.263 codec, but when I play the video on an Android device the player is unable to play it. I have used both the .mp4 and .3gp extensions.
void generate(JNIEnv *pEnv, jobject pObj,jobjectArray stringArray,int famerate,int width,int height,jstring videoFilename)
{
AVCodec *codec;
AVCodecContext *c= NULL;
//int framesnum=5;
int i,looper, out_size, size, x, y,encodecbuffsize,j;
__android_log_write(ANDROID_LOG_INFO, "record","************into generate************");
int imagecount= (*pEnv)->GetArrayLength(pEnv, stringArray);
__android_log_write(ANDROID_LOG_INFO, "record","************got magecount************");
int retval=-10;
FILE *f;
AVFrame *picture,*encoded_avframe;
uint8_t *encodedbuffer;
jbyte *raw_record;
char logdatadata[100];
int returnvalue = -1,numBytes =-1;
const char *gVideoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, videoFilename, NULL);
__android_log_write(ANDROID_LOG_INFO, "record","************got video file name************");
/* find the mpeg1 video encoder */
codec = avcodec_find_encoder(CODEC_ID_H264);
if (!codec) {
__android_log_write(ANDROID_LOG_INFO, "record","codec not found");
exit(1);
}
c= avcodec_alloc_context();
/*c->bit_rate = 400000;
c->width = width;
c->height = height;
c->time_base= (AVRational){1,famerate};
c->gop_size = 12; // emit one intra frame every ten frames
c->max_b_frames=0;
c->pix_fmt = PIX_FMT_YUV420P;
c->codec_type = AVMEDIA_TYPE_VIDEO;
c->codec_id = CODEC_ID_H263;*/
c->bit_rate = 400000;
// resolution must be a multiple of two
c->width = 176;
c->height = 144;
c->pix_fmt = PIX_FMT_YUV420P;
c->qcompress = 0.0;
c->qblur = 0.0;
c->gop_size = 20; //or 1
c->sub_id = 1;
c->workaround_bugs = FF_BUG_AUTODETECT;
//pFFmpeg->c->time_base = (AVRational){1,25};
c->time_base.num = 1;
c->time_base.den = famerate;
c->max_b_frames = 0; //pas de B frame en H263
// c->opaque = opaque;
c->dct_algo = FF_DCT_AUTO;
c->idct_algo = FF_IDCT_AUTO;
//lc->rtp_mode = 0;
c->rtp_payload_size = 1000;
c->rtp_callback = 0; // ffmpeg_rtp_callback;
c->flags |= CODEC_FLAG_QSCALE;
c->mb_decision = FF_MB_DECISION_RD;
c->thread_count = 1;
#define DEFAULT_RATE (16 * 8 * 1024)
c->rc_min_rate = DEFAULT_RATE;
c->rc_max_rate = DEFAULT_RATE;
c->rc_buffer_size = DEFAULT_RATE * 64;
c->bit_rate = DEFAULT_RATE;
sprintf(logdatadata, "------width from c ---- = %d",width);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
sprintf(logdatadata, "------height from c ---- = %d",height);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
__android_log_write(ANDROID_LOG_INFO, "record","************Found codec and now opening it************");
/* open it */
retval = avcodec_open(c, codec);
if ( retval < 0)
{
sprintf(logdatadata, "------avcodec_open ---- retval = %d",retval);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
__android_log_write(ANDROID_LOG_INFO, "record","could not open codec");
exit(1);
}
__android_log_write(ANDROID_LOG_INFO, "record","statement 5");
f = fopen(gVideoFileName, "ab");
if (!f) {
__android_log_write(ANDROID_LOG_INFO, "record","could not open video file");
exit(1);
}
__android_log_write(ANDROID_LOG_INFO, "record", "***************Allocating encodedbuffer*********\n");
encodecbuffsize = avpicture_get_size(PIX_FMT_RGB24, c->width, c->height);
sprintf(logdatadata, "encodecbuffsize = %d",encodecbuffsize);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
encodedbuffer = malloc(encodecbuffsize);
jclass cls = (*pEnv)->FindClass(pEnv, "com/canvasm/mediclinic/VideoGenerator");
jmethodID mid = (*pEnv)->GetMethodID(pEnv, cls, "videoProgress", "(Ljava/lang/String;)Ljava/lang/String;");
jmethodID mid_delete = (*pEnv)->GetMethodID(pEnv, cls, "deleteTempFile", "(Ljava/lang/String;)Ljava/lang/String;");
if (mid == 0)
return;
__android_log_write(ANDROID_LOG_INFO, "native","got method id");
for(i=0;i<=imagecount;i++) {
jboolean isCp;
int progress = 0;
float temp;
jstring string;
if(i==imagecount)
string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray, imagecount-1);
else
string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray, i);
const char *rawString = (*pEnv)->GetStringUTFChars(pEnv, string, &isCp);
__android_log_write(ANDROID_LOG_INFO, "record",rawString);
picture = OpenImage(rawString,width,height);
//WriteJPEG(c,picture,i);
// encode video
memset(encodedbuffer,0,encodecbuffsize);
//do{
for(looper=0;looper<5;looper++)
{
memset(encodedbuffer,0,encodecbuffsize);
out_size = avcodec_encode_video(c, encodedbuffer, encodecbuffsize, picture);
sprintf(logdatadata, "avcodec_encode_video ----- out_size = %d \n",out_size );
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
if(out_size>0)
break;
}
__android_log_write(ANDROID_LOG_INFO, "record","*************Start looping for same image*******");
returnvalue = fwrite(encodedbuffer, 1, out_size, f);
sprintf(logdatadata, "fwrite ----- returnvalue = %d \n",returnvalue );
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
__android_log_write(ANDROID_LOG_INFO, "record","*************End looping for same image*******");
// publishing progress
progress = ((i*100)/(imagecount+1))+15;//+1 is for last frame duplicated entry
if(progress<20 )
progress =20;
if(progress>=95 )
progress =95;
sprintf(logdatadata, "%d",progress );
jstring jstrBuf = (*pEnv)->NewStringUTF(pEnv, logdatadata);
(*pEnv)->CallObjectMethod(pEnv, pObj, mid,jstrBuf);
if(i>0)
(*pEnv)->CallObjectMethod(pEnv, pObj, mid_delete,string);
}
/* get the delayed frames */
for(; out_size; i++) {
fflush(stdout);
out_size = avcodec_encode_video(c, encodedbuffer, encodecbuffsize, NULL);
fwrite(encodedbuffer, 20, out_size, f);
}
/* add sequence end code to have a real mpeg file */
encodedbuffer[0] = 0x00;
encodedbuffer[1] = 0x00;
encodedbuffer[2] = 0x01;
encodedbuffer[3] = 0xb7;
fwrite(encodedbuffer, 1, 4, f);
fclose(f);
free(encodedbuffer);
avcodec_close(c);
av_free(c);
__android_log_write(ANDROID_LOG_INFO, "record","Video created ");
// last updation of 100%
sprintf(logdatadata, "%d",100 );
jstring jstrBuf = (*pEnv)->NewStringUTF(pEnv, logdatadata);
(*pEnv)->CallObjectMethod(pEnv, pObj, mid,jstrBuf);
}
AVFrame* OpenImage(const char* imageFileName,int w,int h)
{
AVFrame *pFrame;
AVCodec *pCodec ;
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
uint8_t *buffer;
int frameFinished,framesNumber = 0,retval = -1,numBytes=0;
AVPacket packet;
char logdatadata[100];
//__android_log_write(ANDROID_LOG_INFO, "OpenImage",imageFileName);
if(av_open_input_file(&pFormatCtx, imageFileName, NULL, 0, NULL)!=0)
//if(avformat_open_input(&pFormatCtx,imageFileName,NULL,NULL)!=0)
{
__android_log_write(ANDROID_LOG_INFO, "record",
"Can't open image file ");
return NULL;
}
pCodecCtx = pFormatCtx->streams[0]->codec;
pCodecCtx->width = w;
pCodecCtx->height = h;
pCodecCtx->pix_fmt = PIX_FMT_YUV420P;
// Find the decoder for the video stream
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (!pCodec)
{
__android_log_write(ANDROID_LOG_INFO, "record",
"Can't open image file ");
return NULL;
}
pFrame = avcodec_alloc_frame();
numBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
sprintf(logdatadata, "numBytes = %d",numBytes);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
retval = avpicture_fill((AVPicture *) pFrame, buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
// Open codec
if(avcodec_open(pCodecCtx, pCodec)<0)
{
__android_log_write(ANDROID_LOG_INFO, "record","Could not open codec");
return NULL;
}
if (!pFrame)
{
__android_log_write(ANDROID_LOG_INFO, "record","Can't allocate memory for AVFrame\n");
return NULL;
}
int readval = -5;
while (readval = av_read_frame(pFormatCtx, &packet) >= 0)
{
if(packet.stream_index != 0)
continue;
int ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
sprintf(logdatadata, "avcodec_decode_video2 ret = %d",ret);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
if (ret > 0)
{
__android_log_write(ANDROID_LOG_INFO, "record","Frame is decoded\n");
pFrame->quality = 4;
av_free_packet(&packet);
av_close_input_file(pFormatCtx);
return pFrame;
}
else
{
__android_log_write(ANDROID_LOG_INFO, "record","error while decoding frame \n");
}
}
sprintf(logdatadata, "readval = %d",readval);
__android_log_write(ANDROID_LOG_INFO, "record",logdatadata);
}

The generate method takes a list of strings (paths to images) and converts them to a video; the OpenImage method is responsible for converting a single image to an AVFrame.
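For reference: writing the encoder output straight to a file with fwrite, as generate does, produces a raw elementary stream, and naming the file .mp4 or .3gp does not make it a real container that Android's player can open; the frames need to go through a muxer. A minimal sketch with the same-era libavformat API (error handling omitted; this is an illustrative outline reusing c, gVideoFileName, encodedbuffer and out_size from generate, not a drop-in fix):

#include <libavformat/avformat.h>

// Set up a 3GP muxer around the encoder configured in generate().
AVOutputFormat *ofmt = av_guess_format("3gp", NULL, NULL);
AVFormatContext *oc = avformat_alloc_context();
oc->oformat = ofmt;
AVStream *st = av_new_stream(oc, 0);      // one video stream
st->codec->codec_id   = CODEC_ID_H263;
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->width      = c->width;
st->codec->height     = c->height;
st->codec->time_base  = c->time_base;
st->codec->bit_rate   = c->bit_rate;
avio_open(&oc->pb, gVideoFileName, AVIO_FLAG_WRITE);
av_write_header(oc);                      // writes the container header

// Per encoded frame, instead of fwrite(encodedbuffer, 1, out_size, f):
AVPacket pkt;
av_init_packet(&pkt);
pkt.stream_index = st->index;
pkt.data = encodedbuffer;
pkt.size = out_size;
if (c->coded_frame && c->coded_frame->key_frame)
    pkt.flags |= AV_PKT_FLAG_KEY;
av_interleaved_write_frame(oc, &pkt);

// After the last frame:
av_write_trailer(oc);
avio_close(oc->pb);

Note also that the code above asks avcodec_find_encoder for CODEC_ID_H264 while the commented-out settings (and the question title) say H.263; whichever codec is intended, the stream still needs to be muxed into the container the file extension promises.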
Android FFmpeg Video Player
11 March 2013, by Dilip
I want to play video using FFmpeg. For this I have used the code below, but although it opens the file it does not draw any frames, throwing an unhandled page fault exception.

Java code:
public class MainActivity extends Activity {
private static native void openFile();
private static native void drawFrame(Bitmap bitmap);
private static native void drawFrameAt(Bitmap bitmap, int secs);
private Bitmap mBitmap;
private int mSecs = 0;
static {
System.loadLibrary("ffmpegutils");
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// setContentView(new VideoView(this));
setContentView(R.layout.main);
mBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888);
openFile();
Button btn = (Button) findViewById(R.id.frame_adv);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
try {
drawFrame(mBitmap);
ImageView i = (ImageView) findViewById(R.id.frame);
i.setImageBitmap(mBitmap);
} catch (Exception e) {
e.printStackTrace();
}
}
});
    }
}

JNI code:
#include <jni.h>
#include <string.h>
#include <stdio.h>
#include <android/log.h>
#include <android/bitmap.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#define LOG_TAG "FFMPEGSample"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
/* Cheat to keep things simple and just use some globals. */
AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVFrame *pFrame;
AVFrame *pFrameRGB;
int videoStream;
/*
* Write a frame worth of video (in pFrame) into the Android bitmap
* described by info using the raw pixel buffer. It's a very inefficient
* draw routine, but it's easy to read. Relies on the format of the
* bitmap being 8bits per color component plus an 8bit alpha channel.
*/
static void fill_bitmap(AndroidBitmapInfo* info, void *pixels, AVFrame *pFrame) {
uint8_t *frameLine;
int yy;
for (yy = 0; yy < info->height; yy++) {
uint8_t* line = (uint8_t*) pixels;
frameLine = (uint8_t *) pFrame->data[0] + (yy * pFrame->linesize[0]);
int xx;
for (xx = 0; xx < info->width; xx++) {
int out_offset = xx * 4;
int in_offset = xx * 3;
line[out_offset] = frameLine[in_offset];
line[out_offset + 1] = frameLine[in_offset + 1];
line[out_offset + 2] = frameLine[in_offset + 2];
line[out_offset + 3] = 0;
}
pixels = (char*) pixels + info->stride;
}
}
void Java_com_churnlabs_ffmpegsample_MainActivity_openFile(JNIEnv * env,
jobject this) {
int ret;
int err;
int i;
AVCodec *pCodec;
uint8_t *buffer;
int numBytes;
av_register_all();
LOGE("Registered formats***********************************");
err = av_open_input_file(&pFormatCtx, "file:///mnt/sdcard/android.3gp",
NULL, 0, NULL);
LOGE("Called open file***************************************************");
if (err != 0) {
LOGE(
"Couldn't open file****************************************************");
return;
}
LOGE(
"Opened file***********************************************************");
if (av_find_stream_info(pFormatCtx) < 0) {
LOGE(
"Unable to get stream info*****************************************");
return;
}
videoStream = -1;
for (i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codec->codec_type == CODEC_TYPE_VIDEO) {
videoStream = i;
break;
}
}
if (videoStream == -1) {
LOGE("Unable to find video stream");
return;
}
LOGI("Video stream is [%d]", videoStream);
pCodecCtx = pFormatCtx->streams[videoStream]->codec;
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL) {
LOGE("Unsupported codec**********************************************");
return;
}
if (avcodec_open(pCodecCtx, pCodec) < 0) {
LOGE("Unable to open codec***************************************");
return;
}
pFrame = avcodec_alloc_frame();
pFrameRGB = avcodec_alloc_frame();
LOGI("Video size is [%d x %d]", pCodecCtx->width, pCodecCtx->height);
numBytes = avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
pCodecCtx->height);
buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
avpicture_fill((AVPicture *) pFrameRGB, buffer, PIX_FMT_RGB24,
pCodecCtx->width, pCodecCtx->height);
}
void Java_com_churnlabs_ffmpegsample_MainActivity_drawFrame(JNIEnv * env,
jobject this, jstring bitmap) {
AndroidBitmapInfo info;
void* pixels;
int ret;
int err;
int i;
int frameFinished = 0;
AVPacket packet;
static struct SwsContext *img_convert_ctx;
int64_t seek_target;
if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
return;
}
LOGE(
"Checked on the bitmap*************************************************");
if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
}
LOGE(
"Grabbed the pixels*******************************************************");
i = 0;
while ((i == 0) && (av_read_frame(pFormatCtx, &packet) >= 0)) {
if (packet.stream_index == videoStream) {
avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
if (frameFinished) {
LOGE("packet pts %llu", packet.pts);
// This is much different than the tutorial, sws_scale
// replaces img_convert, but it's not a complete drop in.
// This version keeps the image the same size but swaps to
// RGB24 format, which works perfect for PPM output.
int target_width = 320;
int target_height = 240;
img_convert_ctx = sws_getContext(pCodecCtx->width,
pCodecCtx->height, pCodecCtx->pix_fmt, target_width,
target_height, PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL,
NULL);
if (img_convert_ctx == NULL) {
LOGE("could not initialize conversion context\n");
return;
}
sws_scale(img_convert_ctx,
(const uint8_t* const *) pFrame->data, pFrame->linesize,
0, pCodecCtx->height, pFrameRGB->data,
pFrameRGB->linesize);
// save_frame(pFrameRGB, target_width, target_height, i);
fill_bitmap(&info, pixels, pFrameRGB);
i = 1;
}
}
av_free_packet(&packet);
}
AndroidBitmap_unlockPixels(env, bitmap);
}
int seek_frame(int tsms) {
int64_t frame;
frame = av_rescale(tsms, pFormatCtx->streams[videoStream]->time_base.den,
pFormatCtx->streams[videoStream]->time_base.num);
frame /= 1000;
if (avformat_seek_file(pFormatCtx, videoStream, 0, frame, frame,
AVSEEK_FLAG_FRAME) < 0) {
return 0;
}
avcodec_flush_buffers(pCodecCtx);
return 1;
}

Log trace:
D/PrintK (    0): <6>AUO_TOUCH: ts_irqHandler: before disable_irq()
D/PrintK ( 57): <6>AUO_TOUCH: ts_irqWorkHandler: P1(313,750),P2(0,0)
D/PrintK ( 0): <6>AUO_TOUCH: ts_irqHandler: before disable_irq()
D/PrintK ( 57): <6>AUO_TOUCH: ts_irqWorkHandler: P1(0,0),P2(0,0)
E/FFMPEGSample( 2882): Checked on the bitmap*************************************************
E/FFMPEGSample( 2882): Grabbed the pixels*******************************************************
E/FFMPEGSample( 2882): packet pts 0
F/PrintK ( 2882): <2>Exception!!! bs.ffmpegsample: unhandled page fault (11) at 0x0000000c, code 0x017
F/PrintK ( 2882): <2>Exception!!! bs.ffmpegsample: unhandled page fault (11) at 0x0000000c, code 0x017
I/DEBUG ( 86): *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
F/DEBUG ( 86): *** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
I/DEBUG ( 86): Build fingerprint: 'dell/streak/streak/8x50:2.2.2/FRG83G/eng.cmbuild.20110317.163900:user/release-keys'
I/DEBUG ( 86): Exception!!! pid: 2882, tid: 2882 >>> com.churnlabs.ffmpegsample <<<
F/DEBUG ( 86): Exception!!! pid: 2882, tid: 2882 >>> com.churnlabs.ffmpegsample <<<
I/DEBUG ( 86): signal 11 (SIGSEGV), fault addr 0000000c
F/DEBUG ( 86): signal 11 (SIGSEGV), fault addr 0000000c
I/DEBUG ( 86): r0 00000070 r1 00000000 r2 0024fca8 r3 afd42328
F/DEBUG ( 86): r0 00000070 r1 00000000 r2 0024fca8 r3 afd42328
I/DEBUG ( 86): r4 00000000 r5 00000000 r6 0000062c r7 0000a000
F/DEBUG ( 86): r4 00000000 r5 00000000 r6 0000062c r7 0000a000
I/DEBUG ( 86): r8 be9794f0 r9 428ab9d8 10 00000003 fp be979830
F/DEBUG ( 86): r8 be9794f0 r9 428ab9d8 10 00000003 fp be979830
I/DEBUG ( 86): ip ffffff90 sp be979448 lr afd0c633 pc afd0c320 cpsr 80000030
F/DEBUG ( 86): ip ffffff90 sp be979448 lr afd0c633 pc afd0c320 cpsr 80000030
I/DEBUG ( 86): d0 6472656767756265 d1 0000000000000000
I/DEBUG ( 86): d2 0000000000000000 d3 0000000044480000
I/DEBUG ( 86): d4 8000000000000000 d5 000000003f800000
I/DEBUG ( 86): d6 0000000000000000 d7 4448000043f00000
I/DEBUG ( 86): d8 0000000000000000 d9 0000000000000000
I/DEBUG ( 86): d10 0000000000000000 d11 0000000000000000
I/DEBUG ( 86): d12 0000000000000000 d13 0000000000000000
I/DEBUG ( 86): d14 0000000000000000 d15 0000000000000000
I/DEBUG ( 86): d16 0000000000000000 d17 0000000000000000
I/DEBUG ( 86): d18 0000000000000000 d19 0000000000000000
I/DEBUG ( 86): d20 3ff0000000000000 d21 8000000000000000
I/DEBUG ( 86): d22 0000000000000000 d23 0000000500010004
I/DEBUG ( 86): d24 0101010101010101 d25 0000000000000000
I/DEBUG ( 86): d26 0000000000000000 d27 0000000000000000
I/DEBUG ( 86): d28 0000000000000000 d29 3ff0000000000000
I/DEBUG ( 86): d30 0000000000000000 d31 3ff0000000000000
I/DEBUG ( 86): scr 80000012
I/DEBUG ( 86):
I/DEBUG ( 86): #00 pc 0000c320 /system/lib/libc.so
F/DEBUG ( 86): #00 pc 0000c320 /system/lib/libc.so
I/DEBUG ( 86): #01 pc 0000c62e /system/lib/libc.so
F/DEBUG ( 86): #01 pc 0000c62e /system/lib/libc.so
I/DEBUG ( 86): #02 pc 0000cd3e /system/lib/libc.so
F/DEBUG ( 86): #02 pc 0000cd3e /system/lib/libc.so
I/DEBUG ( 86): #03 pc 0002d2c4 /system/lib/libskia.so
F/DEBUG ( 86): #03 pc 0002d2c4 /system/lib/libskia.so
I/DEBUG ( 86): #04 pc 000693ec /system/lib/libskia.so
F/DEBUG ( 86): #04 pc 000693ec /system/lib/libskia.so
I/DEBUG ( 86): #05 pc 00064d70 /system/lib/libskia.so
F/DEBUG ( 86): #05 pc 00064d70 /system/lib/libskia.so
I/DEBUG ( 86): #06 pc 0004dea8 /system/lib/libandroid_runtime.so
F/DEBUG ( 86): #06 pc 0004dea8 /system/lib/libandroid_runtime.so
I/DEBUG ( 86): #07 pc 00016df4 /system/lib/libdvm.so
F/DEBUG ( 86): #07 pc 00016df4 /system/lib/libdvm.so
I/DEBUG ( 86): #08 pc 00042904 /system/lib/libdvm.so
F/DEBUG ( 86): #08 pc 00042904 /system/lib/libdvm.so
I/DEBUG ( 86): #09 pc 0001bd58 /system/lib/libdvm.so
F/DEBUG ( 86): #09 pc 0001bd58 /system/lib/libdvm.so
I/DEBUG ( 86): #10 pc 00022550 /system/lib/libdvm.so
F/DEBUG ( 86): #10 pc 00022550 /system/lib/libdvm.so
I/DEBUG ( 86): #11 pc 000213f0 /system/lib/libdvm.so
F/DEBUG ( 86): #11 pc 000213f0 /system/lib/libdvm.so
I/DEBUG ( 86): #12 pc 00058c4a /system/lib/libdvm.so
F/DEBUG ( 86): #12 pc 00058c4a /system/lib/libdvm.so
I/DEBUG ( 86): #13 pc 00060e72 /system/lib/libdvm.so
F/DEBUG ( 86): #13 pc 00060e72 /system/lib/libdvm.so
I/DEBUG ( 86): #14 pc 0001bd58 /system/lib/libdvm.so
F/DEBUG ( 86): #14 pc 0001bd58 /system/lib/libdvm.so
I/DEBUG ( 86): #15 pc 00022550 /system/lib/libdvm.so
F/DEBUG ( 86): #15 pc 00022550 /system/lib/libdvm.so
I/DEBUG ( 86): #16 pc 000213f0 /system/lib/libdvm.so
F/DEBUG ( 86): #16 pc 000213f0 /system/lib/libdvm.so
I/DEBUG ( 86): #17 pc 00058a90 /system/lib/libdvm.so
F/DEBUG ( 86): #17 pc 00058a90 /system/lib/libdvm.so
I/DEBUG ( 86): #18 pc 0004525e /system/lib/libdvm.so
F/DEBUG ( 86): #18 pc 0004525e /system/lib/libdvm.so
I/DEBUG ( 86): #19 pc 0002e574 /system/lib/libandroid_runtime.so
F/DEBUG ( 86): #19 pc 0002e574 /system/lib/libandroid_runtime.so
I/DEBUG ( 86): #20 pc 0002f5f6 /system/lib/libandroid_runtime.so
F/DEBUG ( 86): #20 pc 0002f5f6 /system/lib/libandroid_runtime.so
I/DEBUG ( 86): #21 pc 00008ca8 /system/bin/app_process
F/DEBUG ( 86): #21 pc 00008ca8 /system/bin/app_process
I/DEBUG ( 86): #22 pc 0000d3d0 /system/lib/libc.so
F/DEBUG ( 86): #22 pc 0000d3d0 /system/lib/libc.so
I/DEBUG ( 86):
I/DEBUG ( 86): code around pc:
I/DEBUG ( 86): afd0c300 19d94f56 42ba690f 80a4f0c0 94001814
I/DEBUG ( 86): afd0c310 f08042a2 68d1809f 42916994 6895d00e
I/DEBUG ( 86): afd0c320 429668ee 8096f040 4296688e 8092f040
I/DEBUG ( 86): afd0c330 bf2442bd 608d60e9 e08bd21b b1116951
I/DEBUG ( 86): afd0c340 0514f102 6911e007 f102b191 e0020510
I/DEBUG ( 86):
I/DEBUG ( 86): code around lr:
I/DEBUG ( 86): afd0c610 60f11008 f8c1608e 4e31c00c f10319a1
I/DEBUG ( 86): afd0c620 608a0608 e04b614d b1b2684a f7ff4628
I/DEBUG ( 86): afd0c630 e00ffe23 0f41f115 f04fbf88 d80c35ff
I/DEBUG ( 86): afd0c640 350b4927 0507f025 68431860 4628b12b
I/DEBUG ( 86): afd0c650 fc1cf7ff 28004606 4e21d132 689119a2
I/DEBUG ( 86):
I/DEBUG ( 86): stack:
I/DEBUG ( 86): be979408 000001e0
I/DEBUG ( 86): be97940c be979494 [stack]
I/DEBUG ( 86): be979410 be979438 [stack]
I/DEBUG ( 86): be979414 be979478 [stack]
I/DEBUG ( 86): be979418 0012f484 [heap]
I/DEBUG ( 86): be97941c be979428 [stack]
I/DEBUG ( 86): be979420 00000000
I/DEBUG ( 86): be979424 ab163cec /system/lib/libskia.so
I/DEBUG ( 86): be979428 3f800000
I/DEBUG ( 86): be97942c 80000000 /system/lib/libicudata.so
I/DEBUG ( 86): be979430 00000000
I/DEBUG ( 86): be979434 80000000 /system/lib/libicudata.so
I/DEBUG ( 86): be979438 3f800000
I/DEBUG ( 86): be97943c 00000000
I/DEBUG ( 86): be979440 df002777
I/DEBUG ( 86): be979444 e3a070ad
I/DEBUG ( 86): #00 be979448 0024fd18 [heap]
I/DEBUG ( 86): be97944c afd4372c /system/lib/libc.so
I/DEBUG ( 86): be979450 000000c5
I/DEBUG ( 86): be979454 afd42328 /system/lib/libc.so
I/DEBUG ( 86): be979458 00000070
I/DEBUG ( 86): be97945c 0000062c
I/DEBUG ( 86): be979460 00000003
I/DEBUG ( 86): be979464 afd0c633 /system/lib/libc.so
I/DEBUG ( 86): #01 be979468 be9794c8 [stack]
I/DEBUG ( 86): be97946c 00000000
I/DEBUG ( 86): be979470 002576bc [heap]
I/DEBUG ( 86): be979474 ab163d2c /system/lib/libskia.so
I/DEBUG ( 86): be979478 00000000
I/DEBUG ( 86): be97947c 00000000
I/DEBUG ( 86): be979480 44480000 /system/framework/framework-res.apk
I/DEBUG ( 86): be979484 00000068
I/DEBUG ( 86): be979488 00000002
I/DEBUG ( 86): be97948c 00000068
I/DEBUG ( 86): be979490 00000003
I/DEBUG ( 86): be979494 afd0cd41 /system/lib/libc.so
E/Parcel ( 841): Reading a NULL string not supported here.

Can anyone please suggest where I'm going wrong?
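One place worth checking, given that the crash happens in native code right after "packet pts 0": fill_bitmap writes 4 bytes per pixel and assumes the bitmap really is ARGB_8888, and drawFrame declares its bitmap parameter as jstring even though Java passes a Bitmap (JNI will not catch that mismatch). A small defensive wrapper, as a sketch (the early-return policy, and the guess that a format or NULL-data mismatch is the cause, are assumptions):

#include <android/bitmap.h>

// Validate the bitmap and the scaled frame before touching raw pixels.
static int safe_fill(JNIEnv *env, jobject bitmap, AVFrame *frameRGB) {
    AndroidBitmapInfo info;
    void *pixels;
    if (AndroidBitmap_getInfo(env, bitmap, &info) < 0)
        return -1;                                  // not a valid bitmap object
    if (info.format != ANDROID_BITMAP_FORMAT_RGBA_8888)
        return -2;                                  // fill_bitmap writes 4 bpp
    if (frameRGB == NULL || frameRGB->data[0] == NULL)
        return -3;                                  // sws_scale target missing
    if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0)
        return -4;
    fill_bitmap(&info, pixels, frameRGB);
    AndroidBitmap_unlockPixels(env, bitmap);
    return 0;
}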