Advanced search

Media (1)

Word: - Tags -/copyleft

Other articles (69)

  • Customise by adding your logo, banner or background image

    5 September 2013, by

    Some themes support three customisation elements: adding a logo; adding a banner; adding a background image.

  • Submit bugs and patches

    13 April 2011

    Unfortunately, software is never perfect.
    If you think you have found a bug, report it using our ticket system. Please help us fix it by providing the following information: the browser you are using, including the exact version; as precise an explanation of the problem as possible; if possible, the steps that lead to the problem; a link to the site/page in question.
    If you think you have solved the bug, fill in a ticket and attach a corrective patch to it.
    You may also (...)

  • Publishing on MédiaSpip

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, if your MédiaSpip installation is at version 0.2 or higher. If necessary, contact the administrator of your MédiaSpip to find out.

On other sites (9239)

  • Make Qt Player codec independent

    16 March 2016, by Tejas Virpariya

    I am developing a Qt application that can play more than one video file, using the code below.

    QMediaPlayer *player = new QMediaPlayer;
    QString fileName = "C:/username/test.h264";
    player->setMedia(QUrl::fromLocalFile(fileName));

    Initially I could not play all types of video file, so I installed a codec pack on my system. Now, whenever my player starts, the external codec's decoder starts too, and my CPU usage gets very high (see the image below).

    (screenshot showing the high CPU usage)

    In the bottom-right corner of that image you can see the LAV icon (red label), which shows that the external decoder has started.

    Now I want to make my Qt player codec-independent. I know my player only has to play .h264 files, so I only need an H.264 decoder, and since there is no audio I do not need an audio decoder at all.

    As far as I know, QMediaPlayer starts a decoder when one is available; correct me if I am wrong. What can I do to avoid the external decoder, decode the frames internally, and still play successfully?
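
    For what it's worth, one way to stay independent of system codecs is to decode the stream yourself with libavcodec and render the frames, instead of letting QMediaPlayer pick a backend decoder. A minimal sketch, assuming the same old-style FFmpeg API as the code below (error handling trimmed, and the rendering path is only described in comments):

    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    /* Open only the built-in H.264 decoder; no audio decoder is touched. */
    static AVCodecContext *open_h264_decoder(void)
    {
       avcodec_register_all();
       AVCodec *codec = avcodec_find_decoder(AV_CODEC_ID_H264);
       if (!codec)
           return NULL;
       AVCodecContext *ctx = avcodec_alloc_context3(codec);
       if (!ctx || avcodec_open2(ctx, codec, NULL) < 0)
           return NULL;
       /* Feed raw Annex-B packets to avcodec_decode_video2() and display
        * the resulting AVFrames yourself (e.g. convert YUV420P to RGB with
        * libswscale and paint it into a QImage). */
       return ctx;
    }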

    EDIT: code for audio decoding using FFmpeg

    FFmpegAudio.pro

    TARGET = fooAudioFFMPEG
    QT       += core gui qml quick widgets
    TEMPLATE = app
    SOURCES += main.cpp \
       mainwindow.cpp
    HEADERS += mainwindow.h \
       wrapper.h
    FORMS += mainwindow.ui
    QMAKE_CXXFLAGS += -D__STDC_CONSTANT_MACROS

    LIBS += -pthread
    LIBS += -L/usr/local/lib
    LIBS += -lavdevice
    LIBS += -lavfilter
    LIBS += -lpostproc
    LIBS += -lavformat
    LIBS += -lavcodec
    LIBS += -ldl
    LIBS += -lXfixes
    LIBS += -lXext
    LIBS += -lX11
    LIBS += -lasound
    LIBS += -lSDL
    LIBS += -lx264
    LIBS += -lvpx
    LIBS += -lvorbisenc
    LIBS += -lvorbis
    LIBS += -logg
    LIBS += -lopencore-amrwb
    LIBS += -lopencore-amrnb
    LIBS += -lmp3lame
    LIBS += -lfaac
    LIBS += -lz
    LIBS += -lrt
    LIBS += -lswscale
    LIBS += -lavutil
    LIBS += -lm

    mainwindow.h

    #ifndef MAINWINDOW_H
    #define MAINWINDOW_H

    #include <QMainWindow>

    namespace Ui {
       class MainWindow;
    }

    class MainWindow : public QMainWindow {
       Q_OBJECT
    public:
       MainWindow(QWidget *parent = 0);
       ~MainWindow();

    protected:
       void changeEvent(QEvent *e);

    private:
       Ui::MainWindow *ui;

    private slots:
       void on_pushButton_clicked();
    };

    #endif // MAINWINDOW_H

    wrapper.h

    #ifndef WRAPPER_H_
    #define WRAPPER_H_

    #include <math.h>

    #include <libavutil/opt.h>
    #include <libavcodec/avcodec.h>
    #include <libavutil/channel_layout.h>
    #include <libavutil/common.h>
    #include <libavutil/imgutils.h>
    #include <libavutil/mathematics.h>
    #include <libavutil/samplefmt.h>

    #define INBUF_SIZE 4096
    #define AUDIO_INBUF_SIZE 20480
    #define AUDIO_REFILL_THRESH 4096
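
    /* Note: this listing uses the old libavcodec API (avcodec_alloc_frame,
     * avcodec_encode_audio2/avcodec_decode_audio4, av_free_packet). Newer
     * FFmpeg releases replace these with av_frame_alloc(),
     * avcodec_send_frame()/avcodec_receive_packet() and av_packet_unref(). */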



    /* check that a given sample format is supported by the encoder */
    static int check_sample_fmt(AVCodec *codec, enum AVSampleFormat sample_fmt)
    {
       const enum AVSampleFormat *p = codec->sample_fmts;

       while (*p != AV_SAMPLE_FMT_NONE) {
           if (*p == sample_fmt)
               return 1;
           p++;
       }
       return 0;
    }

    /* just pick the highest supported samplerate */
    static int select_sample_rate(AVCodec *codec)
    {
       const int *p;
       int best_samplerate = 0;

       if (!codec->supported_samplerates)
           return 44100;

       p = codec->supported_samplerates;
       while (*p) {
           best_samplerate = FFMAX(*p, best_samplerate);
           p++;
       }
       return best_samplerate;
    }

    /* select layout with the highest channel count */
    static int select_channel_layout(AVCodec *codec)
    {
       const uint64_t *p;
       uint64_t best_ch_layout = 0;
       int best_nb_channels   = 0;

       if (!codec->channel_layouts)
           return AV_CH_LAYOUT_STEREO;

       p = codec->channel_layouts;
       while (*p) {
           int nb_channels = av_get_channel_layout_nb_channels(*p);

           if (nb_channels > best_nb_channels) {
               best_ch_layout    = *p;
               best_nb_channels  = nb_channels;
           }
           p++;
       }
       return best_ch_layout;
    }

    /*
    * Audio encoding example
    */
    static void audio_encode_example(const char *filename)
    {
       AVCodec *codec;
       AVCodecContext *c= NULL;
       AVFrame *frame;
       AVPacket pkt;
       int i, j, k, ret, got_output;
       int buffer_size;
       FILE *f;
       uint16_t *samples;
       float t, tincr;

       printf("Encode audio file %s\n", filename);

       /* find the MP2 encoder */
       codec = avcodec_find_encoder(AV_CODEC_ID_MP2);
       if (!codec) {
           fprintf(stderr, "Codec not found\n");
           exit(1);
       }

       c = avcodec_alloc_context3(codec);
       if (!c) {
           fprintf(stderr, "Could not allocate audio codec context\n");
           exit(1);
       }

       /* put sample parameters */
       c->bit_rate = 64000;

       /* check that the encoder supports s16 pcm input */
       c->sample_fmt = AV_SAMPLE_FMT_S16;
       if (!check_sample_fmt(codec, c->sample_fmt)) {
           fprintf(stderr, "Encoder does not support sample format %s",
                   av_get_sample_fmt_name(c->sample_fmt));
           exit(1);
       }

       /* select other audio parameters supported by the encoder */
       c->sample_rate    = select_sample_rate(codec);
       c->channel_layout = select_channel_layout(codec);
       c->channels       = av_get_channel_layout_nb_channels(c->channel_layout);

       /* open it */
       if (avcodec_open2(c, codec, NULL) < 0) {
           fprintf(stderr, "Could not open codec\n");
           exit(1);
       }

       f = fopen(filename, "wb");
       if (!f) {
           fprintf(stderr, "Could not open %s\n", filename);
           exit(1);
       }

       /* frame containing input raw audio */
       frame = avcodec_alloc_frame();
       if (!frame) {
           fprintf(stderr, "Could not allocate audio frame\n");
           exit(1);
       }

       frame->nb_samples     = c->frame_size;
       frame->format         = c->sample_fmt;
       frame->channel_layout = c->channel_layout;

       /* the codec gives us the frame size, in samples,
        * we calculate the size of the samples buffer in bytes */
       buffer_size = av_samples_get_buffer_size(NULL, c->channels, c->frame_size,
                                                c->sample_fmt, 0);
       samples = (uint16_t *)av_malloc(buffer_size);
       if (!samples) {
           fprintf(stderr, "Could not allocate %d bytes for samples buffer\n",
                   buffer_size);
           exit(1);
       }
       /* setup the data pointers in the AVFrame */
       ret = avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
                                      (const uint8_t*)samples, buffer_size, 0);
       if (ret < 0) {
           fprintf(stderr, "Could not setup audio frame\n");
           exit(1);
       }

       /* encode a single tone sound */
       t = 0;
       tincr = 2 * M_PI * 440.0 / c->sample_rate;
       for (i = 0; i < 200; i++) {
           av_init_packet(&pkt);
           pkt.data = NULL; // packet data will be allocated by the encoder
           pkt.size = 0;

           for (j = 0; j < c->frame_size; j++) {
               samples[2*j] = (int)(sin(t) * 10000);

               for (k = 1; k < c->channels; k++)
                   samples[2*j + k] = samples[2*j];
               t += tincr;
           }
           /* encode the samples */
           ret = avcodec_encode_audio2(c, &pkt, frame, &got_output);
           if (ret < 0) {
               fprintf(stderr, "Error encoding audio frame\n");
               exit(1);
           }
           if (got_output) {
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }

       /* get the delayed frames */
       for (got_output = 1; got_output; i++) {
           ret = avcodec_encode_audio2(c, &pkt, NULL, &got_output);
           if (ret < 0) {
               fprintf(stderr, "Error encoding frame\n");
               exit(1);
           }

           if (got_output) {
               fwrite(pkt.data, 1, pkt.size, f);
               av_free_packet(&pkt);
           }
       }
       fclose(f);

       av_freep(&samples);
       avcodec_free_frame(&frame);
       avcodec_close(c);
       av_free(c);
    }

    /*
    * Audio decoding.
    */
    static void audio_decode_example(const char *outfilename, const char *filename)
    {
       AVCodec *codec;
       AVCodecContext *c= NULL;
       int len;
       FILE *f, *outfile;
       uint8_t inbuf[AUDIO_INBUF_SIZE + FF_INPUT_BUFFER_PADDING_SIZE];
       AVPacket avpkt;
       AVFrame *decoded_frame = NULL;

       av_init_packet(&avpkt);

       printf("Decode audio file %s to %s\n", filename, outfilename);

       /* find the mpeg audio decoder */
       codec = avcodec_find_decoder(AV_CODEC_ID_MP2);
       if (!codec) {
           fprintf(stderr, "Codec not found\n");
           exit(1);
       }

       c = avcodec_alloc_context3(codec);
       if (!c) {
           fprintf(stderr, "Could not allocate audio codec context\n");
           exit(1);
       }

       /* open it */
       if (avcodec_open2(c, codec, NULL) < 0) {
           fprintf(stderr, "Could not open codec\n");
           exit(1);
       }

       f = fopen(filename, "rb");
       if (!f) {
           fprintf(stderr, "Could not open %s\n", filename);
           exit(1);
       }
       outfile = fopen(outfilename, "wb");
       if (!outfile) {
           av_free(c);
           exit(1);
       }

       /* decode until eof */
       avpkt.data = inbuf;
       avpkt.size = fread(inbuf, 1, AUDIO_INBUF_SIZE, f);

       while (avpkt.size > 0) {
           int got_frame = 0;

           if (!decoded_frame) {
               if (!(decoded_frame = avcodec_alloc_frame())) {
                   fprintf(stderr, "Could not allocate audio frame\n");
                   exit(1);
               }
           } else
               avcodec_get_frame_defaults(decoded_frame);

           len = avcodec_decode_audio4(c, decoded_frame, &got_frame, &avpkt);
           if (len < 0) {
               fprintf(stderr, "Error while decoding\n");
               exit(1);
           }
           if (got_frame) {
               /* if a frame has been decoded, output it */
               int data_size = av_samples_get_buffer_size(NULL, c->channels,
                                                          decoded_frame->nb_samples,
                                                          c->sample_fmt, 1);
               fwrite(decoded_frame->data[0], 1, data_size, outfile);
           }
           avpkt.size -= len;
           avpkt.data += len;
           avpkt.dts =
           avpkt.pts = AV_NOPTS_VALUE;
           if (avpkt.size < AUDIO_REFILL_THRESH) {
               /* Refill the input buffer, to avoid trying to decode
                * incomplete frames. Instead of this, one could also use
                * a parser, or use a proper container format through
                * libavformat. */
               memmove(inbuf, avpkt.data, avpkt.size);
               avpkt.data = inbuf;
               len = fread(avpkt.data + avpkt.size, 1,
                           AUDIO_INBUF_SIZE - avpkt.size, f);
               if (len > 0)
                   avpkt.size += len;
           }
       }

       fclose(outfile);
       fclose(f);

       avcodec_close(c);
       av_free(c);
       avcodec_free_frame(&decoded_frame);
    }

    /*
    * Main WRAPPER function
    */
    void service(){


       /* register all the codecs */
       avcodec_register_all();


       audio_encode_example("test.mp2");
       audio_decode_example("test.sw", "test.mp2");

    }

    #endif

    main.cpp

    #include <QApplication>
    #include "mainwindow.h"

    extern "C"{
       #include "wrapper.h"
    }

    int main(int argc, char *argv[])
    {
       service(); //calling the function service inside the wrapper

       QApplication a(argc, argv);
       MainWindow w;
       w.show();
       return a.exec();
    }

    mainwindow.cpp

    #include "mainwindow.h"
    #include "ui_mainwindow.h"

    MainWindow::MainWindow(QWidget *parent) :
       QMainWindow(parent),
       ui(new Ui::MainWindow)
    {
       ui->setupUi(this);
    }

    MainWindow::~MainWindow()
    {
       delete ui;
    }

    void MainWindow::changeEvent(QEvent *e)
    {
       QMainWindow::changeEvent(e);
       switch (e->type()) {
       case QEvent::LanguageChange:
           ui->retranslateUi(this);
           break;
       default:
           break;
       }
    }

    void MainWindow::on_pushButton_clicked()
    {
           this->close();
    }

    mainwindow.ui
    //Nothing important

    Thanks.

  • Use FFMPEG mux flv and send rtmp on IOS

    3 January 2017, by downloss

    I would like to use the iPhone camera and microphone to capture data and push it out as an FFmpeg RTMP stream.

    The following function captures the data on iOS:

    - (void)captureOutput:(AVCaptureOutput *)captureOutput  didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
    {    
       if (connection == videoCaptureConnection)
       {
           [manager264 encoderToH264:sampleBuffer];
       }
       else if (connection == audioCaptureConnection)
       {
           [manager264 encoderToMP3:sampleBuffer];
       }
    }

    FFmpeg initialization:

    - (int)setX264Resource
    {
       Global_Variables_VVV = (AppDelegate *)[[UIApplication sharedApplication] delegate];
       avformat_network_init();
       av_register_all();

       pFormatCtx = avformat_alloc_context();
       avformat_alloc_output_context2(&pFormatCtx, NULL, "flv", out_file);
       fmt = pFormatCtx->oformat;

       //Open output URL
       if (avio_open(&pFormatCtx->pb, out_file, AVIO_FLAG_READ_WRITE) < 0)
       {
           printf("Failed to open output file! \n");
           return -1;
       }

       /* Add the audio and video streams using the default format codecs
        * and initialize the codecs. */
       video_st = NULL;
       audio_st = NULL;
       if (fmt->video_codec != AV_CODEC_ID_NONE) {
           video_st = add_stream(pFormatCtx, &pCodec, AV_CODEC_ID_H264);
       }
       if (fmt->audio_codec != AV_CODEC_ID_NONE) {
           audio_st = add_stream(pFormatCtx, &aCodec, AV_CODEC_ID_MP3);
       }

       /* Now that all the parameters are set, we can open the audio and
        * video codecs and allocate the necessary encode buffers. */
       if (video_st)
           [self open_video:pFormatCtx avcodec:pCodec avstream:video_st];

       if (audio_st)
           [self open_audio:pFormatCtx avcodec:aCodec avstream:audio_st];

       // Show some Information
       av_dump_format(pFormatCtx, 0, out_file, 1);

       //Write File Header
       avformat_write_header(pFormatCtx, NULL);

       av_new_packet(&pkt, picture_size);
       av_new_packet(&pkt2, picture_size);

       AVCodecContext *c = video_st->codec;

       y_size = c->width * c->height;

       if (pFrame)
           pFrame->pts = 0;

       if(aFrame)
       {
           aFrame->pts = 0;
       }

       return 0;
    }

    static AVStream *add_stream(AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
    {
       AVCodecContext *c;
       AVStream *st;
       /* find the encoder */
       *codec = avcodec_find_encoder(codec_id);
       if (!(*codec))
       {
           NSLog(@"Could not find encoder for '%s'\n",
             avcodec_get_name(codec_id));
       }
       st = avformat_new_stream(oc, *codec);
       if (!st)
       {
           NSLog(@"Could not allocate stream\n");
       }
       st->id = oc->nb_streams-1;
       c = st->codec;
       switch ((*codec)->type)
       {
           case AVMEDIA_TYPE_AUDIO:
               c->codec_id = AV_CODEC_ID_MP3;
               c->codec_type = AVMEDIA_TYPE_AUDIO;
               c->channels = 1;

               c->sample_fmt = AV_SAMPLE_FMT_S16P;
               c->bit_rate = 128000;
               c->sample_rate = 44100;
               c->channel_layout = AV_CH_LAYOUT_MONO;
               break;
           case AVMEDIA_TYPE_VIDEO:
               c->codec_id = AV_CODEC_ID_H264;
               c->codec_type=AVMEDIA_TYPE_VIDEO;
               /* Resolution must be a multiple of two. */
               c->width    = 720;
               c->height   = 1280;
               /* timebase: This is the fundamental unit of time (in seconds) in terms
                * of which frame timestamps are represented. For fixed-fps content,
                * timebase should be 1/framerate and timestamp increments should be
                * identical to 1. */
               c->time_base.den = 30;
               c->time_base.num = 1;
               c->gop_size      = 15; /* emit one intra frame every twelve frames at most */
               c->pix_fmt       = PIX_FMT_YUV420P;
               c->max_b_frames = 0;
               c->bit_rate = 3000000;
               c->qmin = 10;
               c->qmax = 51;

               break;
           default:
               break;
       }
       /* Some formats want stream headers to be separate. */
       if (oc->oformat->flags & AVFMT_GLOBALHEADER)
           c->flags |= CODEC_FLAG_GLOBAL_HEADER;
       return st;
    }

    SampleBuffer will turn into H264 and pushed out RTMP Streaming

    - (void)encoderToH264:(CMSampleBufferRef)sampleBuffer
    {
       CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
       if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
       {
           UInt8 *bufferbasePtr = (UInt8 *)CVPixelBufferGetBaseAddress(imageBuffer);
           UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,0);
           UInt8 *bufferPtr1 = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,1);
           size_t buffeSize = CVPixelBufferGetDataSize(imageBuffer);
           size_t width = CVPixelBufferGetWidth(imageBuffer);
           size_t height = CVPixelBufferGetHeight(imageBuffer);
           size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
           size_t bytesrow0 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,0);
           size_t bytesrow1  = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,1);
           size_t bytesrow2 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,2);
           UInt8 *yuv420_data = (UInt8 *)malloc(width * height *3/ 2); // buffer to store YUV with layout YYYYYYYYUUVV


           /* convert NV12 data to YUV420*/
           UInt8 *pY = bufferPtr ;
           UInt8 *pUV = bufferPtr1;
           UInt8 *pU = yuv420_data + width*height;
           UInt8 *pV = pU + width*height/4;
           for (int i = 0; i < width * height / 2; i += 2) {
               pU[i / 2] = pUV[i];
               pV[i / 2] = pUV[i + 1];
           }
           memcpy(yuv420_data, pY, width * height);

           // Read raw YUV data
           picture_buf = yuv420_data;
           pFrame->data[0] = picture_buf;                // Y
           pFrame->data[1] = picture_buf + y_size;       // U
           pFrame->data[2] = picture_buf + y_size*5/4;   // V

           int got_picture = 0;

           // Encode
           pFrame->width = 720;
           pFrame->height = 1280;
           pFrame->format = PIX_FMT_YUV420P;

           AVCodecContext *c = video_st->codec;
           int ret = avcodec_encode_video2(c, &pkt, pFrame, &got_picture);
           if (ret < 0)
           {
               printf("Failed to encode! \n");
           }

           if (got_picture==1)
           {
               /* Compute current audio and video time. */
               video_time = video_st ? video_st->pts.val * av_q2d(video_st->time_base) : 0.0;
               pFrame->pts += av_rescale_q(1, video_st->codec->time_base, video_st->time_base);

               if(pkt.size != 0)
               {
                   printf("Succeed to encode frame: %5lld\tsize:%5d\n", pFrame->pts, pkt.size);
                   pkt.stream_index = video_st->index;
                   ret = av_write_frame(pFormatCtx, &pkt);
                   av_free_packet(&pkt);
               }
           }
           free(yuv420_data);
       }
       CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    }

    SampleBuffer will turn into MP3 and pushed out RTMP Streaming

    -(void)encoderToMP3:(CMSampleBufferRef)sampleBuffer
    {
       CMSampleTimingInfo timing_info;
       CMSampleBufferGetSampleTimingInfo(sampleBuffer, 0, &timing_info);
       double  pts=0;
       double  dts=0;
       AVCodecContext *c;
       int got_packet, ret;
       c = audio_st->codec;
       CMItemCount numSamples = CMSampleBufferGetNumSamples(sampleBuffer);

       NSUInteger channelIndex = 0;

       CMBlockBufferRef audioBlockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);

       size_t audioBlockBufferOffset = (channelIndex * numSamples * sizeof(SInt16));
       size_t lengthAtOffset = 0;
       size_t totalLength = 0;
       SInt16 *samples = NULL;
       CMBlockBufferGetDataPointer(audioBlockBuffer, audioBlockBufferOffset, &lengthAtOffset, &totalLength, (char **)(&samples));

       const AudioStreamBasicDescription *audioDescription = CMAudioFormatDescriptionGetStreamBasicDescription(CMSampleBufferGetFormatDescription(sampleBuffer));

       SwrContext *swr = swr_alloc();

       int in_smprt = (int)audioDescription->mSampleRate;
       av_opt_set_int(swr, "in_channel_layout",  AV_CH_LAYOUT_MONO, 0);
       av_opt_set_int(swr, "out_channel_layout", audio_st->codec->channel_layout,  0);

       av_opt_set_int(swr, "in_channel_count", audioDescription->mChannelsPerFrame,  0);
       av_opt_set_int(swr, "out_channel_count", 1,  0);

       av_opt_set_int(swr, "out_channel_layout", audio_st->codec->channel_layout,  0);
       av_opt_set_int(swr, "in_sample_rate",     audioDescription->mSampleRate,0);

       av_opt_set_int(swr, "out_sample_rate",    audio_st->codec->sample_rate,0);

       av_opt_set_sample_fmt(swr, "in_sample_fmt",  AV_SAMPLE_FMT_S16, 0);

       av_opt_set_sample_fmt(swr, "out_sample_fmt", audio_st->codec->sample_fmt,  0);

       swr_init(swr);
       uint8_t **input = NULL;
       int src_linesize;
       int in_samples = (int)numSamples;
       ret = av_samples_alloc_array_and_samples(&input, &src_linesize, audioDescription->mChannelsPerFrame, in_samples, AV_SAMPLE_FMT_S16P, 0);

       *input=(uint8_t*)samples;
       uint8_t *output=NULL;

       int out_samples = av_rescale_rnd(swr_get_delay(swr, in_smprt) +in_samples, (int)audio_st->codec->sample_rate, in_smprt, AV_ROUND_UP);

       av_samples_alloc(&output, NULL, audio_st->codec->channels, out_samples, audio_st->codec->sample_fmt, 0);
       in_samples = (int)numSamples;
       out_samples = swr_convert(swr, &output, out_samples, (const uint8_t **)input, in_samples);

       aFrame->nb_samples =(int) out_samples;

       ret = avcodec_fill_audio_frame(aFrame, audio_st->codec->channels, audio_st->codec->sample_fmt,
                                      (uint8_t *)output,
                                      (int) out_samples *
                                      av_get_bytes_per_sample(audio_st->codec->sample_fmt) *
                                      audio_st->codec->channels, 1);
       if (ret < 0)
       {
           fprintf(stderr, "Error fill audio frame: %s\n", av_err2str(ret));
       }
       aFrame->channel_layout = audio_st->codec->channel_layout;
       aFrame->channels=audio_st->codec->channels;
       aFrame->sample_rate= audio_st->codec->sample_rate;

       if (timing_info.presentationTimeStamp.timescale!=0)
           pts=(double) timing_info.presentationTimeStamp.value/timing_info.presentationTimeStamp.timescale;


       aFrame->pts = pts*audio_st->time_base.den;
       aFrame->pts = av_rescale_q(aFrame->pts, audio_st->time_base, audio_st->codec->time_base);

       ret = avcodec_encode_audio2(c, &pkt2, aFrame, &got_packet);

       if (ret < 0)
       {
           fprintf(stderr, "Error encoding audio frame: %s\n", av_err2str(ret));
       }
       swr_free(&swr);

       if (got_packet)
       {
           pkt2.stream_index = audio_st->index;        

           // Write the compressed frame to the media file.

           ret = av_interleaved_write_frame(pFormatCtx, &pkt2);
           if (ret != 0)
           {
               fprintf(stderr, "Error while writing audio frame: %s\n", av_err2str(ret));
               av_free_packet(&pkt2);
           }
       }
    }

    Soon "Broken pipe" problem occurs after execution.
    PTS is currently feeling is not adjusted, but do not know how to adjust the PTS.
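
    For reference, a common approach is to rebase every buffer's timestamp against the first captured one and rescale it into the stream's time base with av_rescale_q(), rather than multiplying the raw CMSampleBuffer clock by time_base.den. A hedged sketch, not a drop-in fix; start_time and pts_seconds are assumed variables (the first buffer's presentation time and the current buffer's presentation time, both in seconds):

       // Assumed: pts_seconds = timing_info.presentationTimeStamp.value /
       //          timing_info.presentationTimeStamp.timescale for this buffer.
       static double start_time = -1;
       if (start_time < 0)
           start_time = pts_seconds;

       // Express the elapsed time in microseconds, then rescale it into the
       // encoder's time base so audio and video share one clock starting at 0.
       int64_t elapsed_us = (int64_t)((pts_seconds - start_time) * 1000000.0);
       AVRational microseconds = {1, 1000000};
       aFrame->pts = av_rescale_q(elapsed_us, microseconds, audio_st->codec->time_base);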

    2016-03-09 16:57:41.058 PoliceCamPlayer[1004:193465] recordVideo....
    [libx264 @ 0x12f8b6e00] using cpu capabilities: ARMv8 NEON
    [libx264 @ 0x12f8b6e00] profile Constrained Baseline, level 3.1
    [libx264 @ 0x12f8b6e00] 264 - core 148 - H.264/MPEG-4 AVC codec - Copyleft 2003-2016 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=1:0:0 analyse=0x1:0x111 me=hex subme=2 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=2 lookahead_threads=2 sliced_threads=1 slices=2 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=15 keyint_min=1 scenecut=40 intra_refresh=0 rc=abr mbtree=0 bitrate=3000 ratetol=1.0 qcomp=0.60 qpmin=25 qpmax=51 qpstep=4 ip_ratio=1.40 aq=1:1.00
    Output #0, flv, to 'rtmp://XXX.XX.XXX.XX/myapp/jackal':
       Stream #0:0: Video: h264 (libx264), yuv420p, 720x1280, q=25-51, 3000 kb/s, 23 tbc
       Stream #0:1: Audio: mp3 (libmp3lame), 44100 Hz, mono, s16p, 64 kb/s
    [flv @ 0x12f8b5400] Using AVStream.codec.time_base as a timebase hint to the muxer is deprecated. Set AVStream.time_base instead.
    [flv @ 0x12f8b5400] Using AVStream.codec.time_base as a timebase hint to the muxer is deprecated. Set AVStream.time_base instead.
    [libx264 @ 0x12f8b6e00] Provided packet is too small, needs to be 33468
    Failed to encode!
    Audio_pts:4154432515 pts_time:4.15443e+06 dts:4154432515 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:43 pts_time:0.043 dts:43 dts_time:0.043 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154433667 pts_time:4.15443e+06 dts:4154433667 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154434854 pts_time:4.15443e+06 dts:4154434854 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:86 pts_time:0.086 dts:86 dts_time:0.086 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154435996 pts_time:4.15444e+06 dts:4154435996 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154437138 pts_time:4.15444e+06 dts:4154437138 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:129 pts_time:0.129 dts:129 dts_time:0.129 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154438281 pts_time:4.15444e+06 dts:4154438281 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:172 pts_time:0.172 dts:172 dts_time:0.172 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154439467 pts_time:4.15444e+06 dts:4154439467 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:215 pts_time:0.215 dts:215 dts_time:0.215 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154440609 pts_time:4.15444e+06 dts:4154440609 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154441752 pts_time:4.15444e+06 dts:4154441752 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:258 pts_time:0.258 dts:258 dts_time:0.258 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154442884 pts_time:4.15444e+06 dts:4154442884 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154444071 pts_time:4.15444e+06 dts:4154444071 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:301 pts_time:0.301 dts:301 dts_time:0.301 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154445213 pts_time:4.15445e+06 dts:4154445213 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154446355 pts_time:4.15445e+06 dts:4154446355 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:344 pts_time:0.344 dts:344 dts_time:0.344 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154447498 pts_time:4.15445e+06 dts:4154447498 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:387 pts_time:0.387 dts:387 dts_time:0.387 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154448640 pts_time:4.15445e+06 dts:4154448640 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154449826 pts_time:4.15445e+06 dts:4154449826 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:430 pts_time:0.43 dts:430 dts_time:0.43 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154450969 pts_time:4.15445e+06 dts:4154450969 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154452101 pts_time:4.15445e+06 dts:4154452101 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
    ...................
    ...................
    ...................
    Video_pts:4343 pts_time:4.343 dts:4343 dts_time:4.343 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154622619 pts_time:4.15462e+06 dts:4154622619 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:4386 pts_time:4.386 dts:4386 dts_time:4.386 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154623761 pts_time:4.15462e+06 dts:4154623761 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154624903 pts_time:4.15462e+06 dts:4154624903 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
    Audio_pts:4154626090 pts_time:4.15463e+06 dts:4154626090 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:4429 pts_time:4.429 dts:4429 dts_time:4.429 duration:0 duration_time:0 stream_index:0
    Audio_pts:4154627222 pts_time:4.15463e+06 dts:4154627222 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Video_pts:4472 pts_time:4.472 dts:4472 dts_time:4.472 duration:0 duration_time:0 stream_index:0
    Error while writing audio frame: Broken pipe
    Audio_pts:4154628365 pts_time:4.15463e+06 dts:4154628365 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Error while writing audio frame: Broken pipe
    Audio_pts:4154629507 pts_time:4.15463e+06 dts:4154629507 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Error while writing audio frame: Broken pipe
    Audio_pts:4154630693 pts_time:4.15463e+06 dts:4154630693 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Error while writing audio frame: Broken pipe
    Audio_pts:4154631836 pts_time:4.15463e+06 dts:4154631836 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    Error while writing audio frame: Broken pipe
    Audio_pts:4154632978 pts_time:4.15463e+06 dts:4154632978 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
    .......................
    .......................
    .......................
    2016-03-09 16:57:49.345 PoliceCamPlayer[1004:193465] stopRecord!!!
    Video_pts:7783 pts_time:7.783 dts:7783 dts_time:7.783 duration:0 duration_time:0 stream_index:0
    [flv @ 0x12f8b5400] Failed to update header with correct duration.
    [flv @ 0x12f8b5400] Failed to update header with correct filesize.
    [libx264 @ 0x12f8b6e00] frame I:28    Avg QP:25.36  size: 24181
    [libx264 @ 0x12f8b6e00] frame P:154   Avg QP:25.34  size:  6603
    [libx264 @ 0x12f8b6e00] mb I  I16..4: 80.9%  0.0% 19.1%
    [libx264 @ 0x12f8b6e00] mb P  I16..4:  5.9%  0.0%  0.2%  P16..4: 28.2%  4.4%  1.0%  0.0%  0.0%    skip:60.2%
    [libx264 @ 0x12f8b6e00] final ratefactor: 16.70
    [libx264 @ 0x12f8b6e00] coded y,uvDC,uvAC intra: 35.8% 9.3% 0.4% inter: 8.8% 1.6% 0.0%
    [libx264 @ 0x12f8b6e00] i16 v,h,dc,p: 28% 26% 26% 21%
    [libx264 @ 0x12f8b6e00] i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 13% 26% 25%  3%  7%  4%  5%  3% 13%
    [libx264 @ 0x12f8b6e00] i8c dc,h,v,p: 85%  9%  5%  0%
    [libx264 @ 0x12f8b6e00] kb/s:1712.63
  • Android recording video with overlay view [way 2]

    2 March 2016, by t0m

    I am working on an Android app that captures video with overlay views. I have tried two of the ways below (1 and 2):
    1. Via SurfaceView and JavaCV with FFmpeg.
    2. Via OpenCV and JavaCV with FFmpeg.
    3. For API 21+, perhaps with MediaProjection.

    (The question is split into two parts due to the Stack Overflow length limit.)

    ad 1. Via SurfaceView and JavaCV with FFmpeg:

    Here

    ad 2. Via OpenCV and JavaCV with FFmpeg:

    OpenCVCameraActivity.java:

    import android.app.Activity;
    import android.hardware.Camera;
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.os.Environment;
    import android.util.Log;
    import android.view.Menu;
    import android.view.MenuItem;
    import android.view.MotionEvent;
    import android.view.SubMenu;
    import android.view.SurfaceView;
    import android.view.View;
    import android.view.WindowManager;
    import android.widget.Toast;

    import org.bytedeco.javacv.FFmpegFrameRecorder;
    import org.bytedeco.javacv.Frame;
    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.CameraBridgeViewBase;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;
    import org.opencv.core.Mat;

    import java.io.File;
    import java.nio.ByteBuffer;
    import java.nio.ShortBuffer;
    import java.text.SimpleDateFormat;
    import java.util.Date;
    import java.util.List;
    import java.util.ListIterator;

    @SuppressWarnings("ALL")
    public class OpenCVCameraActivity extends Activity implements
           CameraBridgeViewBase.CvCameraViewListener2,
           View.OnTouchListener {

       //name of activity, for DEBUGGING
       private static final String TAG = OpenCVCameraActivity.class.getSimpleName();

       private OpenCVCameraPreview mOpenCvCameraView;
       private List<Camera.Size> mResolutionList;
       private MenuItem[] mEffectMenuItems;
       private SubMenu mColorEffectsMenu;
       private MenuItem[] mResolutionMenuItems;
       private SubMenu mResolutionMenu;

       private static long frameCounter = 0;

       long startTime = 0;
       private Mat edgesMat;
       boolean recording = false;
       private int sampleAudioRateInHz = 44100;
       private int imageWidth = 1280;
       private int imageHeight = 720;
       private int frameRate = 30;
       private Frame yuvImage = null;
       private File ffmpeg_link;
       private FFmpegFrameRecorder recorder;

       /*audio data getting thread */
       private AudioRecord audioRecord;
       private AudioRecordRunnable audioRecordRunnable;
       private Thread audioThread;
       volatile boolean runAudioThread = true;
       ShortBuffer[] samples;


       private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
           @Override
           public void onManagerConnected(int status) {
               switch (status) {
                   case LoaderCallbackInterface.SUCCESS:
                       Log.i(TAG, "OpenCV loaded successfully");
                       mOpenCvCameraView.enableView();
                       mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
                   break;
                   default:
                       super.onManagerConnected(status);
                   break;
               }
           }
       };

       @Override
       protected void onCreate(Bundle savedInstanceState) {
           super.onCreate(savedInstanceState);
           if(Static.DEBUG) Log.i(TAG, "onCreate()");

           getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

           Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);

           try {
               setContentView(R.layout.activity_opencv);

               mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
               mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
               mOpenCvCameraView.setCvCameraViewListener(this);

               mOpenCvCameraView.enableFpsMeter();

               ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
           } catch (Exception e){
               e.printStackTrace();
           }
       }

       private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
               new Thread.UncaughtExceptionHandler() {
                   public void uncaughtException(Thread thread, Throwable ex) {
                       if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
                   }
               };

       @Override
       protected void onRestart() {
           if (Static.DEBUG) Log.i(TAG, "onRestart()");
           super.onRestart();
       }

       @Override
       protected void onStart() {
           if (Static.DEBUG) Log.i(TAG, "onStart()");
           super.onStart();
       }

       @Override
       protected void onResume() {
           if (Static.DEBUG) Log.i(TAG, "onResume()");
           super.onResume();

           if (!OpenCVLoader.initDebug()) {
               Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
               OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
           } else {
               Log.i(TAG, "OpenCV library found inside package. Using it!");
               mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
           }

       }

       @Override
       public boolean onCreateOptionsMenu(Menu menu) {
           if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
           super.onCreateOptionsMenu(menu);

           List<String> effects = mOpenCvCameraView.getEffectList();

           if (effects == null) {
               Log.e(TAG, "Color effects are not supported by device!");
               return true;
           }

           mColorEffectsMenu = menu.addSubMenu("Color Effect");
           mEffectMenuItems = new MenuItem[effects.size()];

           int idx = 0;
           ListIterator<String> effectItr = effects.listIterator();
           while(effectItr.hasNext()) {
               String element = effectItr.next();
               mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
               idx++;
           }

           mResolutionMenu = menu.addSubMenu("Resolution");
           mResolutionList = mOpenCvCameraView.getResolutionList();
           mResolutionMenuItems = new MenuItem[mResolutionList.size()];

           ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
           idx = 0;
           while(resolutionItr.hasNext()) {
               Camera.Size element = resolutionItr.next();
               mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
                       Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
               idx++;
           }

           return true;
       }

       @Override
       protected void onPause() {
           if (Static.DEBUG) Log.i(TAG, "onPause()");
           super.onPause();

           if (mOpenCvCameraView != null)
               mOpenCvCameraView.disableView();

       }

       @Override
       protected void onStop() {
           if (Static.DEBUG) Log.i(TAG, "onStop()");
           super.onStop();
       }

       @Override
       protected void onDestroy() {
           if (Static.DEBUG) Log.i(TAG, "onDestroy()");
           super.onDestroy();

           if (mOpenCvCameraView != null)
               mOpenCvCameraView.disableView();
       }

       public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

           ++frameCounter;
           //Log.i(TAG, "Frame number: "+frameCounter);

           final Mat rgba = inputFrame.rgba();
           //Core.flip(rgba, rgba, 1);

           /*if(Static.DEBUG) Log.i(TAG,"rgba.total(): "+rgba.total());
           if(Static.DEBUG) Log.i(TAG,"rgba.channels(): " +rgba.channels());*/
           byte[] data = new byte[(int) (rgba.total() * rgba.channels())];
           rgba.get(0, 0, data);
           //if(Static.DEBUG) Log.i(TAG,"return_buff: "+return_buff.length);

           if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
               startTime = System.currentTimeMillis();
               return rgba;
           }

           // get video data
           if (yuvImage != null && recording) {
               ByteBuffer b = (ByteBuffer)yuvImage.image[0].position(0);
               b.put(data);

               try {
                   long t = 1000 * (System.currentTimeMillis() - startTime);
                   if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
                   if (t > recorder.getTimestamp()) {
                       recorder.setTimestamp(t);
                   }
                   recorder.record(yuvImage);
               } catch (FFmpegFrameRecorder.Exception e) {
                   if(Static.DEBUG) Log.i(TAG,e.getMessage());
                   e.printStackTrace();
               }
           }

           return rgba;
       }

       @Override
       public void onCameraViewStarted(int width, int height) {
           edgesMat = new Mat();
       }

       @Override
       public void onCameraViewStopped() {
           if (edgesMat != null)
               edgesMat.release();

           edgesMat = null;
       }

       public boolean onOptionsItemSelected(MenuItem item) {
           Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
           if (item.getGroupId() == 1)
           {
               mOpenCvCameraView.setEffect((String) item.getTitle());
               Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
           } else if (item.getGroupId() == 2) {
               int id = item.getItemId();
               Camera.Size resolution = mResolutionList.get(id);
               mOpenCvCameraView.setResolution(resolution);
               resolution = mOpenCvCameraView.getResolution();
               String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
               Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
           }

           return true;
       }

       @Override
       public boolean onTouch(View v, MotionEvent event) {
           Log.i(TAG,"onTouch event");
           SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
           String currentDateandTime = sdf.format(new Date());
           String fileName = Environment.getExternalStorageDirectory().getPath() +
                   "/sample_picture_" + currentDateandTime + ".jpg";
           mOpenCvCameraView.takePicture(fileName);
           Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
           return false;
       }

       /**
        * Click to ImageButton to start recording.
        */
       public void onClickBtnStartRecord2(View v) {
           if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");

           if(!recording)
               startRecording();
           else
               stopRecording();
       }

       private void startRecording() {
           if (Static.DEBUG) Log.i(TAG, "startRecording()");
           initRecorder();

           try {
               recorder.start();
               startTime = System.currentTimeMillis();
               recording = true;
               audioThread.start();
               if (Static.DEBUG) Log.i(TAG, "startRecording() success");
           } catch(FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
       }

       private void stopRecording() {
           if (Static.DEBUG) Log.i(TAG, "stopRecording()");

           runAudioThread = false;
           try {
               audioThread.join();
           } catch(InterruptedException e) {
               e.printStackTrace();
           }
           audioRecordRunnable = null;
           audioThread = null;
           if (Static.DEBUG) Log.i(TAG, "stopRecording() 2");
           if(recorder != null && recording) {

               recording = false;
               try {
                   recorder.stop();
                   recorder.release();
                   Log.i(TAG, "Finishing recording, calling stop and release on recorder");
               } catch(FFmpegFrameRecorder.Exception e) {
                   e.printStackTrace();
               }
               recorder = null;
           }
       }


       //---------------------------------------
       // initialize ffmpeg_recorder
       //---------------------------------------
       private void initRecorder() {

           Log.i(TAG, "init recorder");
           try {

               if (yuvImage == null) {
                   yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 4);
                   Log.i(TAG, "create yuvImage");
               }

               Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
               //Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
               recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
               recorder.setFormat("mp4");
               recorder.setSampleRate(sampleAudioRateInHz);
               // Set in the surface changed method
               recorder.setFrameRate(frameRate);

               audioRecordRunnable = new AudioRecordRunnable();
               audioThread = new Thread(audioRecordRunnable);
               runAudioThread = true;
               Log.i(TAG, "recorder initialize success");
           } catch (Exception e){
               e.printStackTrace();
           }
       }

       //---------------------------------------------
       // audio thread, gets and encodes audio data
       //---------------------------------------------
       class AudioRecordRunnable implements Runnable {

           @Override
           public void run() {
               android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

               // Audio
               int bufferSize;
               ShortBuffer audioData;
               int bufferReadResult;

               bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
               audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

               audioData = ShortBuffer.allocate(bufferSize);

               Log.d(TAG, "audioRecord.startRecording()");
               audioRecord.startRecording();

               // ffmpeg_audio encoding loop
               while(runAudioThread) {
                   //Log.v(TAG,"recording? " + recording);
                   bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
                   audioData.limit(bufferReadResult);
                   if(bufferReadResult > 0) {
                       Log.v(TAG, "bufferReadResult: " + bufferReadResult);
                       // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                       // Why?  Good question...
                       if(recording) {
                               try {
                                   recorder.recordSamples(audioData);
                                   //Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                               } catch(FFmpegFrameRecorder.Exception e) {
                                   Log.v(TAG, e.getMessage());
                                   e.printStackTrace();
                               }
                       }
                   }
               }
               Log.v(TAG, "AudioThread Finished, release audioRecord");

               // encoding finish, release recorder
               if(audioRecord != null) {
                   audioRecord.stop();
                   audioRecord.release();
                   audioRecord = null;
                   Log.v(TAG, "audioRecord released");
               }
           }
       }
    }

    OpenCVCameraPreview.java :

    import android.content.Context;
    import android.hardware.Camera;
    import android.util.AttributeSet;
    import android.util.Log;

    import org.opencv.android.JavaCameraView;

    import java.io.FileOutputStream;
    import java.util.List;

    public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {

       private static final String TAG =  OpenCVCameraPreview.class.getSimpleName();
       private String mPictureFileName;

       public OpenCVCameraPreview(Context context, AttributeSet attrs) {
           super(context, attrs);
       }

       public List<String> getEffectList() {
           return mCamera.getParameters().getSupportedColorEffects();
       }

       public boolean isEffectSupported() {
           return (mCamera.getParameters().getColorEffect() != null);
       }

       public String getEffect() {
           return mCamera.getParameters().getColorEffect();
       }

       public void setEffect(String effect) {
           Camera.Parameters params = mCamera.getParameters();
           params.setColorEffect(effect);
           mCamera.setParameters(params);
       }

       public List<Camera.Size> getResolutionList() {
           return mCamera.getParameters().getSupportedPreviewSizes();
       }

       public void setResolution(Camera.Size resolution) {
           disconnectCamera();
           mMaxHeight = resolution.height;
           mMaxWidth = resolution.width;
           connectCamera(getWidth(), getHeight());
       }

       public Camera.Size getResolution() {
           return mCamera.getParameters().getPreviewSize();
       }

       public void takePicture(final String fileName) {
           Log.i(TAG, "Taking picture");
           this.mPictureFileName = fileName;
           // Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
           // Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
           mCamera.setPreviewCallback(null);

           // PictureCallback is implemented by the current class
           mCamera.takePicture(null, null, this);
       }

       @Override
       public void onPictureTaken(byte[] data, Camera camera) {
           Log.i(TAG, "Saving a bitmap to file");
           // The camera preview was automatically stopped. Start it again.
           mCamera.startPreview();
           mCamera.setPreviewCallback(this);

           // Write the image in a file (in jpeg format)
           try {
               FileOutputStream fos = new FileOutputStream(mPictureFileName);

               fos.write(data);
               fos.close();

           } catch (java.io.IOException e) {
               Log.e("PictureDemo", "Exception in photoCallback", e);
           }

       }
    }

    activity_opencv.xml:

    <?xml version="1.0" encoding="utf-8"?>
    <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
        android:layout_width="match_parent"
        android:layout_height="match_parent">

        <ImageButton
            android:id="@+id/btnStartRecord2"
            android:layout_width="70dp"
            android:layout_height="70dp"
            android:scaleType="fitXY"
            android:src="@drawable/record_icon"
            android:background="@null"
            android:text="@string/btnStartRecord"
            android:onClick="onClickBtnStartRecord2"
            android:layout_centerVertical="true"
            android:layout_alignParentRight="true"
            android:layout_alignParentEnd="true"/>

    </RelativeLayout>

    The overlay views work, but the recorded video does not contain them, and recording through the onCameraFrame method is very slow.