Media (91)

Other articles (85)

  • Customizing by adding your logo, banner or background image

    5 September 2013, by

    Some themes support three customization elements: adding a logo; adding a banner; adding a background image.

  • Publishing on MediaSPIP

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, if your MediaSPIP installation is at version 0.2 or higher. If necessary, contact the administrator of your MediaSPIP to find out.

  • Support for all media types

    10 April 2011

    Unlike many modern document-sharing programs and platforms, MediaSPIP aims to handle as many different document formats as possible, whether they are: images (png, gif, jpg, bmp and others...); audio (MP3, Ogg, Wav and others...); video (Avi, MP4, Ogv, mpg, mov, wmv and others...); text content, code or other (OpenOffice, Microsoft Office (spreadsheet, presentation), web (html, css), LaTeX, Google Earth) (...)

On other sites (6950)

  • squeeze image while capturing video with FFmpegFrameRecorder

    12 April 2016, by Saty

    I am trying to stream video with FFmpegFrameRecorder using JavaCV. Everything works fine, except that the recorded video frames come out slightly squeezed vertically.

    I am using the code below, which half of the internet community uses for live streaming (see also the sketch after the code):

    public class MainActivity extends Activity implements OnClickListener {

    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "rtmp://username:password@xxx.xxx.xxx.xxx:1935/live/test.flv";
    //private String ffmpeg_link = "/mnt/sdcard/new_stream.flv";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
       super.onCreate(savedInstanceState);

       setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
       setContentView(R.layout.activity_main);

       initLayout();
       initRecorder();
    }

    @Override
    protected void onResume() {
       super.onResume();

       if (mWakeLock == null) {
           PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
           mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
           mWakeLock.acquire();
       }
    }

    @Override
    protected void onPause() {
       super.onPause();

       if (mWakeLock != null) {
           mWakeLock.release();
           mWakeLock = null;
       }
    }

    @Override
    protected void onDestroy() {
       super.onDestroy();

       recording = false;
    }


    private void initLayout() {

       mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);

       recordButton = (Button) findViewById(R.id.recorder_control);
       recordButton.setText("Start");
       recordButton.setOnClickListener(this);

       cameraView = new CameraView(this);

       LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(imageWidth, imageHeight);        
       mainLayout.addView(cameraView, layoutParam);
       Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
       Log.w(LOG_TAG,"initRecorder");

       if (yuvIplimage == null) {
           // Recreated after frame size is set in surface change method
           yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
           //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);

           Log.v(LOG_TAG, "IplImage.create");
       }

       recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
       Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

       recorder.setFormat("flv");
       Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");

       recorder.setSampleRate(sampleAudioRateInHz);
       Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

       // re-set in the surface changed method as well
       recorder.setFrameRate(frameRate);
       Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

       // Create audio recording thread
       audioRecordRunnable = new AudioRecordRunnable();
       audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
       try {
           recorder.start();
           startTime = System.currentTimeMillis();
           recording = true;
           audioThread.start();
       } catch (FFmpegFrameRecorder.Exception e) {
           e.printStackTrace();
       }
    }

    public void stopRecording() {
       // This should stop the audio thread from running
       runAudioThread = false;

       if (recorder != null && recording) {
           recording = false;
           Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
           try {
               recorder.stop();
               recorder.release();
           } catch (FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
           recorder = null;
       }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
       // Quit when back button is pushed
       if (keyCode == KeyEvent.KEYCODE_BACK) {
           if (recording) {
               stopRecording();
           }
           finish();
           return true;
       }
       return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
       if (!recording) {
           startRecording();
           Log.w(LOG_TAG, "Start Button Pushed");
           recordButton.setText("Stop");
       } else {
           stopRecording();
           Log.w(LOG_TAG, "Stop Button Pushed");
           recordButton.setText("Start");
       }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

       @Override
       public void run() {
           // Set the thread priority
           android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

           // Audio
           int bufferSize;
           short[] audioData;
           int bufferReadResult;

           bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                   AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
           audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                   AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

           audioData = new short[bufferSize];

           Log.d(LOG_TAG, "audioRecord.startRecording()");
           audioRecord.startRecording();

           // Audio Capture/Encoding Loop
           while (runAudioThread) {
               // Read from audioRecord
               bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
               if (bufferReadResult > 0) {
                   //Log.v(LOG_TAG,"audioRecord bufferReadResult: " + bufferReadResult);

                   // Changes in this variable may not be picked up despite it being "volatile"
                   if (recording) {
                       try {
                           // Write to FFmpegFrameRecorder
                           Buffer[] buffer = {ShortBuffer.wrap(audioData, 0, bufferReadResult)};                        
                           recorder.record(buffer);
                       } catch (FFmpegFrameRecorder.Exception e) {
                           Log.v(LOG_TAG,e.getMessage());
                           e.printStackTrace();
                       }
                   }
               }
           }
           Log.v(LOG_TAG,"AudioThread Finished");

           /* Capture/Encoding finished, release recorder */
           if (audioRecord != null) {
               audioRecord.stop();
               audioRecord.release();
               audioRecord = null;
               Log.v(LOG_TAG,"audioRecord released");
           }
       }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

       private boolean previewRunning = false;

       private SurfaceHolder holder;
       private Camera camera;

       private byte[] previewBuffer;

       long videoTimestamp = 0;

       Bitmap bitmap;
       Canvas canvas;

       public CameraView(Context _context) {
           super(_context);

           holder = this.getHolder();
           holder.addCallback(this);
           holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
       }

       @Override
       public void surfaceCreated(SurfaceHolder holder) {
           camera = Camera.open();

           try {
               camera.setPreviewDisplay(holder);
               camera.setPreviewCallback(this);

               Camera.Parameters currentParams = camera.getParameters();
               Log.v(LOG_TAG,"Preview Framerate: " + currentParams.getPreviewFrameRate());
               Log.v(LOG_TAG,"Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

               // Use these values
               imageWidth = currentParams.getPreviewSize().width;
               imageHeight = currentParams.getPreviewSize().height;
               frameRate = currentParams.getPreviewFrameRate();                

               bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);


               /*
               Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat())/8);
               previewBuffer = new byte[imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat())/8];
               camera.addCallbackBuffer(previewBuffer);
               camera.setPreviewCallbackWithBuffer(this);
               */              

               camera.startPreview();
               previewRunning = true;
           }
           catch (IOException e) {
               Log.v(LOG_TAG,e.getMessage());
               e.printStackTrace();
           }  
       }

       public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
           Log.v(LOG_TAG,"Surface Changed: width " + width + " height: " + height);

           // We would do this if we want to reset the camera parameters
           /*
           if (!recording) {
               if (previewRunning){
                   camera.stopPreview();
               }
               try {
                   //Camera.Parameters cameraParameters = camera.getParameters();
                   //p.setPreviewSize(imageWidth, imageHeight);
                   //p.setPreviewFrameRate(frameRate);
                   //camera.setParameters(cameraParameters);

                   camera.setPreviewDisplay(holder);
                   camera.startPreview();
                   previewRunning = true;
               }
               catch (IOException e) {
                   Log.e(LOG_TAG,e.getMessage());
                   e.printStackTrace();
               }  
           }            
           */

           // Get the current parameters
           Camera.Parameters currentParams = camera.getParameters();
           Log.v(LOG_TAG,"Preview Framerate: " + currentParams.getPreviewFrameRate());
           Log.v(LOG_TAG,"Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

           // Use these values
           imageWidth = currentParams.getPreviewSize().width;
           imageHeight = currentParams.getPreviewSize().height;
           frameRate = currentParams.getPreviewFrameRate();

           // Create the yuvIplimage if needed
           yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
           //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
       }

       @Override
       public void surfaceDestroyed(SurfaceHolder holder) {
           try {
               camera.setPreviewCallback(null);

               previewRunning = false;
               camera.release();

           } catch (RuntimeException e) {
               Log.v(LOG_TAG,e.getMessage());
               e.printStackTrace();
           }
       }

       @Override
       public void onPreviewFrame(byte[] data, Camera camera) {

           if (yuvIplimage != null && recording) {
               videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

               // Put the camera preview frame right into the yuvIplimage object
               yuvIplimage.getByteBuffer().put(data);

               // FAQ about IplImage:
               // - For custom raw processing of data, getByteBuffer() returns an NIO direct
               //   buffer wrapped around the memory pointed by imageData, and under Android we can
               //   also use that Buffer with Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
               // - To get a BufferedImage from an IplImage, we may call getBufferedImage().
               // - The createFrom() factory method can construct an IplImage from a BufferedImage.
               // - There are also a few copy*() methods for BufferedImage<->IplImage data transfers.

               // Let's try it..
               // This works but only on transparency
               // Need to find the right Bitmap and IplImage matching types

               /*
               bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
               //bitmap.setPixel(10,10,Color.MAGENTA);

               canvas = new Canvas(bitmap);
               Paint paint = new Paint();
               paint.setColor(Color.GREEN);
               float leftx = 20;
               float topy = 20;
               float rightx = 50;
               float bottomy = 100;
               RectF rectangle = new RectF(leftx,topy,rightx,bottomy);
               canvas.drawRect(rectangle, paint);

               bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
               */
               //Log.v(LOG_TAG,"Writing Frame");

               try {

                   // Get the correct time
                   recorder.setTimestamp(videoTimestamp);

                   // Record the image into FFmpegFrameRecorder
                   recorder.record(yuvIplimage);

               } catch (FFmpegFrameRecorder.Exception e) {
                   Log.v(LOG_TAG,e.getMessage());
                   e.printStackTrace();
               }
           }
       }
    }

    }
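
     A hedged guess at the squeeze, based only on the code above: initRecorder() creates the FFmpegFrameRecorder with the hard-coded 320×240, while surfaceChanged() later overwrites imageWidth/imageHeight with the camera's actual preview size, whose aspect ratio may differ, so frames get scaled into 4:3 on encode. A minimal, untested sketch of recreating the recorder once the real preview size is known:

     // Sketch only: rebuild the NV21 buffer and the recorder with the camera's
     // actual preview size so the encoded video keeps the camera's aspect ratio.
     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        Camera.Parameters params = camera.getParameters();
        imageWidth = params.getPreviewSize().width;
        imageHeight = params.getPreviewSize().height;
        frameRate = params.getPreviewFrameRate();

        yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);

        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        recorder.setFormat("flv");
        recorder.setSampleRate(sampleAudioRateInHz);
        recorder.setFrameRate(frameRate);
     }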

  • H264 Encoding - Could not play video using VLC Player

    31 March 2016, by bot1131357

    I am having trouble encoding an H264 video correctly using FFmpeg libav. I could not play the encoded video in VLC media player, and although I could play it in MPC-HC, the time shows 00:00/00:00. Clearly I’m missing something.

    The Media info from MPC-HC shows this:

    General
    Format : AVC
    Format/Info : Advanced Video Codec
    File size : 110 KiB
    Duration : 2s 400ms
    Overall bit rate : 375 Kbps
    Writing library : x264 core 148 r2665 a01e339
    Encoding settings : cabac=0 / ref=3 / deblock=1:0:0 / analyse=0x1:0x111 / me=hex / subme=7 / psy=1 / psy_rd=1.00:0.00 / mixed_ref=1 / me_range=16 / chroma_me=1 / trellis=1 / 8x8dct=0 / cqm=0 / deadzone=21,11 / fast_pskip=1 / chroma_qp_offset=-2 / threads=7 / lookahead_threads=1 / sliced_threads=0 / nr=0 / decimate=1 / interlaced=0 / bluray_compat=0 / constrained_intra=0 / bframes=0 / weightp=0 / keyint=12 / keyint_min=1 / scenecut=40 / intra_refresh=0 / rc_lookahead=12 / rc=abr / mbtree=1 / bitrate=2000 / ratetol=1.0 / qcomp=0.60 / qpmin=0 / qpmax=69 / qpstep=4 / ip_ratio=1.40 / aq=1:1.00

    Video
    Format : AVC
    Format/Info : Advanced Video Codec
    Format profile : Baseline@L2.1
    Format settings, CABAC : No
    Format settings, ReFrames : 3 frames
    Format settings, GOP : M=1, N=12
    Duration : 2s 400ms
    Bit rate : 2 000 Kbps
    Width : 320 pixels
    Height : 240 pixels
    Display aspect ratio : 4:3
    Frame rate mode : Variable
    Frame rate : 20.833 fps
    Color space : YUV
    Chroma subsampling : 4:2:0
    Bit depth : 8 bits
    Scan type : Progressive
    Bits/(Pixel*Frame) : 1.250
    Stream size : 586 KiB
    Writing library : x264 core 148 r2665 a01e339
    Encoding settings : cabac=0 / ref=3 / deblock=1:0:0 / analyse=0x1:0x111 / me=hex / subme=7 / psy=1 / psy_rd=1.00:0.00 / mixed_ref=1 / me_range=16 / chroma_me=1 / trellis=1 / 8x8dct=0 / cqm=0 / deadzone=21,11 / fast_pskip=1 / chroma_qp_offset=-2 / threads=7 / lookahead_threads=1 / sliced_threads=0 / nr=0 / decimate=1 / interlaced=0 / bluray_compat=0 / constrained_intra=0 / bframes=0 / weightp=0 / keyint=12 / keyint_min=1 / scenecut=40 / intra_refresh=0 / rc_lookahead=12 / rc=abr / mbtree=1 / bitrate=2000 / ratetol=1.0 / qcomp=0.60 / qpmin=0 / qpmax=69 / qpstep=4 / ip_ratio=1.40 / aq=1:1.00

    I noticed something odd in the above info:
    - The frame rate is 20.833 fps, instead of the specified 10 fps.
    - The duration of 2s 400ms did not seem right either, since the video played for more than 4s.
    (The two oddities are consistent: 50 frames over 2.4 s is exactly 50/2.4 ≈ 20.833 fps, while 50 frames at the intended 10 fps would last 5 s, so the frame timestamps look like the culprit.)

    Also, (AVFrame* picture)->pict_type is always set to AV_PICTURE_TYPE_NONE. I don’t think this is normal.

    The library that I’m using is ffmpeg-20160219-git-98a0053-win32-dev. I would really appreciate it if you could help me clear up this confusion.

     /*
     * Video encoding example
     */
     // Includes for the FFmpeg API and the C++ bits (cerr, cin, min) used
     // below; the original snippet omitted them.
     #include <algorithm>
     #include <iostream>
     extern "C" {
     #include <libavcodec/avcodec.h>
     #include <libavformat/avformat.h>
     #include <libavutil/imgutils.h>
     }
     using namespace std;

     char filename[] = "test.mp4";
    int main(int argc, char** argv)
    {
       AVCodec *codec = NULL;
       AVCodecContext *codecCtx= NULL;
       AVFormatContext *pFormatCtx = NULL;
       AVStream * pVideoStream = NULL;
       AVFrame *picture = NULL;

       int i, x, y,            //
           ret,                // Return value
           got_packet_ptr;     // Data encoded into packet

       printf("Video encoding\n");

       // Register all formats and codecs
       av_register_all();

       // allocate context
       pFormatCtx = avformat_alloc_context();
       memcpy(pFormatCtx->filename,filename,
           min(strlen(filename), sizeof(pFormatCtx->filename)));

       // guess format
       pFormatCtx->oformat = av_guess_format("h264", NULL, NULL);
       if (NULL==pFormatCtx->oformat)
       {
           cerr << "Could not guess output format" << endl;
           return -1;
       }  

       // Find the codec.
       codec = avcodec_find_encoder(pFormatCtx->oformat->video_codec);
       if (codec == NULL) {
           fprintf(stderr, "Codec not found\n");
           return -1;
       }

       // Set context
       int framerate = 10;
       codecCtx = avcodec_alloc_context3(codec);
       avcodec_get_context_defaults3(codecCtx, codec);
       codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
       codecCtx->profile = FF_PROFILE_H264_BASELINE;
       // Resolution must be a multiple of two.
       codecCtx->width  = 320;
       codecCtx->height = 240;

       codecCtx->bit_rate = 2000000;
       codecCtx->time_base.den = framerate;
       codecCtx->time_base.num = 1;
       codecCtx->gop_size = 12; // emit one intra frame every twelve frames at most

       // Open the codec.  
       if (avcodec_open2(codecCtx, codec, NULL) < 0)
       {
           printf("Cannot open video codec\n");
           return -1;
       }

       // Add stream to pFormatCtx
       pVideoStream = avformat_new_stream(pFormatCtx, codec);
       if (!pVideoStream)
       {
           printf("Cannot add new video stream\n");
           return -1;
       }
       pVideoStream->codec = codecCtx;
       pVideoStream->time_base.den = framerate;
       pVideoStream->time_base.num = 1;

       if (avio_open2(&pFormatCtx->pb, filename, AVIO_FLAG_WRITE, NULL, NULL) < 0)
       {
           printf("Cannot open file\n");
           return -1;
       }

       // Write file header.
       avformat_write_header(pFormatCtx, NULL);

       // Create frame
       picture= av_frame_alloc();
       picture->format = codecCtx->pix_fmt;
       picture->width  = codecCtx->width;
       picture->height = codecCtx->height;

       int bufferImgSize = av_image_get_buffer_size(codecCtx->pix_fmt, codecCtx->width,
                       codecCtx->height,1);    
        av_image_alloc(picture->data, picture->linesize, codecCtx->width, codecCtx->height, codecCtx->pix_fmt, 32);

       AVPacket avpkt;

       /* encode 1 second of video */
       for(i=0;i<50;i++)
       {
           /* prepare a dummy image */
           /* Y */
            for(y=0;y<codecCtx->height;y++)
           {
                for(x=0;x<codecCtx->width;x++)
               {
                   picture->data[0][y * picture->linesize[0] + x] = x + y + i * 3;
               }
           }
           /* Cb and Cr */
            for(y=0;y<codecCtx->height/2;y++)
           {
                for(x=0;x<codecCtx->width/2;x++)
               {
                   picture->data[1][y * picture->linesize[1] + x] = 128 + y + i * 2;
                   picture->data[2][y * picture->linesize[2] + x] = 64 + x + i * 5;
               }
           }

           // Get timestamp
           picture->pts = (float) i * (1000.0/(float)(codecCtx->time_base.den)) * 90;

           // Encode frame to packet
           av_init_packet(&avpkt);
           got_packet_ptr = 0;
           int error = avcodec_encode_video2(codecCtx, &avpkt, picture, &got_packet_ptr);
           if (!error && got_packet_ptr > 0)
           {
               // Write packet with frame.
               ret = (av_interleaved_write_frame(pFormatCtx, &avpkt) == 0);        
           }  
           av_packet_unref(&avpkt);
       }

       // Flush remaining encoded data
       while(1)
       {
           av_init_packet(&avpkt);
           got_packet_ptr = 0;
           // Encode frame to packet.
           int error = avcodec_encode_video2(codecCtx, &avpkt, NULL, &got_packet_ptr);
           if (!error && got_packet_ptr > 0)
           {
               // Write packet with frame.
               ret = (av_interleaved_write_frame(pFormatCtx, &avpkt) == 0);        
           }
           else
           {
               break;
           }
           av_packet_unref(&avpkt);
       }
       av_write_trailer(pFormatCtx);

       av_packet_unref(&avpkt);
       av_frame_free(&picture);

       avcodec_close(codecCtx);
       av_free(codecCtx);

       cin.get();
    }
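
     For comparison, a common timestamping pattern (a sketch only, not the asker's code) keeps pts as a plain frame index in the codec time_base (1/10 s here) and rescales each packet to the stream time_base before muxing, which would make the container report the intended 10 fps:

     // Sketch: pts in codecCtx->time_base units, rescaled for the muxer.
     picture->pts = i;  // frame index, i.e. i * (1/10) seconds

     int error = avcodec_encode_video2(codecCtx, &avpkt, picture, &got_packet_ptr);
     if (!error && got_packet_ptr > 0)
     {
        // Convert from codec time_base (1/10) to the stream's time_base
        av_packet_rescale_ts(&avpkt, codecCtx->time_base, pVideoStream->time_base);
        avpkt.stream_index = pVideoStream->index;
        ret = (av_interleaved_write_frame(pFormatCtx, &avpkt) == 0);
     }
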
  • Android recording video with overlay view

    6 March 2016, by t0m

    I am trying to build an Android app that captures video with overlay views. (I also need the onPreviewFrame method.)
    It uses a SurfaceView and JavaCV with FFmpeg:


    OpenCVCameraActivity.java:

    import android.app.Activity;
    import android.content.Context;
    import android.hardware.Camera;
    import android.hardware.Camera.PreviewCallback;
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.util.Log;
    import android.view.KeyEvent;
    import android.view.Menu;
    import android.view.SurfaceHolder;
    import android.view.SurfaceView;
    import android.view.View;
    import android.widget.FrameLayout;
    import android.widget.Toast;

    import org.bytedeco.javacv.FFmpegFrameRecorder;
    import org.bytedeco.javacv.Frame;

    import java.io.File;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.ShortBuffer;
    import java.util.List;


    @SuppressWarnings("ALL")
    public class OpenCVCameraActivity extends Activity {

       private static final String TAG = OpenCVCameraActivity.class.getSimpleName();

       private long startTime = 0;
       private boolean isPreviewOn = false;

       private int sampleAudioRateInHz = 44100;
       private Camera.Size previewSize;        //preview and Camera and Recorder width and height
       private int recorderFrameRate = 25;

       // audio data getting thread
       private AudioRecord audioRecord;
       private AudioRecordRunnable audioRecordRunnable;
       private Thread audioThread;
       private volatile boolean runAudioThread = true;

       // video data getting thread
       private Camera mCamera;
       private CameraView mPreview;
       private FFmpegFrameRecorder recorder;
       private boolean recording = false;
       private Frame yuvImage = null;

       //storage
       private Storage storage;

       @Override
       public void onCreate(Bundle savedInstanceState) {
           super.onCreate(savedInstanceState);
           if(Static.DEBUG) Log.i(TAG, "onCreate()");

           Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);

           setContentView(R.layout.activity_opencv);

           prepareCamera();
       }

       private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
               new Thread.UncaughtExceptionHandler() {
                   public void uncaughtException(Thread thread, Throwable ex) {
                       if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
                   }
               };

       @Override
       protected void onRestart() {
           super.onRestart();
           if (Static.DEBUG) Log.i(TAG, "onRestart()");
       }

       @Override
       protected void onStart() {
           super.onStart();
           if (Static.DEBUG) Log.i(TAG, "onStart()");
       }

       @Override
       protected void onResume() {
           super.onResume();
           if (Static.DEBUG) Log.i(TAG, "onResume()");

           storage = new Storage(this);
           if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
               Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
           else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
               Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
       }

       @Override
       public boolean onCreateOptionsMenu(Menu menu) {
           if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
           return super.onCreateOptionsMenu(menu);
       }

       @Override
       protected void onPause() {
           super.onPause();
           if (Static.DEBUG) Log.i(TAG, "onPause()");
       }

       @Override
       protected void onStop() {
           super.onStop();
           if (Static.DEBUG) Log.i(TAG, "onStop()");
       }

       @Override
       protected void onDestroy() {
           super.onDestroy();
           if (Static.DEBUG) Log.i(TAG, "onDestroy()");

           recording = false;

           if (mPreview != null) {
               mPreview.stopPreview();
           }

           if (mCamera != null) {
               mCamera.stopPreview();
               mCamera.release();
               mCamera = null;
           }
       }

       /** Prepare camera object.
        * */
       private void prepareCamera() {
           //1. Open camera object
           try {
               mCamera = getCameraInstance(this);
           } catch (Exception e) {
               e.printStackTrace();
               Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
               finish();
               return;
           }

           setOptimalCameraParams();

            //2. Connect Preview + 3. Start Preview + 8. Stop preview in the surfaceChanged() method
           mPreview = new CameraView(this, mCamera);
           FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
           preview.addView(mPreview); //surfaceView to FrameLayout
           if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
       }

       /**
        * A safe way to get an instance of the Camera object.
        */
       @SuppressWarnings("deprecation")
       public static Camera getCameraInstance(Context ctx) throws Exception {
           Camera c = Camera.open();
           if (c == null)
               throw new Exception(ctx.getString(R.string.errCameraNotAvailable));

           if(Static.DEBUG) Log.i(TAG, "camera open");
           return c; // attempt to get a Camera instance, otherwise null
       }

        /** Sets optimal camera parameters, depending on hardware capabilities. */
       @SuppressWarnings("deprecation")
       private void setOptimalCameraParams(){

           // Camera parameters
           Camera.Parameters params = mCamera.getParameters();
            List<String> focusModes = params.getSupportedFocusModes();
           if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
           if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());

           params.setRecordingHint(true); //MediaRecorder.start() to start faster

            //Prefer continuous autofocus when available
           if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
               params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
           } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
               params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
           }

           // set Camera parameters
           mCamera.setParameters(params);
       }

       //---------------------------------------
       // initialize ffmpeg_recorder
       //---------------------------------------
       private void initRecorder() throws Exception {
           if(Static.DEBUG) Log.i(TAG,"init recorder");

           File output = null;

           try {
               output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
               if(output == null)
                   throw new Exception();
           } catch (Exception e) {
               e.printStackTrace();
               throw new Exception(getString(R.string.errSetOutputFile));
           }

           if (yuvImage == null) {
               yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
               if(Static.DEBUG) Log.i(TAG, "create yuvImage");
           }

           if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
           recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
           //recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
           //recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
           //recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
           //recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
           recorder.setFormat("mp4");
           recorder.setSampleRate(sampleAudioRateInHz);
           recorder.setFrameRate(recorderFrameRate);

           if(Static.DEBUG) Log.i(TAG, "recorder initialize success");

           audioRecordRunnable = new AudioRecordRunnable();
           audioThread = new Thread(audioRecordRunnable);
           runAudioThread = true;
       }

       public void startRecording() {

           try {
               initRecorder();
           } catch (Exception e){
               e.printStackTrace();
               Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
           }

           try {
               recorder.start();
               startTime = System.currentTimeMillis();
               recording = true;
               audioThread.start();

           } catch (FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
       }

       public void stopRecording() {

           runAudioThread = false;
           try {
               audioThread.join();
           } catch (InterruptedException e) {
               // reset interrupt to be nice
               Thread.currentThread().interrupt();
               return;
           }
           audioRecordRunnable = null;
           audioThread = null;

            if (recorder != null && recording) {

               recording = false;
               if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
               try {
                   recorder.stop();
                   recorder.release();
               } catch (FFmpegFrameRecorder.Exception e) {
                   e.printStackTrace();
               }
               recorder = null;

           }
       }

       @Override
       public boolean onKeyDown(int keyCode, KeyEvent event) {

           if (keyCode == KeyEvent.KEYCODE_BACK) {
               if (recording) {
                   stopRecording();
               }

               finish();

               return true;
           }

           return super.onKeyDown(keyCode, event);
       }

       public void onClickBtnStartRecord(View v) {
           if (!recording) {
               startRecording();
               if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
           } else {
               // This will trigger the audio recording loop to stop and then set isRecorderStart = false;
               stopRecording();
               if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
           }
       }

       //---------------------------------------------
       // audio thread, gets and encodes audio data
       //---------------------------------------------
       class AudioRecordRunnable implements Runnable {

           @Override
           public void run() {
               android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

               // Audio
               int bufferSize;
               ShortBuffer audioData;
               int bufferReadResult;

               bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
               audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

               audioData = ShortBuffer.allocate(bufferSize);

               if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
               audioRecord.startRecording();

               /* ffmpeg_audio encoding loop */
               while (runAudioThread) {
                   //if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
                   bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
                   audioData.limit(bufferReadResult);
                   if (bufferReadResult > 0) {
                       //if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
                       // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                       // Why?  Good question...
                       if (recording) {
                           try {
                               recorder.recordSamples(audioData);
                               //if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                           } catch (FFmpegFrameRecorder.Exception e) {
                               if(Static.DEBUG) Log.i(TAG,e.getMessage());
                               e.printStackTrace();
                           }
                       }
                   }
               }
               if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");

               /* encoding finish, release recorder */
               if (audioRecord != null) {
                   audioRecord.stop();
                   audioRecord.release();
                   audioRecord = null;
                   if(Static.DEBUG) Log.i(TAG,"audioRecord released");
               }
           }
       }

       /**TODO*/
       private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
           Camera.Size result=null;

           for (Camera.Size size : parameters.getSupportedPreviewSizes()) {

               if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);

                if (size.width <= width && size.height <= height) {
                   if (result==null) {
                       result=size;
                   } else {
                       int resultArea=result.width*result.height;
                       int newArea=size.width*size.height;

                       if (newArea>resultArea) {
                           result=size;
                       }
                   }
               }
           }

           return(result);
       }

       //---------------------------------------------
       // camera thread, gets and encodes video data
       //---------------------------------------------
       private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

           private final String TAG = CameraView.class.getSimpleName();

           private SurfaceHolder mHolder;
           private Camera mCamera;

           public CameraView(Context context, Camera camera) {
               super(context);
               if(Static.DEBUG) Log.i(TAG, "camera view");

               mCamera = camera;

               // Install a SurfaceHolder.Callback so we get notified when the
               // underlying surface is created and destroyed.
               mHolder = getHolder();
               mHolder.addCallback(CameraView.this);
               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
               mCamera.setPreviewCallback(CameraView.this);
           }

           @Override
           public void surfaceCreated(SurfaceHolder holder) {
               try {
                   stopPreview();
                   mCamera.setPreviewDisplay(holder);
               } catch (IOException exception) {
                   mCamera.release();
                   mCamera = null;
               }
           }

           public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
               if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);

               // If your preview can change or rotate, take care of those events here.
               // Make sure to stop the preview before resizing or reformatting it.

               if (mHolder.getSurface() == null){
                   // preview surface does not exist
                   return;
               }

               // stop preview before making changes
               try {
                   stopPreview();
               } catch (Exception e){
                   // ignore: tried to stop a non-existent preview
               }

               // start preview with new settings
               try {
                   Camera.Parameters params = mCamera.getParameters();

                   previewSize = getBestPreviewSize(w, h, params);
                   if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
                   if (previewSize != null)
                       params.setPreviewSize(previewSize.width, previewSize.height);

                   params.setPreviewFrameRate(recorderFrameRate);
                   if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
                   mCamera.setParameters(params);
                   mCamera.setPreviewDisplay(holder);
                   mCamera.setPreviewCallback(CameraView.this);
                   startPreview();
               } catch (Exception e){
                   if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
                   e.printStackTrace();
               }

           }

           @Override
           public void surfaceDestroyed(SurfaceHolder holder) {
               try {
                   mHolder.addCallback(null);
                   mCamera.setPreviewCallback(null);
               } catch (RuntimeException e) {
                   // The camera has probably just been released, ignore.
               }
           }

           public void startPreview() {
                if (!isPreviewOn && mCamera != null) {
                   isPreviewOn = true;
                   mCamera.startPreview();
               }
           }

           public void stopPreview() {
                if (isPreviewOn && mCamera != null) {
                   isPreviewOn = false;
                   mCamera.stopPreview();
               }
           }

           @Override
           public void onPreviewFrame(byte[] data, Camera camera) {
               if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                   startTime = System.currentTimeMillis();
                   return;
               }

               // get video data
                if (yuvImage != null && recording) {
                   ((ByteBuffer)yuvImage.image[0].position(0)).put(data);

                   try {
                       long t = 1000 * (System.currentTimeMillis() - startTime);
                       if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
                       if (t > recorder.getTimestamp()) {
                           recorder.setTimestamp(t);
                       }
                       recorder.record(yuvImage);
                   } catch (FFmpegFrameRecorder.Exception e) {
                       if(Static.DEBUG) Log.i(TAG,e.getMessage());
                       e.printStackTrace();
                   }
               }
           }
       }
    }

    activity_opencv.xml:

     <?xml version="1.0" encoding="utf-8"?>
     <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
        android:layout_width="match_parent"
        android:layout_height="match_parent">

        <FrameLayout
            android:id="@+id/cameraPreview"
            android:layout_width="match_parent"
            android:layout_height="match_parent">
        </FrameLayout>

        <ImageButton
            android:id="@+id/btnStartRecord"
            android:layout_width="70dp"
            android:layout_height="70dp"
            android:scaleType="fitXY"
            android:src="@drawable/record_icon"
            android:background="@null"
            android:text="@string/btnStartRecord"
            android:onClick="onClickBtnStartRecord"
            android:clickable="true"
            android:layout_centerVertical="true"
            android:layout_alignParentRight="true"
            android:layout_alignParentEnd="true"/>

        <TextView
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"/>

     </RelativeLayout>

    The overlay views work on screen, but the recorded video does not include them.
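
    The camera preview buffer never contains other Views, so an overlay drawn in the layout has to be composited into each frame before recorder.record(). A rough sketch using JavaCV's AndroidFrameConverter (overlayView and the RGBA frame are assumptions for illustration, not code from the question):

     // Sketch: snapshot the overlay View, then burn it into each recorded frame.
     AndroidFrameConverter converter = new AndroidFrameConverter();

     Bitmap overlayBitmap = Bitmap.createBitmap(overlayView.getWidth(),
            overlayView.getHeight(), Bitmap.Config.ARGB_8888);
     overlayView.draw(new Canvas(overlayBitmap));  // render the View off-screen

     // In onPreviewFrame(), once the preview frame is available as a Bitmap:
     Bitmap frameBitmap = converter.convert(rgbaFrame); // assumes an RGBA Frame
     Canvas canvas = new Canvas(frameBitmap);
     canvas.drawBitmap(overlayBitmap, 0, 0, null);      // composite the overlay
     recorder.record(converter.convert(frameBitmap));   // Bitmap -> Frame -> encoder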