Recherche avancée

Médias (91)

Autres articles (45)

  • Personnaliser les catégories

    21 juin 2013, par

    Formulaire de création d’une catégorie
    Pour ceux qui connaissent bien SPIP, une catégorie peut être assimilée à une rubrique.
    Dans le cas d’un document de type catégorie, les champs proposés par défaut sont : Texte
    On peut modifier ce formulaire dans la partie :
    Administration > Configuration des masques de formulaire.
    Dans le cas d’un document de type média, les champs non affichés par défaut sont : Descriptif rapide
    Par ailleurs, c’est dans cette partie configuration qu’on peut indiquer le (...)

  • Support audio et vidéo HTML5

    10 avril 2011

    MediaSPIP utilise les balises HTML5 video et audio pour la lecture de documents multimedia en profitant des dernières innovations du W3C supportées par les navigateurs modernes.
    Pour les navigateurs plus anciens, le lecteur flash Flowplayer est utilisé.
    Le lecteur HTML5 utilisé a été spécifiquement créé pour MediaSPIP : il est complètement modifiable graphiquement pour correspondre à un thème choisi.
    Ces technologies permettent de distribuer vidéo et son à la fois sur des ordinateurs conventionnels (...)

  • HTML5 audio and video support

    13 avril 2011, par

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
    For older browsers the Flowplayer flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

Sur d’autres sites (9167)

  • Android recording video with overlay view

    6 mars 2016, par t0m

    I am building an Android app which has functionality to capture video with overlay views. (I also need the onPreviewFrame method.)
    Via SurfaceView and JavaCV with FFmpeg :


    OpenCVCameraActivity.java :

    import android.app.Activity;
    import android.content.Context;
    import android.hardware.Camera;
    import android.hardware.Camera.PreviewCallback;
    import android.media.AudioFormat;
    import android.media.AudioRecord;
    import android.media.MediaRecorder;
    import android.os.Bundle;
    import android.util.Log;
    import android.view.KeyEvent;
    import android.view.Menu;
    import android.view.SurfaceHolder;
    import android.view.SurfaceView;
    import android.view.View;
    import android.widget.FrameLayout;
    import android.widget.Toast;

    import org.bytedeco.javacv.FFmpegFrameRecorder;
    import org.bytedeco.javacv.Frame;

    import java.io.File;
    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.ShortBuffer;
    import java.util.List;


    @SuppressWarnings("ALL")
    public class OpenCVCameraActivity extends Activity {

       private static final String TAG = OpenCVCameraActivity.class.getSimpleName();

       private long startTime = 0;
       private boolean isPreviewOn = false;

       private int sampleAudioRateInHz = 44100;
       private Camera.Size previewSize;        //preview and Camera and Recorder width and height
       private int recorderFrameRate = 25;

       // audio data getting thread
       private AudioRecord audioRecord;
       private AudioRecordRunnable audioRecordRunnable;
       private Thread audioThread;
       private volatile boolean runAudioThread = true;

       // video data getting thread
       private Camera mCamera;
       private CameraView mPreview;
       private FFmpegFrameRecorder recorder;
       private boolean recording = false;
       private Frame yuvImage = null;

       //storage
       private Storage storage;

       @Override
       public void onCreate(Bundle savedInstanceState) {
           super.onCreate(savedInstanceState);
           if(Static.DEBUG) Log.i(TAG, "onCreate()");

           Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);

           setContentView(R.layout.activity_opencv);

           prepareCamera();
       }

       private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
               new Thread.UncaughtExceptionHandler() {
                   public void uncaughtException(Thread thread, Throwable ex) {
                       if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
                   }
               };

       @Override
       protected void onRestart() {
           super.onRestart();
           if (Static.DEBUG) Log.i(TAG, "onRestart()");
       }

       @Override
       protected void onStart() {
           super.onStart();
           if (Static.DEBUG) Log.i(TAG, "onStart()");
       }

       @Override
       protected void onResume() {
           super.onResume();
           if (Static.DEBUG) Log.i(TAG, "onResume()");

           storage = new Storage(this);
           if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
               Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
           else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
               Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
       }

       @Override
       public boolean onCreateOptionsMenu(Menu menu) {
           if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
           return super.onCreateOptionsMenu(menu);
       }

       @Override
       protected void onPause() {
           super.onPause();
           if (Static.DEBUG) Log.i(TAG, "onPause()");
       }

       @Override
       protected void onStop() {
           super.onStop();
           if (Static.DEBUG) Log.i(TAG, "onStop()");
       }

       @Override
       protected void onDestroy() {
           super.onDestroy();
           if (Static.DEBUG) Log.i(TAG, "onDestroy()");

           recording = false;

           if (mPreview != null) {
               mPreview.stopPreview();
           }

           if (mCamera != null) {
               mCamera.stopPreview();
               mCamera.release();
               mCamera = null;
           }
       }

       /** Prepare camera object.
        * */
       private void prepareCamera() {
           //1. Open camera object
           try {
               mCamera = getCameraInstance(this);
           } catch (Exception e) {
               e.printStackTrace();
               Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
               finish();
               return;
           }

           setOptimalCameraParams();

           //2. Connect Preview + 3. Start Preview + 8e Stop preview v metode surfaceChanged
           mPreview = new CameraView(this, mCamera);
           FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
           preview.addView(mPreview); //surfaceView to FrameLayout
           if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
       }

       /**
        * A safe way to get an instance of the Camera object.
        */
       @SuppressWarnings("deprecation")
       public static Camera getCameraInstance(Context ctx) throws Exception {
           Camera c = Camera.open();
           if (c == null)
               throw new Exception(ctx.getString(R.string.errCameraNotAvailable));

           if(Static.DEBUG) Log.i(TAG, "camera open");
           return c; // attempt to get a Camera instance, otherwise null
       }

       /**Method set optmial parameters. Depend on HW possibilities.*/
       @SuppressWarnings("deprecation")
       private void setOptimalCameraParams(){

           // Camera parameters
           Camera.Parameters params = mCamera.getParameters();
           List<string> focusModes = params.getSupportedFocusModes();
           if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
           if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());

           params.setRecordingHint(true); //MediaRecorder.start() to start faster

           //Automatically autofocus if it's possible
           if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
               params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
           } else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
               params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
           }

           // set Camera parameters
           mCamera.setParameters(params);
       }

       //---------------------------------------
       // initialize ffmpeg_recorder
       //---------------------------------------
       private void initRecorder() throws Exception {
           if(Static.DEBUG) Log.i(TAG,"init recorder");

           File output = null;

           try {
               output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
               if(output == null)
                   throw new Exception();
           } catch (Exception e) {
               e.printStackTrace();
               throw new Exception(getString(R.string.errSetOutputFile));
           }

           if (yuvImage == null) {
               yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
               if(Static.DEBUG) Log.i(TAG, "create yuvImage");
           }

           if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
           recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
           //recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
           //recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
           //recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
           //recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
           recorder.setFormat("mp4");
           recorder.setSampleRate(sampleAudioRateInHz);
           recorder.setFrameRate(recorderFrameRate);

           if(Static.DEBUG) Log.i(TAG, "recorder initialize success");

           audioRecordRunnable = new AudioRecordRunnable();
           audioThread = new Thread(audioRecordRunnable);
           runAudioThread = true;
       }

       public void startRecording() {

           try {
               initRecorder();
           } catch (Exception e){
               e.printStackTrace();
               Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
           }

           try {
               recorder.start();
               startTime = System.currentTimeMillis();
               recording = true;
               audioThread.start();

           } catch (FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
       }

       public void stopRecording() {

           runAudioThread = false;
           try {
               audioThread.join();
           } catch (InterruptedException e) {
               // reset interrupt to be nice
               Thread.currentThread().interrupt();
               return;
           }
           audioRecordRunnable = null;
           audioThread = null;

           if (recorder != null &amp;&amp; recording) {

               recording = false;
               if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
               try {
                   recorder.stop();
                   recorder.release();
               } catch (FFmpegFrameRecorder.Exception e) {
                   e.printStackTrace();
               }
               recorder = null;

           }
       }

       @Override
       public boolean onKeyDown(int keyCode, KeyEvent event) {

           if (keyCode == KeyEvent.KEYCODE_BACK) {
               if (recording) {
                   stopRecording();
               }

               finish();

               return true;
           }

           return super.onKeyDown(keyCode, event);
       }

       public void onClickBtnStartRecord(View v) {
           if (!recording) {
               startRecording();
               if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
           } else {
               // This will trigger the audio recording loop to stop and then set isRecorderStart = false;
               stopRecording();
               if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
           }
       }

       //---------------------------------------------
       // audio thread, gets and encodes audio data
       //---------------------------------------------
       class AudioRecordRunnable implements Runnable {

           @Override
           public void run() {
               android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

               // Audio
               int bufferSize;
               ShortBuffer audioData;
               int bufferReadResult;

               bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
               audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                       AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

               audioData = ShortBuffer.allocate(bufferSize);

               if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
               audioRecord.startRecording();

               /* ffmpeg_audio encoding loop */
               while (runAudioThread) {
                   //if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
                   bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
                   audioData.limit(bufferReadResult);
                   if (bufferReadResult > 0) {
                       //if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
                       // If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
                       // Why?  Good question...
                       if (recording) {
                           try {
                               recorder.recordSamples(audioData);
                               //if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
                           } catch (FFmpegFrameRecorder.Exception e) {
                               if(Static.DEBUG) Log.i(TAG,e.getMessage());
                               e.printStackTrace();
                           }
                       }
                   }
               }
               if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");

               /* encoding finish, release recorder */
               if (audioRecord != null) {
                   audioRecord.stop();
                   audioRecord.release();
                   audioRecord = null;
                   if(Static.DEBUG) Log.i(TAG,"audioRecord released");
               }
           }
       }

       /**TODO*/
       private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
           Camera.Size result=null;

           for (Camera.Size size : parameters.getSupportedPreviewSizes()) {

               if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);

               if (size.width&lt;=width &amp;&amp; size.height&lt;=height) {
                   if (result==null) {
                       result=size;
                   } else {
                       int resultArea=result.width*result.height;
                       int newArea=size.width*size.height;

                       if (newArea>resultArea) {
                           result=size;
                       }
                   }
               }
           }

           return(result);
       }

       //---------------------------------------------
       // camera thread, gets and encodes video data
       //---------------------------------------------
       private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

           private final String TAG = CameraView.class.getSimpleName();

           private SurfaceHolder mHolder;
           private Camera mCamera;

           public CameraView(Context context, Camera camera) {
               super(context);
               if(Static.DEBUG) Log.i(TAG, "camera view");

               mCamera = camera;

               // Install a SurfaceHolder.Callback so we get notified when the
               // underlying surface is created and destroyed.
               mHolder = getHolder();
               mHolder.addCallback(CameraView.this);
               mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
               mCamera.setPreviewCallback(CameraView.this);
           }

           @Override
           public void surfaceCreated(SurfaceHolder holder) {
               try {
                   stopPreview();
                   mCamera.setPreviewDisplay(holder);
               } catch (IOException exception) {
                   mCamera.release();
                   mCamera = null;
               }
           }

           public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
               if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);

               // If your preview can change or rotate, take care of those events here.
               // Make sure to stop the preview before resizing or reformatting it.

               if (mHolder.getSurface() == null){
                   // preview surface does not exist
                   return;
               }

               // stop preview before making changes
               try {
                   stopPreview();
               } catch (Exception e){
                   // ignore: tried to stop a non-existent preview
               }

               // start preview with new settings
               try {
                   Camera.Parameters params = mCamera.getParameters();

                   previewSize = getBestPreviewSize(w, h, params);
                   if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
                   if (previewSize != null)
                       params.setPreviewSize(previewSize.width, previewSize.height);

                   params.setPreviewFrameRate(recorderFrameRate);
                   if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
                   mCamera.setParameters(params);
                   mCamera.setPreviewDisplay(holder);
                   mCamera.setPreviewCallback(CameraView.this);
                   startPreview();
               } catch (Exception e){
                   if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
                   e.printStackTrace();
               }

           }

           @Override
           public void surfaceDestroyed(SurfaceHolder holder) {
               try {
                   mHolder.addCallback(null);
                   mCamera.setPreviewCallback(null);
               } catch (RuntimeException e) {
                   // The camera has probably just been released, ignore.
               }
           }

           public void startPreview() {
               if (!isPreviewOn &amp;&amp; mCamera != null) {
                   isPreviewOn = true;
                   mCamera.startPreview();
               }
           }

           public void stopPreview() {
               if (isPreviewOn &amp;&amp; mCamera != null) {
                   isPreviewOn = false;
                   mCamera.stopPreview();
               }
           }

           @Override
           public void onPreviewFrame(byte[] data, Camera camera) {
               if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                   startTime = System.currentTimeMillis();
                   return;
               }

               // get video data
               if (yuvImage != null &amp;&amp; recording) {
                   ((ByteBuffer)yuvImage.image[0].position(0)).put(data);

                   try {
                       long t = 1000 * (System.currentTimeMillis() - startTime);
                       if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
                       if (t > recorder.getTimestamp()) {
                           recorder.setTimestamp(t);
                       }
                       recorder.record(yuvImage);
                   } catch (FFmpegFrameRecorder.Exception e) {
                       if(Static.DEBUG) Log.i(TAG,e.getMessage());
                       e.printStackTrace();
                   }
               }
           }
       }
    }

    activity_opencv.xml :

    <?xml version="1.0" encoding="utf-8"?>

    <RelativeLayout>

       <FrameLayout>
       </FrameLayout>

       <ImageButton
           android:id="@+id/btnStartRecord"
           android:layout_width="70dp"
           android:layout_height="70dp"
           android:scaleType="fitXY"
           android:src="@drawable/record_icon"
           android:background="@null"
           android:text="@string/btnStartRecord"
           android:onClick="onClickBtnStartRecord"
           android:clickable="true"
           android:layout_centerVertical="true"
           android:layout_alignParentRight="true"
           android:layout_alignParentEnd="true"/>

       <TextView></TextView>

    </RelativeLayout>

    Overlay views working, but recorded video is without overlay views.

  • MediaPlayer within TextureView not working as intended

    15 mars 2016, par Russiee

    I’ve put in a MediaPlayer within a TextureView, which itself is located inside a ListView.

    Yesterday, the MediaPlayer worked as intended with a test .mp4 clip.
    Today, the MediaPlayer tries doing some kind of FFmpeg extraction, about which I have been unable to find any information.

    This is the stack trace :

    03-14 13:43:00.076 477-16532/? V/FFmpegExtractor: SniffFFMPEG
    03-14 13:43:00.076 477-16532/? I/FFmpegExtractor: android-source:0xafcff040
    03-14 13:43:00.077 477-16532/? D/FFMPEG: android source begin open
    03-14 13:43:00.077 477-16532/? D/FFMPEG: android open, url: android-source:0xafcff040
    03-14 13:43:00.077 477-16532/? D/FFMPEG: ffmpeg open android data source success, source ptr: 0xafcff040
    03-14 13:43:00.077 477-16532/? D/FFMPEG: android source open success
    03-14 13:43:00.149 477-16532/? I/FFMPEG: Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'android-source:0xafcff040':
    03-14 13:43:00.149 477-16532/? I/FFMPEG:   Metadata:
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     major_brand     : qt  
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     minor_version   : 0
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     compatible_brands: qt  
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     creation_time   : 2016-03-13 19:24:58
    03-14 13:43:00.149 477-16532/? I/FFMPEG:   Duration: 00:00:10.88, start: 0.000000, bitrate: 11209 kb/s
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Stream #0:0(und): Audio: aac (mp4a / 0x6134706D), 44100 Hz, mono, fltp, 86 kb/s (default)
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Metadata:
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       creation_time   : 2016-03-13 19:24:58
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       handler_name    : Core Media Data Handler
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Stream #0:1(und): Video: h264 (High) (avc1 / 0x31637661), yuv420p(tv, bt709), 1920x1080 [SAR 1:1 DAR 16:9], 11113 kb/s, 29.98 fps, 29.97 tbr, 600 tbn, 50 tbc (default)
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Metadata:
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       creation_time   : 2016-03-13 19:24:58
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       handler_name    : Core Media Data Handler
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       encoder         : H.264
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Stream #0:2(und): Data: none (mebx / 0x7862656D), 1 kb/s (default)
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Metadata:
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       creation_time   : 2016-03-13 19:24:58
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       handler_name    : Core Media Data Handler
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Stream #0:3(und): Data: none (mebx / 0x7862656D), 0 kb/s (default)
    03-14 13:43:00.149 477-16532/? I/FFMPEG:     Metadata:
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       creation_time   : 2016-03-13 19:24:58
    03-14 13:43:00.149 477-16532/? I/FFMPEG:       handler_name    : Core Media Data Handler
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: FFmpegExtrator, url: android-source:0xafcff040, format_name: mov,mp4,m4a,3gp,3g2,mj2, format_long_name: QuickTime / MOV
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: list the formats suppoted by ffmpeg:
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: ========================================
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[00]: mpeg
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[01]: mpegts
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[02]: mov,mp4,m4a,3gp,3g2,mj2
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[03]: matroska,webm
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[04]: asf
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[05]: rm
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[06]: flv
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[07]: swf
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[08]: avi
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[09]: ape
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[10]: dts
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[11]: flac
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[12]: ac3
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[13]: wav
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[14]: ogg
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[15]: vc1
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: format_names[16]: hevc
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: ========================================
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: major_brand tag is:qt  
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: [mp4]format is mov, confidence should be larger than mpeg4
    03-14 13:43:00.149 477-16532/? D/FFMPEG: android source close
    03-14 13:43:00.149 477-16532/? I/FFmpegExtractor: sniff through BetterSniffFFMPEG success
    03-14 13:43:00.149 477-16532/? D/FFmpegExtractor: ffmpeg detected media content as 'video/mp4' with confidence 0.41
    03-14 13:43:00.149 477-16532/? I/MediaExtractor: Use extended extractor for the special mime(video/mp4) or codec
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: FFmpegExtractor::FFmpegExtractor
    03-14 13:43:00.149 477-16532/? V/FFmpegExtractor: mFilename: android-source:0xafcff040
    03-14 13:43:00.150 477-16532/? D/FFMPEG: android source begin open
    03-14 13:43:00.150 477-16532/? D/FFMPEG: android open, url: android-source:0xafcff040
    03-14 13:43:00.150 477-16532/? D/FFMPEG: ffmpeg open android data source success, source ptr: 0xafcff040
    03-14 13:43:00.150 477-16532/? D/FFMPEG: android source open success
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: file startTime: 0
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: the duration is 00:00:10.87
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: stream_index: 0
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: support the codec(aac)
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: Tag mp4a/0x6134706d with codec(aac)
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: audio stream extradata(2):
    03-14 13:43:00.230 477-16532/? V/codec_utils: AAC
    03-14 13:43:00.230 477-16532/? V/codec_utils: aac profile: 1, sf_index: 4, channel: 1
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: bit_rate: 86249, sample_rate: 44100, channels: 1, bits_per_coded_sample: 16, block_align:0
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: the time is 00:00:10.93
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: audio startTime:0
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: create a audio track
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: stream_index: 1
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: support the codec(h264)
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: Tag avc1/0x31637661 with codec(h264)
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: video stream extradata:
    03-14 13:43:00.230 477-16532/? V/codec_utils: AVC
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: width: 1920, height: 1080, bit_rate: 11113682
    03-14 13:43:00.230 477-16532/? I/FFmpegExtractor: the time is 00:00:10.87
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: video startTime:0
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: create a video track
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: the stream is AVC, the length of a NAL unit: 4
    03-14 13:43:00.230 477-16532/? V/FFmpegExtractor: Starting reader thread
    03-14 13:43:00.230 477-16532/? D/FFmpegExtractor: Reader thread started
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: mProbePkts: 0, mEOF: 0, pb->error(if has): 0, mDefersToCreateVideoTrack: 0, mDefersToCreateAudioTrack: 0
    03-14 13:43:00.231 477-16532/? D/FFmpegExtractor: supported mime: video/mp4
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: FFmpegExtractor::getMetaData
    03-14 13:43:00.231 477-16537/? V/FFmpegExtractor: FFmpegExtractor enter thread(readerEntry)
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: FFmpegExtractor::getTrack[0]
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: FFmpegExtractor::getTrackMetaData[0]
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: FFmpegExtractor::getTrack[1]
    03-14 13:43:00.231 477-16532/? V/FFmpegExtractor: FFmpegExtractor::getTrackMetaData[1]
    03-14 13:43:00.232 10973-10973/com.hobbyte.touringandroid E/MediaPlayer: Should have subtitle controller already set

    Upon trying to play the video file, I get this log :

    03-14 13:52:06.133 743-821/system_process W/AudioTrack: AUDIO_OUTPUT_FLAG_FAST denied by client
    03-14 13:52:06.133 477-477/? D/NuPlayerDriver: start(0xb0f05040)
    03-14 13:52:06.133 477-16531/? I/GenericSource: start
    03-14 13:52:06.133 477-16531/? V/FFmpegExtractor: FFmpegExtractor::Track::start audio
    03-14 13:52:06.133 477-16531/? V/FFmpegExtractor: FFmpegExtractor::Track::start video
    03-14 13:52:06.133 477-16532/? V/FFmpegExtractor: read audio flush pkt
    03-14 13:52:06.133 477-16532/? V/FFmpegExtractor: read video flush pkt
    03-14 13:52:06.139 477-24743/? D/SoftFFmpegAudio: SoftFFmpegAudio component: OMX.ffmpeg.aac.decoder mMode: 1
    03-14 13:52:06.141 477-24743/? V/SoftFFmpegAudio: get pcm params, nChannels:4294967295, nSamplingRate:4294967295
    03-14 13:52:06.141 477-24743/? V/SoftFFmpegAudio: set OMX_IndexParamAudioPcm, nChannels:1, nSampleRate:44100, nBitsPerSample:16
    03-14 13:52:06.141 477-24743/? V/SoftFFmpegAudio: set OMX_IndexParamAudioAac, nChannels:1, nSampleRate:44100
    03-14 13:52:06.141 477-24743/? E/OMXNodeInstance: setParameter(1866465283) ERROR: 0x8000101a
    03-14 13:52:06.141 477-24743/? V/SoftFFmpegAudio: get pcm params, nChannels:1, nSamplingRate:44100
    03-14 13:52:06.147 477-24744/? E/OMXNodeInstance: OMX_GetExtensionIndex OMX.google.android.index.storeMetaDataInBuffers failed
    03-14 13:52:06.147 477-24744/? E/ACodec: [OMX.google.h264.decoder] storeMetaDataInBuffers failed w/ err -2147483648
    03-14 13:52:06.149 477-24745/? I/SoftFFmpegAudio: got extradata, ignore: 0, size: 2
    03-14 13:52:06.150 477-24745/? I/SoftFFmpegAudio: extradata is ready, size: 2
    03-14 13:52:06.150 477-24745/? D/SoftFFmpegAudio: begin to open ffmpeg audio decoder(aac), mCtx sample_rate: 44100, channels: 1, , sample_fmt: (null)
    03-14 13:52:06.154 477-24745/? D/SoftFFmpegAudio: open ffmpeg audio decoder(aac) success, mCtx sample_rate: 44100, channels: 1, sample_fmt: fltp
    03-14 13:52:06.154 477-24745/? I/SoftFFmpegAudio: Create sample rate converter for conversion of 44100 Hz fltp 1 channels(mono) to 44100 Hz s16 1 channels(mono)!
    03-14 13:52:06.154 477-24743/? V/SoftFFmpegAudio: get pcm params, nChannels:1, nSamplingRate:44100
    03-14 13:52:06.155 477-24739/? D/AudioSink: bufferCount (8) is too small and increased to 12
    03-14 13:52:06.162 477-24747/? E/SoftAVC: Decoder failed: -2
    03-14 13:52:06.162 477-24744/? E/ACodec: [OMX.google.h264.decoder] ERROR(0x80001001)
    03-14 13:52:06.162 477-24744/? E/ACodec: signalError(omxError 0x80001001, internalError -2147483648)
    03-14 13:52:06.163 477-24741/? E/MediaCodec: Codec reported err 0x80001001, actionCode 0, while in state 6
    03-14 13:52:06.167 477-24740/? E/NuPlayerDecoder: Failed to queue input buffer for OMX.google.h264.decoder (err=-38)
    03-14 13:52:06.167 477-16531/? E/NuPlayer: received error(0xffffffda) from video decoder, flushing(0), now shutting down
    03-14 13:52:06.168 10973-11040/com.hobbyte.touringandroid E/MediaPlayer: error (1, -38)
    03-14 13:52:06.168 10973-10973/com.hobbyte.touringandroid E/MediaPlayer: Error (1,-38)
    03-14 13:52:06.168 477-24740/? E/NuPlayerDecoder: failed to flush OMX.google.h264.decoder (err=-38)
    03-14 13:52:06.168 477-16531/? E/NuPlayer: received error(0xffffffda) from video decoder, flushing(2), now shutting down
    03-14 13:52:06.169 10973-10989/com.hobbyte.touringandroid E/MediaPlayer: error (1, -38)
    03-14 13:52:06.170 10973-10973/com.hobbyte.touringandroid E/MediaPlayer: Error (1,-38)

    For reference, here is my class file :

       package com.hobbyte.touringandroid.ui.adapter;

    import android.content.Context;
    import android.graphics.SurfaceTexture;
    import android.media.AudioManager;
    import android.media.MediaPlayer;
    import android.util.DisplayMetrics;
    import android.view.Gravity;
    import android.view.LayoutInflater;
    import android.view.Surface;
    import android.view.TextureView;
    import android.view.View;
    import android.view.ViewGroup;
    import android.widget.ArrayAdapter;
    import android.widget.ImageButton;
    import android.widget.ImageView;
    import android.widget.SeekBar;
    import android.widget.TextView;

    import com.google.android.exoplayer.ExoPlayer;
    import com.google.android.exoplayer.FrameworkSampleSource;
    import com.google.android.exoplayer.MediaCodecVideoTrackRenderer;
    import com.google.android.exoplayer.SampleSource;
    import com.google.android.exoplayer.TrackRenderer;
    import com.hobbyte.touringandroid.App;
    import com.hobbyte.touringandroid.tourdata.ListViewItem;
    import com.hobbyte.touringandroid.internet.LoadImageFromURL;
    import com.hobbyte.touringandroid.R;

    import java.io.File;
    import java.io.IOException;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    /**
    * @author Nikita
    */
    public class PoiContentAdapter extends ArrayAdapter<listviewitem> {
       private static final String TAG = "PoiContentAdapter";

       public static final int HEADER = 0;
       public static final int BODY = 1;
       public static final int IMAGE = 2;
       public static final int VIDEO = 3;

       private static Pattern namePattern;
       private static final String FILE_NAME_PATTERN = "https?:\\/\\/[-\\w\\.\\/]*\\/(.+\\.(jpe?g|png|mp4))";

       private ListViewItem[] items;

       private String keyID;

       private TextureView textureView;
       private MediaPlayer player;
       private AudioManager audio;

       private ImageButton play;
       private ImageButton replay;
       private ImageButton mute;
       private ImageButton max;
       private SeekBar volume;

       private String filePath;

       @Override
       public int getViewTypeCount() {
           return 4;
       }

       @Override
           public int getItemViewType(int position) {
           return items[position].getType();
       }

       public PoiContentAdapter(Context context, ListViewItem[] content, String keyID) {
           super(context, 0, content);
           this.keyID = keyID;
           items = content;
           namePattern = Pattern.compile(FILE_NAME_PATTERN);
       }

       /**
        * Inflates a certain view depending on the type of ListViewItem (Normal text or Image URL)
        * @param position Position of item in the ItemList
        * @param view View
        * @param parent ParentView
        * @return the view in question
        */
       @Override
       public View getView(int position, View view, ViewGroup parent) {
           ListViewItem listViewItem = items[position];
           int listViewItemType = getItemViewType(position);
           String filename = null;

           TextView contentView;

           if (listViewItem.getUrl() != null) {
               Matcher m = namePattern.matcher(listViewItem.getUrl());
               if (m.matches()) {
                   filename = m.group(1);
               }
           }

           if (view == null) {
               if (listViewItemType == IMAGE) {
                   view = LayoutInflater.from(getContext()).inflate(R.layout.poi_image, parent, false);
               } else if(listViewItemType == VIDEO) {
                   view = LayoutInflater.from(getContext()).inflate(R.layout.poi_video, parent, false);
               } else {
                   view = LayoutInflater.from(getContext()).inflate(R.layout.poi_content, parent, false);
               }
           }

           switch (listViewItemType) {
               case IMAGE:
                   ImageView imageView = (ImageView) view.findViewById(R.id.poiContentImageView);
                   TextView textView = (TextView) view.findViewById(R.id.poiContentImageDesc);
                   textView.setText(listViewItem.getText());

                   if (filename != null) {
                       new LoadImageFromURL(imageView, App.context).execute(filename, keyID); //Load image in a separate thread
                   }
                   return view;

               case VIDEO:
                   filePath = getContext().getFilesDir() + "/" + String.format("%s/video/%s", keyID, filename);
                   File file = new File(filePath);
                   if(!file.exists()) {
                       view = LayoutInflater.from(getContext()).inflate(R.layout.poi_content, parent, false);
                       contentView = (TextView) view.findViewById(R.id.poiContentTextView);
                       contentView.setText("This contains a video." + "\n" + "Download this tour with Media to see this Video!" + "\n");
                       contentView.setGravity(Gravity.CENTER_HORIZONTAL);
                   } else {
                       System.out.println(filePath);
                       textureView = (TextureView) view.findViewById(R.id.poiContentVideoView);

                       DisplayMetrics metrics = App.context.getResources().getDisplayMetrics();
                       int height = metrics.heightPixels / 2;
                       int width = metrics.widthPixels;
                       textureView.setMinimumHeight(height);
                       textureView.setMinimumWidth(width);

                       play = (ImageButton) view.findViewById(R.id.playButton);
                       replay = (ImageButton) view.findViewById(R.id.replayButtoon);
                       mute = (ImageButton) view.findViewById(R.id.muteButton);
                       max = (ImageButton) view.findViewById(R.id.maxVolButton);
                       volume = (SeekBar) view.findViewById(R.id.volumeControl);
                       audio = (AudioManager) App.context.getSystemService(Context.AUDIO_SERVICE);

                       textureView.setSurfaceTextureListener(videoListener);
                       TextView videoDesc = (TextView) view.findViewById(R.id.poiContentVideoDesc);
                       videoDesc.setText(listViewItem.getText());
                   }
                   return view;
               case HEADER:
                   // TODO
                   if(view.findViewById(R.id.poiContentTextView) == null) {
                       view = LayoutInflater.from(getContext()).inflate(R.layout.poi_content, parent, false);
                   }
                   contentView = (TextView) view.findViewById(R.id.poiContentTextView);
                   contentView.setText(listViewItem.getText() + "\n");
                   if(listViewItem.getText().length() == 0) {
                       return new View(getContext());
                   }
                   return view;
               case BODY:
                   // TODO
                   contentView = (TextView) view.findViewById(R.id.poiContentTextView);
                   contentView.setText(listViewItem.getText() + "\n");
                   return view;
               default:
                   contentView = (TextView) view.findViewById(R.id.poiContentTextView);
                   contentView.setText("Something went wrong\n");
                   return view;
           }
       }

       private TextureView.SurfaceTextureListener videoListener = new TextureView.SurfaceTextureListener() {
           @Override
           public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
               Surface s = new Surface(surface);
               try {
                   player = new MediaPlayer();
                   player.setDataSource(filePath);
                   player.setSurface(s);
                   player.prepareAsync();
                   player.setAudioStreamType(AudioManager.STREAM_MUSIC);
                   player.setVideoScalingMode(MediaPlayer.VIDEO_SCALING_MODE_SCALE_TO_FIT);
                   player.setOnBufferingUpdateListener(new MediaPlayer.OnBufferingUpdateListener() {
                       @Override
                       public void onBufferingUpdate(MediaPlayer mp, int percent) {
                           //Do nothing
                       }
                   });
                   player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                       @Override
                       public void onCompletion(MediaPlayer mp) {
                           //Do nothing
                       }
                   });
                   player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                       @Override
                       public void onPrepared(final MediaPlayer mp) {
                           play.setOnClickListener(new View.OnClickListener() {
                               @Override
                               public void onClick(View v) {
                                   if(mp.isPlaying()) {
                                       mp.pause();
                                       play.setImageResource(R.mipmap.ic_play_arrow_white_36dp);
                                   } else {
                                       mp.start();
                                       play.setImageResource(R.mipmap.ic_pause_white_36dp);
                                   }
                               }
                           });

                           replay.setOnClickListener(new View.OnClickListener() {
                               @Override
                               public void onClick(View v) {
                                   if(mp.isPlaying()) {
                                       play.setImageResource(R.mipmap.ic_play_arrow_white_36dp);
                                       mp.pause();
                                       mp.seekTo(0);
                                   } else {
                                       play.setImageResource(R.mipmap.ic_play_arrow_white_36dp);
                                       mp.seekTo(0);
                                   }
                               }
                           });
                           mute.setOnClickListener(new View.OnClickListener() {
                               @Override
                               public void onClick(View v) {
                                   mp.setVolume(0.0f, 0.0f);
                                   volume.setProgress(0);
                               }
                           });
                           max.setOnClickListener(new View.OnClickListener() {
                               @Override
                               public void onClick(View v) {
                                   mp.setVolume(1.0f, 1.0f);
                                   volume.setProgress(audio.getStreamMaxVolume(AudioManager.STREAM_MUSIC));
                               }
                           });

                           int maxVolume = audio.getStreamMaxVolume(AudioManager.STREAM_MUSIC);
                           int currVolume = audio.getStreamVolume(AudioManager.STREAM_MUSIC);
                           volume.setMax(maxVolume);
                           volume.setProgress(currVolume);
                           volume.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                               @Override
                               public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                                   audio.setStreamVolume(AudioManager.STREAM_MUSIC, progress, 0);
                               }

                               @Override
                               public void onStartTrackingTouch(SeekBar seekBar) {

                               }

                               @Override
                               public void onStopTrackingTouch(SeekBar seekBar) {

                               }
                           });
                       }
                   });

                   player.setOnVideoSizeChangedListener(new MediaPlayer.OnVideoSizeChangedListener() {
                       @Override
                       public void onVideoSizeChanged(MediaPlayer mp, int width, int height) {
                           //Do nothing
                       }
                   });
               } catch (IllegalArgumentException e) {
                   // TODO Auto-generated catch block
                   e.printStackTrace();
               } catch (SecurityException e) {
                   // TODO Auto-generated catch block
                   e.printStackTrace();
               } catch (IllegalStateException e) {
                   // TODO Auto-generated catch block
                   e.printStackTrace();
               } catch (IOException e) {
                   // TODO Auto-generated catch block
                   e.printStackTrace();
               }
           }

           @Override
           public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

           }

           @Override
           public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
               if (player != null) {
                   player.stop();
                   player.release();
                   player = null;
               }
               return true;
           }

           @Override
           public void onSurfaceTextureUpdated(SurfaceTexture surface) {

           }
       };
    }

    I really do not understand in the slightest what is causing all these errors, and why the video file won’t play ?
    If anyone is able to help I will highly highly appreciate it !

    I am using Genymotion Emulator - Google Nexus 4 - API 21

    Thank you very much !

  • squeeze image while capturing video with FFmpegFrameRecorder

    12 avril 2016, par Saty

    I am trying to stream video with FFmpegFrameRecorder using javacv. All are working great except I find videos which are actually images are bit squeeze from the height.

    I am trying to stream video with FFmpegFrameRecorder using javacv. Everything works great, except that I find the recorded videos (which are built from the captured preview images) come out slightly squeezed in height.

    public class MainActivity extends Activity implements OnClickListener {

    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "rtmp://username:password@xxx.xxx.xxx.xxx:1935/live/test.flv";
    //private String ffmpeg_link = "/mnt/sdcard/new_stream.flv";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 44100;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 30;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
       super.onCreate(savedInstanceState);

       setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
       setContentView(R.layout.activity_main);

       initLayout();
       initRecorder();
    }

    @Override
    protected void onResume() {
       super.onResume();

       if (mWakeLock == null) {
           PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
           mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, LOG_TAG);
           mWakeLock.acquire();
       }
    }

    @Override
    protected void onPause() {
       super.onPause();

       if (mWakeLock != null) {
           mWakeLock.release();
           mWakeLock = null;
       }
    }

    @Override
    protected void onDestroy() {
       super.onDestroy();

       recording = false;
    }


    private void initLayout() {

       mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);

       recordButton = (Button) findViewById(R.id.recorder_control);
       recordButton.setText("Start");
       recordButton.setOnClickListener(this);

       cameraView = new CameraView(this);

       LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(imageWidth, imageHeight);        
       mainLayout.addView(cameraView, layoutParam);
       Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
       Log.w(LOG_TAG,"initRecorder");

       if (yuvIplimage == null) {
           // Recreated after frame size is set in surface change method
           yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
           //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);

           Log.v(LOG_TAG, "IplImage.create");
       }

       recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
       Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);

       recorder.setFormat("flv");
       Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");

       recorder.setSampleRate(sampleAudioRateInHz);
       Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

       // re-set in the surface changed method as well
       recorder.setFrameRate(frameRate);
       Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

       // Create audio recording thread
       audioRecordRunnable = new AudioRecordRunnable();
       audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
       try {
           recorder.start();
           startTime = System.currentTimeMillis();
           recording = true;
           audioThread.start();
       } catch (FFmpegFrameRecorder.Exception e) {
           e.printStackTrace();
       }
    }

    public void stopRecording() {
       // This should stop the audio thread from running
       runAudioThread = false;

       if (recorder != null &amp;&amp; recording) {
           recording = false;
           Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
           try {
               recorder.stop();
               recorder.release();
           } catch (FFmpegFrameRecorder.Exception e) {
               e.printStackTrace();
           }
           recorder = null;
       }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
       // Quit when back button is pushed
       if (keyCode == KeyEvent.KEYCODE_BACK) {
           if (recording) {
               stopRecording();
           }
           finish();
           return true;
       }
       return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
       if (!recording) {
           startRecording();
           Log.w(LOG_TAG, "Start Button Pushed");
           recordButton.setText("Stop");
       } else {
           stopRecording();
           Log.w(LOG_TAG, "Stop Button Pushed");
           recordButton.setText("Start");
       }
    }

    //---------------------------------------------
    // audio thread, gets and encodes audio data
    //---------------------------------------------
    class AudioRecordRunnable implements Runnable {

       @Override
       public void run() {
           // Set the thread priority
           android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

           // Audio
           int bufferSize;
           short[] audioData;
           int bufferReadResult;

           bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                   AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT);
           audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
                   AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);

           audioData = new short[bufferSize];

           Log.d(LOG_TAG, "audioRecord.startRecording()");
           audioRecord.startRecording();

           // Audio Capture/Encoding Loop
           while (runAudioThread) {
               // Read from audioRecord
               bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
               if (bufferReadResult > 0) {
                   //Log.v(LOG_TAG,"audioRecord bufferReadResult: " + bufferReadResult);

                   // Changes in this variable may not be picked up despite it being "volatile"
                   if (recording) {
                       try {
                           // Write to FFmpegFrameRecorder
                           Buffer[] buffer = {ShortBuffer.wrap(audioData, 0, bufferReadResult)};                        
                           recorder.record(buffer);
                       } catch (FFmpegFrameRecorder.Exception e) {
                           Log.v(LOG_TAG,e.getMessage());
                           e.printStackTrace();
                       }
                   }
               }
           }
           Log.v(LOG_TAG,"AudioThread Finished");

           /* Capture/Encoding finished, release recorder */
           if (audioRecord != null) {
               audioRecord.stop();
               audioRecord.release();
               audioRecord = null;
               Log.v(LOG_TAG,"audioRecord released");
           }
       }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

       private boolean previewRunning = false;

       private SurfaceHolder holder;
       private Camera camera;

       private byte[] previewBuffer;

       long videoTimestamp = 0;

       Bitmap bitmap;
       Canvas canvas;

       public CameraView(Context _context) {
           super(_context);

           holder = this.getHolder();
           holder.addCallback(this);
           holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
       }

       @Override
       public void surfaceCreated(SurfaceHolder holder) {
           camera = Camera.open();

           try {
               camera.setPreviewDisplay(holder);
               camera.setPreviewCallback(this);

               Camera.Parameters currentParams = camera.getParameters();
               Log.v(LOG_TAG,"Preview Framerate: " + currentParams.getPreviewFrameRate());
               Log.v(LOG_TAG,"Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

               // Use these values
               imageWidth = currentParams.getPreviewSize().width;
               imageHeight = currentParams.getPreviewSize().height;
               frameRate = currentParams.getPreviewFrameRate();                

               bitmap = Bitmap.createBitmap(imageWidth, imageHeight, Bitmap.Config.ALPHA_8);


               /*
               Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat())/8);
               previewBuffer = new byte[imageWidth * imageHeight * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat())/8];
               camera.addCallbackBuffer(previewBuffer);
               camera.setPreviewCallbackWithBuffer(this);
               */              

               camera.startPreview();
               previewRunning = true;
           }
           catch (IOException e) {
               Log.v(LOG_TAG,e.getMessage());
               e.printStackTrace();
           }  
       }

       public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
           Log.v(LOG_TAG,"Surface Changed: width " + width + " height: " + height);

           // We would do this if we want to reset the camera parameters
           /*
           if (!recording) {
               if (previewRunning){
                   camera.stopPreview();
               }
               try {
                   //Camera.Parameters cameraParameters = camera.getParameters();
                   //p.setPreviewSize(imageWidth, imageHeight);
                   //p.setPreviewFrameRate(frameRate);
                   //camera.setParameters(cameraParameters);

                   camera.setPreviewDisplay(holder);
                   camera.startPreview();
                   previewRunning = true;
               }
               catch (IOException e) {
                   Log.e(LOG_TAG,e.getMessage());
                   e.printStackTrace();
               }  
           }            
           */

           // Get the current parameters
           Camera.Parameters currentParams = camera.getParameters();
           Log.v(LOG_TAG,"Preview Framerate: " + currentParams.getPreviewFrameRate());
           Log.v(LOG_TAG,"Preview imageWidth: " + currentParams.getPreviewSize().width + " imageHeight: " + currentParams.getPreviewSize().height);

           // Use these values
           imageWidth = currentParams.getPreviewSize().width;
           imageHeight = currentParams.getPreviewSize().height;
           frameRate = currentParams.getPreviewFrameRate();

           // Create the yuvIplimage if needed
           yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_8U, 2);
           //yuvIplimage = IplImage.create(imageWidth, imageHeight, IPL_DEPTH_32S, 2);
       }

       @Override
       public void surfaceDestroyed(SurfaceHolder holder) {
           try {
               camera.setPreviewCallback(null);

               previewRunning = false;
               camera.release();

           } catch (RuntimeException e) {
               Log.v(LOG_TAG,e.getMessage());
               e.printStackTrace();
           }
       }

       @Override
       public void onPreviewFrame(byte[] data, Camera camera) {

           if (yuvIplimage != null &amp;&amp; recording) {
               videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

               // Put the camera preview frame right into the yuvIplimage object
               yuvIplimage.getByteBuffer().put(data);

               // FAQ about IplImage:
               // - For custom raw processing of data, getByteBuffer() returns an NIO direct
               //   buffer wrapped around the memory pointed by imageData, and under Android we can
               //   also use that Buffer with Bitmap.copyPixelsFromBuffer() and copyPixelsToBuffer().
               // - To get a BufferedImage from an IplImage, we may call getBufferedImage().
               // - The createFrom() factory method can construct an IplImage from a BufferedImage.
               // - There are also a few copy*() methods for BufferedImage&lt;->IplImage data transfers.

               // Let's try it..
               // This works but only on transparency
               // Need to find the right Bitmap and IplImage matching types

               /*
               bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
               //bitmap.setPixel(10,10,Color.MAGENTA);

               canvas = new Canvas(bitmap);
               Paint paint = new Paint();
               paint.setColor(Color.GREEN);
               float leftx = 20;
               float topy = 20;
               float rightx = 50;
               float bottomy = 100;
               RectF rectangle = new RectF(leftx,topy,rightx,bottomy);
               canvas.drawRect(rectangle, paint);

               bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
               */
               //Log.v(LOG_TAG,"Writing Frame");

               try {

                   // Get the correct time
                   recorder.setTimestamp(videoTimestamp);

                   // Record the image into FFmpegFrameRecorder
                   recorder.record(yuvIplimage);

               } catch (FFmpegFrameRecorder.Exception e) {
                   Log.v(LOG_TAG,e.getMessage());
                   e.printStackTrace();
               }
           }
       }
    }

    }