
Other articles (50)
-
MediaSPIP v0.2
21 June 2013. MediaSPIP 0.2 is the first stable MediaSPIP release.
Its official release date is 21 June 2013, and it is announced here.
The zip file provided here contains only the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to carry out other manual (…)
-
Librairies et binaires spécifiques au traitement vidéo et sonore
31 January 2010. The following software and libraries are used by SPIPmotion in one way or another.
Required binaries. FFmpeg: the main encoder; it transcodes almost every type of video and audio file into formats playable on the web. See this tutorial for its installation. Oggz-tools: utilities for inspecting Ogg files. MediaInfo: extracts metadata from most video and audio formats.
Complementary, optional binaries. flvtool2: (…)
-
Support audio et vidéo HTML5
10 April 2011. MediaSPIP uses the HTML5 video and audio tags to play multimedia documents, taking advantage of the latest W3C innovations supported by modern browsers.
For older browsers, the Flowplayer Flash player is used instead.
The HTML5 player was created specifically for MediaSPIP: it is fully skinnable to match a chosen theme.
These technologies make it possible to deliver video and sound both to conventional computers (…)
On other sites (9048)
-
Android recording video with overlay view
6 March 2016, by t0m. I am building an Android app that can capture video with overlay views. (I also need the onPreviewFrame method.)
Via SurfaceView and JavaCV with FFmpeg:
OpenCVCameraActivity.java:
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.List;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity {
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private long startTime = 0;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private Camera.Size previewSize; //preview and Camera and Recorder width and height
private int recorderFrameRate = 25;
// audio data getting thread
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
private volatile boolean runAudioThread = true;
// video data getting thread
private Camera mCamera;
private CameraView mPreview;
private FFmpegFrameRecorder recorder;
private boolean recording = false;
private Frame yuvImage = null;
//storage
private Storage storage;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
setContentView(R.layout.activity_opencv);
prepareCamera();
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
super.onRestart();
if (Static.DEBUG) Log.i(TAG, "onRestart()");
}
@Override
protected void onStart() {
super.onStart();
if (Static.DEBUG) Log.i(TAG, "onStart()");
}
@Override
protected void onResume() {
super.onResume();
if (Static.DEBUG) Log.i(TAG, "onResume()");
storage = new Storage(this);
if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
return super.onCreateOptionsMenu(menu);
}
@Override
protected void onPause() {
super.onPause();
if (Static.DEBUG) Log.i(TAG, "onPause()");
}
@Override
protected void onStop() {
super.onStop();
if (Static.DEBUG) Log.i(TAG, "onStop()");
}
@Override
protected void onDestroy() {
super.onDestroy();
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
recording = false;
if (mPreview != null) {
mPreview.stopPreview();
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
/** Prepare camera object.
* */
private void prepareCamera() {
//1. Open camera object
try {
mCamera = getCameraInstance(this);
} catch (Exception e) {
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
finish();
return;
}
setOptimalCameraParams();
//2. Connect Preview + 3. Start Preview + 8e Stop preview in the surfaceChanged() method
mPreview = new CameraView(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
preview.addView(mPreview); //surfaceView to FrameLayout
if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
}
/**
* A safe way to get an instance of the Camera object.
*/
@SuppressWarnings("deprecation")
public static Camera getCameraInstance(Context ctx) throws Exception {
Camera c = Camera.open();
if (c == null)
throw new Exception(ctx.getString(R.string.errCameraNotAvailable));
if(Static.DEBUG) Log.i(TAG, "camera open");
return c; // Camera.open() succeeded; the null case was handled above
}
/** Sets optimal camera parameters, depending on hardware capabilities. */
@SuppressWarnings("deprecation")
private void setOptimalCameraParams(){
// Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());
params.setRecordingHint(true); //MediaRecorder.start() to start faster
//Automatically autofocus if it's possible
if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
// set Camera parameters
mCamera.setParameters(params);
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() throws Exception {
if(Static.DEBUG) Log.i(TAG,"init recorder");
File output = null;
try {
output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
if(output == null)
throw new Exception();
} catch (Exception e) {
e.printStackTrace();
throw new Exception(getString(R.string.errSetOutputFile));
}
if (yuvImage == null) {
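// NV21 preview data is 12 bits (1.5 bytes) per pixel; two 8-bit channels give this Frame enough room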
yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
if(Static.DEBUG) Log.i(TAG, "create yuvImage");
}
if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
//recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
//recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
//recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
//recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
recorder.setFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
initRecorder();
} catch (Exception e){
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
}
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
// reset interrupt to be nice
Thread.currentThread().interrupt();
return;
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
public void onClickBtnStartRecord(View v) {
if (!recording) {
startRecording();
if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
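// getMinBufferSize() returns a size in bytes; allocating that many shorts over-provisions the buffer, which is harmless here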
audioData = ShortBuffer.allocate(bufferSize);
if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
//if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.recordSamples(audioData);
//if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
if(Static.DEBUG) Log.i(TAG,"audioRecord released");
}
}
}
/**TODO*/
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
} else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private final String TAG = CameraView.class.getSimpleName();
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
if(Static.DEBUG) Log.i(TAG, "camera view");
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
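// setType() is deprecated and has been a no-op since API 11; kept for pre-3.0 devices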
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
try {
Camera.Parameters params = mCamera.getParameters();
previewSize = getBestPreviewSize(w, h, params);
if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
if (previewSize != null)
params.setPreviewSize(previewSize.width, previewSize.height);
params.setPreviewFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
mCamera.setParameters(params);
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(CameraView.this);
startPreview();
} catch (Exception e){
if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
// get video data
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
try {
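// FFmpegFrameRecorder timestamps are in microseconds, hence the x1000 on the elapsed milliseconds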
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
}
activity_opencv.xml:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<FrameLayout
android:id="@+id/cameraPreview"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<ImageButton
android:id="@+id/btnStartRecord"
android:layout_width="70dp"
android:layout_height="70dp"
android:scaleType="fitXY"
android:src="@drawable/record_icon"
android:background="@null"
android:text="@string/btnStartRecord"
android:onClick="onClickBtnStartRecord"
android:clickable="true"
android:layout_centerVertical="true"
android:layout_alignParentRight="true"
android:layout_alignParentEnd="true" />
<TextView />
</RelativeLayout>
Overlay views work on screen, but the recorded video does not contain them.
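Since onPreviewFrame() only ever delivers raw camera pixels, the overlay views can never end up in the recorded file unless they are composited into each frame before recorder.record() is called. Below is a minimal sketch of that idea, not the asker's code: it assumes the NV21 preview frame has already been converted to an RGBA Bitmap (for example via YuvImage and BitmapFactory), OverlayCompositor is a hypothetical helper name, and AndroidFrameConverter is JavaCV's stock Bitmap-to-Frame converter.

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.view.View;
import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.Frame;

public final class OverlayCompositor {
    private final AndroidFrameConverter converter = new AndroidFrameConverter();

    /** Draws overlayView on top of a copy of cameraFrame and returns a Frame for recording. */
    public Frame compose(Bitmap cameraFrame, View overlayView) {
        // work on a mutable copy so the original preview bitmap stays untouched
        Bitmap composed = cameraFrame.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(composed);
        // render the overlay view hierarchy into the frame
        overlayView.draw(canvas);
        // the resulting Frame is RGBA, so record() no longer goes down the NV21 path
        return converter.convert(composed);
    }
}

The per-frame Bitmap copy and view drawing are far more expensive than pushing the raw NV21 buffer, so a sketch like this would normally run on a background thread or at a reduced recording resolution.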
-
Android recording video with overlay view [way 1]
2 March 2016, by t0m. I am building an Android app that can capture video with overlay views. I have tried two approaches (1 and 2):
1. Via SurfaceView and JavaCV with FFmpeg.
2. Via OpenCV and JavaCV with FFmpeg.
3. For API 21+, possibly via MediaProjection. (The question is split in two because of Stack Overflow's length limit.)
ad 1. Via SurfaceView and JavaCV with FFmpeg:
OpenCVCameraActivity.java and activity_opencv.xml: identical, line for line, to the listings in the previous item ("Android recording video with overlay view"), so they are not repeated here. Overlay views work on screen, but the recorded video does not contain them.
ad 2. Via OpenCV and JavaCV with FFmpeg: (…)
-
Getting Access Violation exceptions in FFmpeg when trying to free the IO buffer
26 June 2015, by Patrik. I've been trying to create an audio stream extractor/demuxer using FFmpeg.AutoGen and C#. The code actually works, in the sense that it extracts the audio stream correctly, but I can't seem to clean up the resources afterwards. Whenever I try to free my input buffer (allocated with av_malloc), I get an Access Violation exception, and if I don't free it, I appear to leak memory equal to the size of the allocated input buffer.
This is my code (from a console application I used for testing):
class Program
{
static void Main(string[] args)
{
using (var videoStream = Assembly.GetExecutingAssembly().GetManifestResourceStream("FFmpeg.Demux.test.mp4"))
using (MemoryStream output = new MemoryStream())
{
FFmpegAudioExtractor audioExtractor = new FFmpegAudioExtractor();
audioExtractor.Extract(videoStream, output);
Debug.Assert(1331200 == output.Length);
//File.WriteAllBytes(@"c:\temp\out.pcm", output.ToArray());
}
Console.WriteLine("Done");
Console.ReadLine();
}
}
public class FFmpegAudioExtractor
{
public FFmpegAudioExtractor()
{
FFmpegInvoke.av_register_all();
FFmpegInvoke.avcodec_register_all();
}
public unsafe void Extract(Stream input, Stream output)
{
AVFormatContext* inFormatContextPtr = null;
int inFomatContextOpenResult = -1;
byte* inIoBuffer = null;
AVIOContext* inIoContextPtr = null;
SwrContext* swrContextPtr = null;
AVFrame* inFramePtr = null;
AVFrame* outFramePtr = null;
AVCodecContext* inCodecContextPtr = null;
try
{
/* 1 */
inFormatContextPtr = FFmpegInvoke.avformat_alloc_context();
if (inFormatContextPtr == null)
throw new ApplicationException("Failed to allocate the input format context (AVFormatContext).");
// HACK this should alloc a fixed buffer and use callbacks to fill it
if (input.Length > int.MaxValue)
throw new ArgumentException("Data too large.", "input");
// TEST alloc a 10MB buffer to make the memory leak real obvious
int inIoBufferSize = 1024 * 1024 * 10; //(int)input.Length;
/* 2 */
inIoBuffer = (byte*)FFmpegInvoke.av_malloc((uint)inIoBufferSize);
if (inIoBuffer == null)
throw new ApplicationException("Failed to allocate the input IO buffer.");
// HACK continued, fill buffer
IntPtr inIoBufferPtr = new IntPtr(inIoBuffer);
byte[] buffer = new byte[4096];
int read, offset = 0;
while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
{
Marshal.Copy(buffer, 0, inIoBufferPtr + offset, read);
offset += read;
}
/* 3 */
inIoContextPtr = FFmpegInvoke.avio_alloc_context((sbyte*)inIoBuffer,
inIoBufferSize,
0 /* writable */,
null,
IntPtr.Zero,
IntPtr.Zero,
IntPtr.Zero);
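// Note: as documented by FFmpeg, the buffer handed to avio_alloc_context may be
// freed and replaced with a new one by libavformat, so inIoBuffer must not be
// assumed to remain valid after this point.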
if (inIoContextPtr == null)
throw new ApplicationException("Failed to allocate the input IO context (AVIOContext).");
// configure the format context to use our custom IO context
inFormatContextPtr->pb = inIoContextPtr;
inFormatContextPtr->flags = FFmpegInvoke.AVFMT_FLAG_CUSTOM_IO;
/* 35 */
inFomatContextOpenResult = FFmpegInvoke.avformat_open_input(&inFormatContextPtr, "", null, null);
if (inFomatContextOpenResult != 0)
throw new ApplicationException("Could not open input: " + inFomatContextOpenResult.ToString(CultureInfo.InvariantCulture));
// retrieve stream information
int avformatFindStreamInfoResult = FFmpegInvoke.avformat_find_stream_info(inFormatContextPtr, null);
if (avformatFindStreamInfoResult < 0)
throw new ApplicationException("Failed to locate stream info: " + avformatFindStreamInfoResult.ToString(CultureInfo.InvariantCulture));
// find audio stream
int inAudioStreamIndex = FFmpegInvoke.av_find_best_stream(inFormatContextPtr,
AVMediaType.AVMEDIA_TYPE_AUDIO,
-1 /* wanted_stream_nb */,
-1 /* related_stream */,
null /* [out] decoder */,
0 /* flags */);
if (inAudioStreamIndex < 0)
throw new ApplicationException("Failed to find audio stream: " + inAudioStreamIndex.ToString(CultureInfo.InvariantCulture));
// get audio stream pointer
AVStream* inAudioStreamPtr = inFormatContextPtr->streams[inAudioStreamIndex];
Contract.Assume(inAudioStreamPtr != null);
// find the decoder
AVCodec* inCodecPtr = FFmpegInvoke.avcodec_find_decoder(inAudioStreamPtr->codec->codec_id);
if (inCodecPtr == null)
throw new ApplicationException("Failed to find decoder with codec_id: " + inAudioStreamPtr->codec->codec_id.ToString());
/* 36 */
inCodecContextPtr = FFmpegInvoke.avcodec_alloc_context3(inCodecPtr);
if (FFmpegInvoke.avcodec_copy_context(inCodecContextPtr, inAudioStreamPtr->codec) != 0)
throw new ApplicationException("Failed to copy context.");
// open codec
/* 37 */
if (FFmpegInvoke.avcodec_open2(inCodecContextPtr, inCodecPtr, null) < 0)
{
string codecName = Marshal.PtrToStringAuto(new IntPtr(inCodecPtr->name));
throw new Exception("Failed to open codec: " + codecName);
}
// alloc frame
/* 38 */
inFramePtr = FFmpegInvoke.av_frame_alloc();
if (inFramePtr == null)
throw new ApplicationException("Could not allocate frame");
// initialize packet, set data to NULL, let the demuxer fill it
AVPacket packet = new AVPacket();
AVPacket* packetPtr = &packet;
FFmpegInvoke.av_init_packet(packetPtr);
packetPtr->data = null;
packetPtr->size = 0;
// alloc SWR
/* 4 */
swrContextPtr = FFmpegInvoke.swr_alloc();
AVSampleFormat outSampleFormat = AVSampleFormat.AV_SAMPLE_FMT_S16;
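// front-left | front-right is the standard stereo layout (equivalent to AV_CH_LAYOUT_STEREO)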
long outChannelLayout = FFmpegInvoke.AV_CH_FRONT_LEFT | FFmpegInvoke.AV_CH_FRONT_RIGHT;
// configure SWR
FFmpegInvoke.av_opt_set_sample_fmt(swrContextPtr, "in_sample_fmt", inCodecContextPtr->sample_fmt, 0);
FFmpegInvoke.av_opt_set_sample_fmt(swrContextPtr, "out_sample_fmt", outSampleFormat, 0);
// setting the in_channel_layout seems to break things
//FFmpegInvoke.av_opt_set_channel_layout(swrContextPtr, "in_channel_layout", (long)inCodecContextPtr->channel_layout, 0);
FFmpegInvoke.av_opt_set_channel_layout(swrContextPtr, "out_channel_layout", outChannelLayout, 0);
FFmpegInvoke.av_opt_set_int(swrContextPtr, "in_sample_rate", inCodecContextPtr->sample_rate, 0);
FFmpegInvoke.av_opt_set_int(swrContextPtr, "out_sample_rate", 44100, 0);
// alloc output frame
/* 45 */
outFramePtr = FFmpegInvoke.av_frame_alloc();
outFramePtr->channel_layout = (ulong)outChannelLayout;
outFramePtr->sample_rate = 44100;
outFramePtr->format = (int)outSampleFormat;
// config done, init
FFmpegInvoke.swr_init(swrContextPtr);
bool frameFinished;
// read frames from the file
while (FFmpegInvoke.av_read_frame(inFormatContextPtr, packetPtr) >= 0)
{
AVPacket* origPacketPtr = packetPtr;
try
{
if (packetPtr->stream_index != inAudioStreamIndex)
continue;
do
{
byte[] decodedFrame;
int decodedBytes = this.DecodePacket(inCodecContextPtr,
packetPtr,
inFramePtr,
swrContextPtr,
outFramePtr,
out frameFinished,
out decodedFrame);
if (decodedBytes < 0)
break;
output.Write(decodedFrame, 0, decodedFrame.Length);
packetPtr->data += decodedBytes;
packetPtr->size -= decodedBytes;
} while (packetPtr->size > 0);
}
finally
{
FFmpegInvoke.av_free_packet(origPacketPtr);
}
}
// flush cached frames
packetPtr->data = null;
packetPtr->size = 0;
do
{
byte[] decodedFrame;
this.DecodePacket(inCodecContextPtr, packetPtr, inFramePtr, swrContextPtr, outFramePtr, out frameFinished, out decodedFrame);
if (decodedFrame != null)
output.Write(decodedFrame, 0, decodedFrame.Length);
} while (frameFinished);
}
finally
{
/* 45 */
if (outFramePtr != null)
FFmpegInvoke.av_frame_free(&outFramePtr);
/* 4 */
if (swrContextPtr != null)
FFmpegInvoke.swr_free(&swrContextPtr);
/* 38 */
if (inFramePtr != null)
FFmpegInvoke.av_frame_free(&inFramePtr);
/* 37 */
if (inCodecContextPtr != null)
FFmpegInvoke.avcodec_close(inCodecContextPtr);
/* 36 */
if (inCodecContextPtr != null)
FFmpegInvoke.avcodec_free_context(&inCodecContextPtr);
/* 35 */
if (inFomatContextOpenResult == 0)
FFmpegInvoke.avformat_close_input(&inFormatContextPtr);
/* 3 */
if (inIoContextPtr != null)
FFmpegInvoke.av_freep(&inIoContextPtr);
/* 2 */
if (inIoBuffer != null)
FFmpegInvoke.av_freep(&inIoBuffer);
/* 1 */
// This is called by avformat_close_input
if (inFormatContextPtr != null)
FFmpegInvoke.avformat_free_context(inFormatContextPtr);
}
}
private unsafe int DecodePacket(AVCodecContext* audioDecoderContextPtr, AVPacket* packetPtr, AVFrame* inFramePtr, SwrContext* swrContextPtr, AVFrame* outFramePtr, out bool frameDecoded, out byte[] decodedFrame)
{
decodedFrame = null;
/* decode audio frame */
int gotFrame;
int readBytes = FFmpegInvoke.avcodec_decode_audio4(audioDecoderContextPtr, inFramePtr, &gotFrame, packetPtr);
if (readBytes < 0)
throw new ApplicationException("Error decoding audio frame: " + readBytes.ToString(CultureInfo.InvariantCulture));
frameDecoded = gotFrame != 0;
/* Some audio decoders decode only part of the packet, and have to be
* called again with the remainder of the packet data.
* Sample: fate-suite/lossless-audio/luckynight-partial.shn
* Also, some decoders might over-read the packet. */
int decoded = Math.Min(readBytes, packetPtr->size);
if (frameDecoded)
{
if (FFmpegInvoke.swr_convert_frame(swrContextPtr, outFramePtr, inFramePtr) != 0)
throw new ApplicationException("Failed to convert frame.");
long delay;
do
{
int unpaddedLinesize = outFramePtr->nb_samples * outFramePtr->channels * FFmpegInvoke.av_get_bytes_per_sample((AVSampleFormat)outFramePtr->format);
IntPtr dataPtr = new IntPtr(outFramePtr->extended_data[0]);
decodedFrame = new byte[unpaddedLinesize];
Marshal.Copy(dataPtr, decodedFrame, 0, unpaddedLinesize);
// check if we have more samples to convert for this frame
delay = FFmpegInvoke.swr_get_delay(swrContextPtr, 44100);
if (delay > 0)
{
if (FFmpegInvoke.swr_convert_frame(swrContextPtr, outFramePtr, null) != 0)
throw new ApplicationException("Failed to convert frame.");
}
} while (delay > 0);
}
return decoded;
}
}
The failing line is:
FFmpegInvoke.av_freep(&inIoBuffer);
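The behaviour is consistent with the AVIOContext ownership rule noted above: libavformat may free the buffer originally passed to avio_alloc_context and substitute its own (FFmpeg's avio_reading.c example makes the same point), so by cleanup time inIoBuffer can already be dangling, and freeing it again triggers the access violation. The documented cleanup frees whatever buffer the context currently holds instead of the saved pointer. A hedged sketch against the code above, assuming the generated AVIOContext struct exposes its buffer field, as FFmpeg.AutoGen normally does:

/* 2 + 3 */
if (inIoContextPtr != null)
{
    // free whichever buffer the context owns right now, not the saved pointer
    FFmpegInvoke.av_freep(&inIoContextPtr->buffer);
    FFmpegInvoke.av_freep(&inIoContextPtr);
}
// and drop the av_freep(&inIoBuffer) call entirely: ownership of that buffer
// passed to libavformat in avio_alloc_context

Freeing inIoContextPtr->buffer releases the current buffer whether or not libavformat replaced the original, which avoids both the double free and the reported leak.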