
Media (91)
-
Géodiversité
9 September 2011
Updated: August 2018
Language: French
Type: Text
-
USGS Real-time Earthquakes
8 September 2011
Updated: September 2011
Language: French
Type: Text
-
SWFUpload Process
6 September 2011
Updated: September 2011
Language: French
Type: Text
-
La conservation du net art au musée. Les stratégies à l’œuvre
26 May 2011
Updated: July 2013
Language: French
Type: Text
-
Podcasting Legal guide
16 May 2011
Updated: May 2011
Language: English
Type: Text
-
Creativecommons informational flyer
16 May 2011
Updated: July 2013
Language: English
Type: Text
Other articles (16)
-
Personnaliser en ajoutant son logo, sa bannière ou son image de fond
5 September 2013
Some themes support three customization elements: adding a logo; adding a banner; adding a background image.
-
Ecrire une actualité
21 June 2013
Present the changes to your MediaSPIP, or news about your projects, through the news section.
In the default MediaSPIP theme, spipeo, news items are displayed at the bottom of the main page, below the editorials.
You can customize the news creation form.
News creation form: for a document of type "news", the default fields are: publication date (customize the publication date) (...)
-
Publier sur MédiaSpip
13 June 2013
Can I post content from an iPad tablet?
Yes, if your MediaSPIP installation is at version 0.2 or higher. If needed, contact your MediaSPIP administrator to find out.
On other sites (4065)
-
AVIO Open error -RTSP Protocol
22 February 2016, by Muthukumar S
I am new to JavaCV and I am using JavaCV 0.8. I started from RecordActivity.java in the samples folder and made a few modifications. I tried to record video in FLV and MP4 format, but I get an exception like this:
org.bytedeco.javacv.FrameRecorder$Exception: avio_open error() error -1330794744: Could not open 'rtsp://10.0.0.136:1935/live/myStream'
package com.example.javacv10;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
public class HomeActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "rtsp://10.0.0.136:1935/live/myStream";
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_home);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_home, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, 8, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
Log.v(LOG_TAG,"Writing Frame");
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
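A hedged side note on the avio_open failure quoted above: that error means FFmpeg could not open the output URL. A common JavaCV pattern for a media server listening on port 1935 (the default RTMP port) is to publish an FLV-muxed stream over rtmp:// rather than opening an rtsp:// URL with the FLV muxer. The sketch below only illustrates that pattern under the assumption that the server accepts RTMP; the endpoint URL is adapted from the question, and the RtmpRecorderFactory class name is hypothetical.

import org.bytedeco.javacv.FFmpegFrameRecorder;

public class RtmpRecorderFactory {
    // Sketch (assumption): build a recorder that publishes over RTMP with the FLV container.
    public static FFmpegFrameRecorder create(int width, int height, int frameRate, int sampleRate) {
        String url = "rtmp://10.0.0.136:1935/live/myStream"; // assumed endpoint, adapted from the question
        FFmpegFrameRecorder r = new FFmpegFrameRecorder(url, width, height, 1); // 1 audio channel
        r.setFormat("flv");          // FLV is the container RTMP expects
        r.setFrameRate(frameRate);
        r.setSampleRate(sampleRate);
        return r;                    // call start() afterwards, preferably off the UI thread so it does not block
    }
}

If start() still fails with an avio_open error after this change, the application or stream name on the server side, or network reachability from the device, is the next thing to check.

-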
Android recording video with overlay view [way 1]
2 March 2016, by t0m
I am trying to build an Android app that captures video with overlay views. I tried two approaches (1 and 2).
1. Via SurfaceView and JavaCV with FFmpeg.
2. Via OpenCV and JavaCV with FFmpeg.
3. For API 21+, maybe with MediaProjection. (The question is split into two parts due to the Stack Overflow length limit.)
ad 1. Via SurfaceView and JavaCV with FFmpeg :
OpenCVCameraActivity.java :
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.List;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity {
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private long startTime = 0;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private Camera.Size previewSize; //preview and Camera and Recorder width and height
private int recorderFrameRate = 25;
// audio data getting thread
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
private volatile boolean runAudioThread = true;
// video data getting thread
private Camera mCamera;
private CameraView mPreview;
private FFmpegFrameRecorder recorder;
private boolean recording = false;
private Frame yuvImage = null;
//storage
private Storage storage;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
setContentView(R.layout.activity_opencv);
prepareCamera();
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
super.onRestart();
if (Static.DEBUG) Log.i(TAG, "onRestart()");
}
@Override
protected void onStart() {
super.onStart();
if (Static.DEBUG) Log.i(TAG, "onStart()");
}
@Override
protected void onResume() {
super.onResume();
if (Static.DEBUG) Log.i(TAG, "onResume()");
storage = new Storage(this);
if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
return super.onCreateOptionsMenu(menu);
}
@Override
protected void onPause() {
super.onPause();
if (Static.DEBUG) Log.i(TAG, "onPause()");
}
@Override
protected void onStop() {
super.onStop();
if (Static.DEBUG) Log.i(TAG, "onStop()");
}
@Override
protected void onDestroy() {
super.onDestroy();
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
recording = false;
if (mPreview != null) {
mPreview.stopPreview();
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
/** Prepare camera object.
* */
private void prepareCamera() {
//1. Open camera object
try {
mCamera = getCameraInstance(this);
} catch (Exception e) {
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
finish();
return;
}
setOptimalCameraParams();
// 2. Connect preview + 3. Start preview (the preview is stopped again in the surfaceChanged method)
mPreview = new CameraView(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
preview.addView(mPreview); //surfaceView to FrameLayout
if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
}
/**
* A safe way to get an instance of the Camera object.
*/
@SuppressWarnings("deprecation")
public static Camera getCameraInstance(Context ctx) throws Exception {
Camera c = Camera.open();
if (c == null)
throw new Exception(ctx.getString(R.string.errCameraNotAvailable));
if(Static.DEBUG) Log.i(TAG, "camera open");
return c; // attempt to get a Camera instance, otherwise null
}
/** Sets optimal camera parameters, depending on hardware capabilities. */
@SuppressWarnings("deprecation")
private void setOptimalCameraParams(){
// Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());
params.setRecordingHint(true); //MediaRecorder.start() to start faster
//Automatically autofocus if it's possible
if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
// set Camera parameters
mCamera.setParameters(params);
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() throws Exception {
if(Static.DEBUG) Log.i(TAG,"init recorder");
File output = null;
try {
output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
if(output == null)
throw new Exception();
} catch (Exception e) {
e.printStackTrace();
throw new Exception(getString(R.string.errSetOutputFile));
}
if (yuvImage == null) {
yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
if(Static.DEBUG) Log.i(TAG, "create yuvImage");
}
if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
//recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
//recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
//recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
//recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
recorder.setFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
initRecorder();
} catch (Exception e){
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
}
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
// reset interrupt to be nice
Thread.currentThread().interrupt();
return;
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
public void onClickBtnStartRecord(View v) {
if (!recording) {
startRecording();
if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
//if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.recordSamples(audioData);
//if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
if(Static.DEBUG) Log.i(TAG,"audioRecord released");
}
}
}
/**TODO*/
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
} else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private final String TAG = CameraView.class.getSimpleName();
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
if(Static.DEBUG) Log.i(TAG, "camera view");
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
try {
Camera.Parameters params = mCamera.getParameters();
previewSize = getBestPreviewSize(w, h, params);
if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
if (previewSize != null)
params.setPreviewSize(previewSize.width, previewSize.height);
params.setPreviewFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
mCamera.setParameters(params);
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(CameraView.this);
startPreview();
} catch (Exception e){
if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
// get video data
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
}
activity_opencv.xml :
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout>
<FrameLayout>
</FrameLayout>
<ImageButton
android:id="@+id/btnStartRecord"
android:layout_width="70dp"
android:layout_height="70dp"
android:scaleType="fitXY"
android:src="@drawable/record_icon"
android:background="@null"
android:text="@string/btnStartRecord"
android:onClick="onClickBtnStartRecord"
android:clickable="true"
android:layout_centerVertical="true"
android:layout_alignParentRight="true"
android:layout_alignParentEnd="true"/>
<TextView></TextView>
</RelativeLayout>

Overlay views are working, but the recorded video does not contain the overlay views.
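One reason the overlay never ends up in the file with this approach is that onPreviewFrame only hands back the raw camera bytes, so whatever other views draw on screen is never part of the recorded frame. A minimal sketch of one possible workaround, assuming JavaCV's AndroidFrameConverter is available (the OverlayComposer class and overlayView parameter are hypothetical), composites the overlay into a Bitmap of the frame and records that instead:

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.view.View;
import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;

public class OverlayComposer {
    private final AndroidFrameConverter converter = new AndroidFrameConverter();

    // Draws the overlay view on top of an ARGB Bitmap of the camera frame and records the result.
    // cameraFrame must already be a Bitmap of the current preview; overlayView is hypothetical.
    public void recordWithOverlay(FFmpegFrameRecorder recorder, Bitmap cameraFrame, View overlayView)
            throws FFmpegFrameRecorder.Exception {
        Bitmap composite = cameraFrame.copy(Bitmap.Config.ARGB_8888, true); // mutable copy to draw on
        Canvas canvas = new Canvas(composite);
        overlayView.draw(canvas);                   // paint the overlay on top of the camera image
        Frame frame = converter.convert(composite); // RGBA Frame for JavaCV
        recorder.record(frame);                     // some JavaCV versions may need the record(Frame, pixelFormat) overload
    }
}

Converting the NV21 preview bytes to a Bitmap and doing this for every frame is expensive, which is the usual trade-off of compositing in software.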
2. Via OpenCV and JavaCV with FFmpeg :
-
Android recording video with overlay view [way 2]
2 March 2016, by t0m
I am trying to build an Android app that captures video with overlay views. I tried two approaches (1 and 2).
1. Via SurfaceView and JavaCV with FFmpeg.
2. Via OpenCV and JavaCV with FFmpeg.
3. For API 21+, maybe with MediaProjection. (The question is split into two parts due to the Stack Overflow length limit.)
ad 1. Via SurfaceView and JavaCV with FFmpeg :
ad 2. Via OpenCV and JavaCV with FFmpeg :
OpenCVCameraActivity.java :
import android.app.Activity;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity implements
CameraBridgeViewBase.CvCameraViewListener2,
View.OnTouchListener {
//name of activity, for DEBUGGING
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private OpenCVCameraPreview mOpenCvCameraView;
private List<Camera.Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
private static long frameCounter = 0;
long startTime = 0;
private Mat edgesMat;
boolean recording = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 1280;
private int imageHeight = 720;
private int frameRate = 30;
private Frame yuvImage = null;
private File ffmpeg_link;
private FFmpegFrameRecorder recorder;
/*audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
ShortBuffer[] samples;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
try {
setContentView(R.layout.activity_opencv);
mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.enableFpsMeter();
ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
} catch (Exception e){
e.printStackTrace();
}
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
if (Static.DEBUG) Log.i(TAG, "onRestart()");
super.onRestart();
}
@Override
protected void onStart() {
if (Static.DEBUG) Log.i(TAG, "onStart()");
super.onStart();
}
@Override
protected void onResume() {
if (Static.DEBUG) Log.i(TAG, "onResume()");
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
} else {
Log.i(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
super.onCreateOptionsMenu(menu);
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Camera.Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
@Override
protected void onPause() {
if (Static.DEBUG) Log.i(TAG, "onPause()");
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
protected void onStop() {
if (Static.DEBUG) Log.i(TAG, "onStop()");
super.onStop();
}
@Override
protected void onDestroy() {
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
++frameCounter;
//Log.i(TAG, "Frame number: "+frameCounter);
final Mat rgba = inputFrame.rgba();
//Core.flip(rgba, rgba, 1);
/*if(Static.DEBUG) Log.i(TAG,"rgba.total(): "+rgba.total());
if(Static.DEBUG) Log.i(TAG,"rgba.channels(): " +rgba.channels());*/
byte[] data = new byte[(int) (rgba.total() * rgba.channels())];
rgba.get(0, 0, data);
//if(Static.DEBUG) Log.i(TAG,"return_buff: "+return_buff.length);
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return rgba;
}
// get video data
if (yuvImage != null && recording) {
ByteBuffer b = (ByteBuffer)yuvImage.image[0].position(0);
b.put(data);
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
return rgba;
}
@Override
public void onCameraViewStarted(int width, int height) {
edgesMat = new Mat();
}
@Override
public void onCameraViewStopped() {
if (edgesMat != null)
edgesMat.release();
edgesMat = null;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
} else if (item.getGroupId() == 2) {
int id = item.getItemId();
Camera.Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
return false;
}
/**
* Click to ImageButton to start recording.
*/
public void onClickBtnStartRecord2(View v) {
if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");
if(!recording)
startRecording();
else
stopRecording();
}
private void startRecording() {
if (Static.DEBUG) Log.i(TAG, "startRecording()");
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
if (Static.DEBUG) Log.i(TAG, "startRecording() success");
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
private void stopRecording() {
if (Static.DEBUG) Log.i(TAG, "stopRecording()");
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (Static.DEBUG) Log.i(TAG, "stopRecording() 2");
if(recorder != null && recording) {
recording = false;
try {
recorder.stop();
recorder.release();
Log.i(TAG, "Finishing recording, calling stop and release on recorder");
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.i(TAG, "init recorder");
try {
if (yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 4);
Log.i(TAG, "create yuvImage");
}
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
//Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
Log.i(TAG, "recorder initialize success");
} catch (Exception e){
e.printStackTrace();
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
Log.d(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
// ffmpeg_audio encoding loop
while(runAudioThread) {
//Log.v(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
try {
recorder.recordSamples(audioData);
//Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(TAG, "AudioThread Finished, release audioRecord");
// encoding finish, release recorder
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(TAG, "audioRecord released");
}
}
}
}
OpenCVCameraPreview.java :
import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import org.opencv.android.JavaCameraView;
import java.io.FileOutputStream;
import java.util.List;
public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {
private static final String TAG = OpenCVCameraPreview.class.getSimpleName();
private String mPictureFileName;
public OpenCVCameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Camera.Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Camera.Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Camera.Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
activity_opencv.xml :
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout>
<ImageButton
android:id="@+id/btnStartRecord2"
android:layout_width="70dp"
android:layout_height="70dp"
android:scaleType="fitXY"
android:src="@drawable/record_icon"
android:background="@null"
android:text="@string/btnStartRecord"
android:onClick="onClickBtnStartRecord2"
android:layout_centerVertical="true"
android:layout_alignParentRight="true"
android:layout_alignParentEnd="true"/>
</RelativeLayout>

Overlay views are working, but the recorded video does not contain the overlay views, and recording through the onCameraFrame method is very slow.
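Because this approach already receives every frame as an RGBA Mat in onCameraFrame, one option (a sketch, assuming a burned-in overlay is acceptable; the MatOverlay class and drawOverlay method are hypothetical) is to draw the overlay directly on the Mat with OpenCV before it is handed to the recorder, so no separate view compositing is needed:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public class MatOverlay {
    // Burns a simple box-plus-text overlay into the RGBA camera frame before it is recorded.
    public static Mat drawOverlay(Mat rgba, String label) {
        Imgproc.rectangle(rgba, new Point(10, 10), new Point(400, 80),
                new Scalar(0, 0, 0, 255), -1); // filled background box
        Imgproc.putText(rgba, label, new Point(20, 60),
                Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2);
        return rgba;
    }
}

On the slowness: allocating a fresh byte[] and copying the full 1280x720 Mat on every frame in onCameraFrame is costly; reusing a single buffer, or recording at a lower resolution or frame rate, is the usual first mitigation.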