
Media (91)
-
Corona Radiata
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
-
Lights in the Sky
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
-
Head Down
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
-
Echoplex
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
-
Discipline
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
-
Letting You
26 September 2011, by
Updated: September 2011
Language: English
Type: Audio
Other articles (103)
-
MediaSPIP 0.1 Beta version
25 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP proclaimed as "usable".
The zip file provided here only contains the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...) -
MediaSPIP version 0.1 Beta
16 April 2011, by
MediaSPIP 0.1 beta is the first version of MediaSPIP declared "usable".
The zip file provided here contains only the MediaSPIP sources in the standalone version.
To get a working installation, you must manually install all the software dependencies on the server.
If you want to use this archive for a "farm mode" installation, you will also have to make other modifications (...) -
Improving the base version
13 September 2013
A nicer multiple select
The Chosen plugin improves the usability of multiple-select form fields. Compare the two images below.
To use it, enable the Chosen plugin (Site general configuration > Plugin management), then configure it (Templates > Chosen) by enabling Chosen on the public site and specifying which form elements to enhance, for example select[multiple] for multiple-select lists (...)
On other sites (10378)
-
Android studio + OpenCV + FFmpeg
26 February 2016, by t0m
I have a problem with this code: it works only on a Genymotion device with Android 4.1.1, but not on a Genymotion device with 5.0.1 or on a real Huawei Honor 4C with Android 4.4.2.
I imported OpenCV 3.1 into Android Studio following: http://stackoverflow.com/a/27421494/4244605
I added JavaCV with FFmpeg from: https://github.com/bytedeco/javacv
Android Studio 1.5.1
minSdkVersion 15
compileSdkVersion 23
The code is for testing only.
OpenCVCameraActivity.java:
package co.timeiseverything.pstimeiseverything;
import android.app.Activity;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import java.io.File;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity implements
CameraBridgeViewBase.CvCameraViewListener2,
View.OnTouchListener {
//name of activity, for DEBUGGING
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private OpenCVCameraPreview mOpenCvCameraView;
private List<Camera.Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
private static long frameCounter = 0;
long startTime = 0;
private Mat edgesMat;
boolean recording = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 1920;
private int imageHeight = 1080;
private int frameRate = 30;
private Frame yuvImage = null;
private File ffmpeg_link;
private FFmpegFrameRecorder recorder;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
ShortBuffer[] samples;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
try {
setContentView(R.layout.activity_opencv);
mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//mOpenCvCameraView.enableFpsMeter();
ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
} catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onRestart() {
if (Static.DEBUG) Log.i(TAG, "onRestart()");
super.onRestart();
}
@Override
protected void onStart() {
if (Static.DEBUG) Log.i(TAG, "onStart()");
super.onStart();
}
@Override
protected void onResume() {
if (Static.DEBUG) Log.i(TAG, "onResume()");
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
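// Note: this requests OpenCV Manager 2.4.11 even though OpenCV 3.1 was imported;
// the second listing further below passes OPENCV_VERSION_3_1_0 here instead.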
} else {
Log.i(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
super.onCreateOptionsMenu(menu);
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Camera.Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
@Override
protected void onPause() {
if (Static.DEBUG) Log.i(TAG, "onPause()");
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
protected void onStop() {
if (Static.DEBUG) Log.i(TAG, "onStop()");
super.onStop();
}
@Override
protected void onDestroy() {
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
++frameCounter;
//Log.i(TAG, "Frame number: "+frameCounter);
return inputFrame.rgba();
}
@Override
public void onCameraViewStarted(int width, int height) {
edgesMat = new Mat();
}
@Override
public void onCameraViewStopped() {
if (edgesMat != null)
edgesMat.release();
edgesMat = null;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
} else if (item.getGroupId() == 2) {
int id = item.getItemId();
Camera.Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
return false;
}
/**
* Click to ImageButton to start recording.
*/
public void onClickBtnStartRecord2(View v) {
if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");
if(!recording)
startRecording();
else
stopRecording();
}
private void startRecording() {
if (Static.DEBUG) Log.i(TAG, "startRecording()");
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
private void stopRecording() {
if (Static.DEBUG) Log.i(TAG, "stopRecording()");
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
recording = false;
Log.v(TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(TAG, "init recorder");
try {
if (yuvImage == null) {
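// Two byte-channels per pixel is the JavaCV convention for NV21 preview frames;
// note that as posted, onCameraFrame() never passes video frames to this recorder,
// so only audio actually ends up in the file.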
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(TAG, "create yuvImage");
}
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
} catch (Exception e){
e.printStackTrace();
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
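// getMinBufferSize() returns a size in bytes; allocating that many shorts
// over-provisions the buffer, which is harmless for test code.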
Log.d(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
//Log.v(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
try {
recorder.recordSamples(audioData);
//Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(TAG, "audioRecord released");
}
}
}
}
OpenCVCameraPreview.java:
package co.timeiseverything.pstimeiseverything;
import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import org.opencv.android.JavaCameraView;
import java.io.FileOutputStream;
import java.util.List;
public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {
private static final String TAG = OpenCVCameraPreview.class.getSimpleName();
private String mPictureFileName;
public OpenCVCameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Camera.Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Camera.Size resolution) {
disconnectCamera();
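// mMaxWidth/mMaxHeight are protected fields inherited from OpenCV's
// CameraBridgeViewBase; connectCamera() re-opens the camera using them.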
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Camera.Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
Gradle:
apply plugin: 'com.android.application'
android {
compileSdkVersion 23
buildToolsVersion "23.0.2"
defaultConfig {
applicationId "co.timeiseverything.pstimeiseverything"
minSdkVersion 15
targetSdkVersion 23
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
packagingOptions {
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.properties'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.xml'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.properties'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.xml'
}
}
repositories {
mavenCentral()
}
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
testCompile 'junit:junit:4.12'
compile 'com.android.support:appcompat-v7:23.1.1'
compile 'com.google.android.gms:play-services-appindexing:8.1.0'
compile group: 'org.bytedeco', name: 'javacv', version: '1.1'
compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-arm'
compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-x86'
compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-arm'
compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-x86'
compile project(':openCVLibrary310')
}
proguard-rules.pro
Edited by: link
jniLibs:
app/src/main/jniLibs: armeabi armeabi-v7a arm64-v8a mips mips64 x86 x86_64
Problem
02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: onClickBtnStartRecord()
02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: startRecording()
02-19 11:57:37.684 1759-1759/ W/OpenCVCameraActivity: init recorder
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: create yuvImage
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: /storage/emulated/0/stream.mp4
02-19 11:57:37.696 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: false
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x1d arg 0x18cc3
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6ffffffe arg 0x21c30
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6fffffff arg 0x1
02-19 11:57:37.838 1759-1759/co.timeiseverything.pstimeiseverything E/art: dlopen("/data/app/co.timeiseverything.pstimeiseverything-2/lib/x86/libjniavutil.so", RTLD_LAZY) failed: dlopen failed: cannot locate symbol "av_version_info" referenced by "libjniavutil.so"...
02-19 11:57:37.843 1759-1759/co.timeiseverything.pstimeiseverything I/art: Rejecting re-init on previously-failed class java.lang.Class
02-19 11:57:37.844 1759-1759/co.timeiseverything.pstimeiseverything E/AndroidRuntime: FATAL EXCEPTION: main
Process: co.timeiseverything.pstimeiseverything, PID: 1759
java.lang.IllegalStateException: Could not execute method of the activity
at android.view.View$1.onClick(View.java:4020)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
Caused by: java.lang.reflect.InvocationTargetException
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at android.view.View$1.onClick(View.java:4015)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
Caused by: java.lang.UnsatisfiedLinkError: org.bytedeco.javacpp.avutil
at java.lang.Class.classForName(Native Method)
at java.lang.Class.forName(Class.java:309)
at org.bytedeco.javacpp.Loader.load(Loader.java:413)
at org.bytedeco.javacpp.Loader.load(Loader.java:381)
at org.bytedeco.javacpp.avcodec$AVPacket.<clinit>(avcodec.java:1650)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:149)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:129)
at co.timeiseverything.pstimeiseverything.OpenCVCameraActivity.initRecorder(OpenCVCameraActivity.java:320)
at co.timeiseverything.pstimeiseverything.OpenCVCameraActivity.startRecording(OpenCVCameraActivity.java:266)
at co.timeiseverything.pstimeiseverything.OpenCVCameraActivity.onClickBtnStartRecord2(OpenCVCameraActivity.java:259)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at android.view.View$1.onClick(View.java:4015)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
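The trace shows the crash beginning in JavaCPP's Loader while avcodec's AVPacket class initializes. A minimal diagnostic sketch, not part of the original question (the method name is made up, and calling it from onCreate() is an assumption): it forces the same native loading at startup, so the linking failure surfaces before FFmpegFrameRecorder is ever constructed.
private void checkFfmpegNatives() {
    try {
        // Trigger the same Loader.load() path that fails in the trace above.
        org.bytedeco.javacpp.Loader.load(org.bytedeco.javacpp.avutil.class);
        org.bytedeco.javacpp.Loader.load(org.bytedeco.javacpp.avcodec.class);
        Log.i(TAG, "FFmpeg natives loaded");
    } catch (UnsatisfiedLinkError e) {
        // On the failing devices this should report the missing av_version_info symbol.
        Log.e(TAG, "FFmpeg natives failed to load", e);
    }
}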
-
Android recording video with overlay view [way 1]
2 March 2016, by t0m
I am working on an Android app that captures video with overlay views. I have tried two approaches (1 and 2):
1. Via SurfaceView and JavaCV with FFmpeg.
2. Via OpenCV and JavaCV with FFmpeg.
3. For API 21+, maybe with MediaProjection.
(The question is split in two because of the Stack Overflow length limit.)
ad 1. Via SurfaceView and JavaCV with FFmpeg:
OpenCVCameraActivity.java:
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.List;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity {
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private long startTime = 0;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private Camera.Size previewSize; //preview and Camera and Recorder width and height
private int recorderFrameRate = 25;
// audio data getting thread
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
private volatile boolean runAudioThread = true;
// video data getting thread
private Camera mCamera;
private CameraView mPreview;
private FFmpegFrameRecorder recorder;
private boolean recording = false;
private Frame yuvImage = null;
//storage
private Storage storage;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
setContentView(R.layout.activity_opencv);
prepareCamera();
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
super.onRestart();
if (Static.DEBUG) Log.i(TAG, "onRestart()");
}
@Override
protected void onStart() {
super.onStart();
if (Static.DEBUG) Log.i(TAG, "onStart()");
}
@Override
protected void onResume() {
super.onResume();
if (Static.DEBUG) Log.i(TAG, "onResume()");
storage = new Storage(this);
if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
return super.onCreateOptionsMenu(menu);
}
@Override
protected void onPause() {
super.onPause();
if (Static.DEBUG) Log.i(TAG, "onPause()");
}
@Override
protected void onStop() {
super.onStop();
if (Static.DEBUG) Log.i(TAG, "onStop()");
}
@Override
protected void onDestroy() {
super.onDestroy();
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
recording = false;
if (mPreview != null) {
mPreview.stopPreview();
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
/** Prepare camera object.
* */
private void prepareCamera() {
//1. Open camera object
try {
mCamera = getCameraInstance(this);
} catch (Exception e) {
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
finish();
return;
}
setOptimalCameraParams();
//2. Connect preview + 3. Start preview (preview is stopped/restarted in surfaceChanged())
mPreview = new CameraView(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
preview.addView(mPreview); //surfaceView to FrameLayout
if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
}
/**
* A safe way to get an instance of the Camera object.
*/
@SuppressWarnings("deprecation")
public static Camera getCameraInstance(Context ctx) throws Exception {
Camera c = Camera.open();
if (c == null)
throw new Exception(ctx.getString(R.string.errCameraNotAvailable));
if(Static.DEBUG) Log.i(TAG, "camera open");
return c; // a usable Camera instance (never null here; failure throws above)
}
/** Sets optimal camera parameters, depending on hardware capabilities. */
@SuppressWarnings("deprecation")
private void setOptimalCameraParams(){
// Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());
params.setRecordingHint(true); //MediaRecorder.start() to start faster
//Prefer continuous-video autofocus when supported
if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
// set Camera parameters
mCamera.setParameters(params);
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() throws Exception {
if(Static.DEBUG) Log.i(TAG,"init recorder");
File output = null;
try {
output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
if(output == null)
throw new Exception();
} catch (Exception e) {
e.printStackTrace();
throw new Exception(getString(R.string.errSetOutputFile));
}
if (yuvImage == null) {
yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
if(Static.DEBUG) Log.i(TAG, "create yuvImage");
}
if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
//recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
//recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
//recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
//recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
recorder.setFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
initRecorder();
} catch (Exception e){
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
}
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
// reset interrupt to be nice
Thread.currentThread().interrupt();
return;
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
public void onClickBtnStartRecord(View v) {
if (!recording) {
startRecording();
if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
} else {
// This will trigger the audio recording loop to stop and then set recording = false;
stopRecording();
if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
//if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.recordSamples(audioData);
//if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
if(Static.DEBUG) Log.i(TAG,"audioRecord released");
}
}
}
/**TODO*/
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
} else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private final String TAG = CameraView.class.getSimpleName();
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
if(Static.DEBUG) Log.i(TAG, "camera view");
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
try {
Camera.Parameters params = mCamera.getParameters();
previewSize = getBestPreviewSize(w, h, params);
if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
if (previewSize != null)
params.setPreviewSize(previewSize.width, previewSize.height);
params.setPreviewFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
mCamera.setParameters(params);
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(CameraView.this);
startPreview();
} catch (Exception e){
if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
// get video data
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
try {
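// FFmpegFrameRecorder timestamps are in microseconds: scale the elapsed
// wall-clock milliseconds by 1000 and only ever move the clock forward.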
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
}
activity_opencv.xml:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <FrameLayout
        android:id="@+id/cameraPreview"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />

    <ImageButton
        android:id="@+id/btnStartRecord"
        android:layout_width="70dp"
        android:layout_height="70dp"
        android:scaleType="fitXY"
        android:src="@drawable/record_icon"
        android:background="@null"
        android:text="@string/btnStartRecord"
        android:onClick="onClickBtnStartRecord"
        android:clickable="true"
        android:layout_centerVertical="true"
        android:layout_alignParentRight="true"
        android:layout_alignParentEnd="true" />

    <TextView
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />
</RelativeLayout>

The overlay views work, but the recorded video does not contain them.
2. Via OpenCV and JavaCV with FFmpeg:
-
Android recording video with overlay view [way 2]
2 March 2016, by t0m
I am working on an Android app that captures video with overlay views. I have tried two approaches (1 and 2):
1. Via SurfaceView and JavaCV with FFmpeg.
2. Via OpenCV and JavaCV with FFmpeg.
3. For API 21+, maybe with MediaProjection.
(The question is split in two because of the Stack Overflow length limit.)
ad 1. Via SurfaceView and JavaCV with FFmpeg:
ad 2. Via OpenCV and JavaCV with FFmpeg:
OpenCVCameraActivity.java:
import android.app.Activity;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity implements
CameraBridgeViewBase.CvCameraViewListener2,
View.OnTouchListener {
//name of activity, for DEBUGGING
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private OpenCVCameraPreview mOpenCvCameraView;
private List<Camera.Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
private static long frameCounter = 0;
long startTime = 0;
private Mat edgesMat;
boolean recording = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 1280;
private int imageHeight = 720;
private int frameRate = 30;
private Frame yuvImage = null;
private File ffmpeg_link;
private FFmpegFrameRecorder recorder;
/*audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
ShortBuffer[] samples;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
try {
setContentView(R.layout.activity_opencv);
mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.enableFpsMeter();
ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
} catch (Exception e){
e.printStackTrace();
}
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
if (Static.DEBUG) Log.i(TAG, "onRestart()");
super.onRestart();
}
@Override
protected void onStart() {
if (Static.DEBUG) Log.i(TAG, "onStart()");
super.onStart();
}
@Override
protected void onResume() {
if (Static.DEBUG) Log.i(TAG, "onResume()");
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_1_0, this, mLoaderCallback);
} else {
Log.i(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
super.onCreateOptionsMenu(menu);
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Camera.Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
@Override
protected void onPause() {
if (Static.DEBUG) Log.i(TAG, "onPause()");
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
protected void onStop() {
if (Static.DEBUG) Log.i(TAG, "onStop()");
super.onStop();
}
@Override
protected void onDestroy() {
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
++frameCounter;
//Log.i(TAG, "Frame number: "+frameCounter);
final Mat rgba = inputFrame.rgba();
//Core.flip(rgba, rgba, 1);
/*if(Static.DEBUG) Log.i(TAG,"rgba.total(): "+rgba.total());
if(Static.DEBUG) Log.i(TAG,"rgba.channels(): " +rgba.channels());*/
byte[] data = new byte[(int) (rgba.total() * rgba.channels())];
rgba.get(0, 0, data);
//if(Static.DEBUG) Log.i(TAG,"return_buff: "+return_buff.length);
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return rgba;
}
// get video data
if (yuvImage != null && recording) {
ByteBuffer b = (ByteBuffer)yuvImage.image[0].position(0);
b.put(data);
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
return rgba;
}
@Override
public void onCameraViewStarted(int width, int height) {
edgesMat = new Mat();
}
@Override
public void onCameraViewStopped() {
if (edgesMat != null)
edgesMat.release();
edgesMat = null;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
} else if (item.getGroupId() == 2) {
int id = item.getItemId();
Camera.Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
return false;
}
/**
* Click to ImageButton to start recording.
*/
public void onClickBtnStartRecord2(View v) {
if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");
if(!recording)
startRecording();
else
stopRecording();
}
private void startRecording() {
if (Static.DEBUG) Log.i(TAG, "startRecording()");
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
if (Static.DEBUG) Log.i(TAG, "startRecording() success");
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
private void stopRecording() {
if (Static.DEBUG) Log.i(TAG, "stopRecording()");
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (Static.DEBUG) Log.i(TAG, "stopRecording() 2");
if(recorder != null && recording) {
recording = false;
try {
recorder.stop();
recorder.release();
Log.i(TAG, "Finishing recording, calling stop and release on recorder");
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.i(TAG, "init recorder");
try {
if (yuvImage == null) {
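// Four byte-channels per pixel here, so the frame can hold the full
// RGBA Mat that onCameraFrame() copies in.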
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 4);
Log.i(TAG, "create yuvImage");
}
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
//Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
Log.i(TAG, "recorder initialize success");
} catch (Exception e){
e.printStackTrace();
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
Log.d(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
// ffmpeg_audio encoding loop
while(runAudioThread) {
//Log.v(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
try {
recorder.recordSamples(audioData);
//Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(TAG, "AudioThread Finished, release audioRecord");
// encoding finish, release recorder
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(TAG, "audioRecord released");
}
}
}
}
OpenCVCameraPreview.java:
import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import org.opencv.android.JavaCameraView;
import java.io.FileOutputStream;
import java.util.List;
public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {
private static final String TAG = OpenCVCameraPreview.class.getSimpleName();
private String mPictureFileName;
public OpenCVCameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Camera.Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Camera.Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Camera.Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
activity_opencv.xml:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent">

    <ImageButton
        android:id="@+id/btnStartRecord2"
        android:layout_width="70dp"
        android:layout_height="70dp"
        android:scaleType="fitXY"
        android:src="@drawable/record_icon"
        android:background="@null"
        android:text="@string/btnStartRecord"
        android:onClick="onClickBtnStartRecord2"
        android:layout_centerVertical="true"
        android:layout_alignParentRight="true"
        android:layout_alignParentEnd="true" />
</RelativeLayout>

The overlay views work, but the recorded video does not contain them, and recording through the onCameraFrame method is very slow.
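View-based overlays are composited by Android on top of the preview surface and never reach the camera frames, which is why they are missing from the recording. In approach 2, where onCameraFrame() hands an RGBA Mat to the recorder, one way around this is to draw the overlay into the Mat itself before it is recorded. A hedged sketch using the OpenCV 3.x Java API (the helper class and the caption example are illustrative, not from the question):
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public final class OverlayBurner {
    private OverlayBurner() {}

    /** Draws a caption into the RGBA frame so it ends up in the recorded video. */
    public static Mat burnCaption(Mat rgba, String caption) {
        Imgproc.putText(rgba, caption,
                new Point(16, rgba.rows() - 16),   // bottom-left anchor of the text
                Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, // font face and scale
                new Scalar(255, 255, 255, 255),    // opaque white in RGBA
                2);                                // stroke thickness in pixels
        return rgba;
    }
}
Calling OverlayBurner.burnCaption(rgba, "REC") at the top of onCameraFrame() would bake the caption into every recorded frame. It does not address the slowness, which likely comes at least in part from copying the full-resolution RGBA frame on every callback.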