
Other articles (42)
-
The SPIPmotion queue
28 November 2010
A queue stored in the database
When it is installed, SPIPmotion creates a new table in the database named spip_spipmotion_attentes.
This new table is made up of the following fields: id_spipmotion_attente, the unique numeric identifier of the task to be processed; id_document, the numeric identifier of the original document to be encoded; id_objet, the unique identifier of the object to which the encoded document should be attached automatically; objet, the type of object to which (...)
-
Contribute to documentation
13 April 2011
Documentation is vital to the development of improved technical capabilities.
MediaSPIP welcomes documentation from users as well as developers, including: critiques of existing features and functions; articles contributed by developers, administrators, content producers and editors; screenshots to illustrate the above; translations of existing documentation into other languages.
To contribute, register for the project users' mailing (...)
-
Supporting all media types
13 April 2011
Unlike most software and media-sharing platforms, MediaSPIP aims to manage as many different media types as possible. The following are just a few examples from an ever-expanding list of supported formats: images: png, gif, jpg, bmp and more; audio: MP3, Ogg, Wav and more; video: AVI, MP4, OGV, mpg, mov, wmv and more; text, code and other data: OpenOffice, Microsoft Office (Word, PowerPoint, Excel), web (html, CSS), LaTeX, Google Earth and (...)
On other sites (4456)
-
Live streaming video on the server side not responding (Wowza / ffmpeg / JavaCV 1.1)
12 February 2016, by Muthukumar S
I am new to JavaCV. I can record mp4 and flv videos using JavaCV 1.1 on the mobile side, and that works nicely, but the server side does not show the stream. I am using Wowza Streaming Engine on the server side. Thanks for the help.
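One configuration detail in the code below is worth flagging up front; treat this as an assumption, since the question does not include any logcat output. FFmpeg's RTMP protocol muxes into FLV, and the MP4 muxer needs seekable output, so the recorder.setFormat("mp4") call in initRecorder() is a likely reason Wowza never receives a usable stream. A minimal sketch of an RTMP-friendly setup, assuming the JavaCV 1.1 API and the org.bytedeco.javacpp.avcodec constants:
// sketch only: publish FLV over RTMP instead of MP4
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");                        // RTMP carries FLV
recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264); // H.264 video for Wowza
recorder.setAudioCodec(avcodec.AV_CODEC_ID_AAC);  // AAC audio
recorder.setSampleRate(sampleAudioRateInHz);
recorder.setFrameRate(frameRate);
The full activity from the question follows.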
package com.example.javacv11;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
public class FirstActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "rtmp://10.0.0.136:1935/live/myStream";
long startTime = 0;
boolean recording = false;
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_first);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_first, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (RECORD_LENGTH > 0) {
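// Looping mode: pre-allocate RECORD_LENGTH seconds worth of video frames and
// timestamps up front, so onPreviewFrame() can write into a ring buffer
// without allocating on the camera callback thread.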
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if (yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
Log.v(LOG_TAG,"Writing frames");
try {
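// images[] is a ring buffer: once more than RECORD_LENGTH seconds have been
// captured, imagesIndex has wrapped and the oldest frame sits just after the
// newest one, so first/last indices must be derived modulo the buffer length.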
int firstIndex = imagesIndex % images.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
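// Pre-allocate enough ShortBuffers to hold RECORD_LENGTH seconds of 16-bit
// mono audio; like the video path, they are reused as a ring buffer.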
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when this thread starts, it never gets set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator<Camera.Size>() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for (int i = 0; i < sizes.size(); i++) {
if ((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
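// Looping mode: write into the pre-allocated ring buffer and stamp the slot
// with the capture time in microseconds (recorder timestamps are in µs).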
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
-
SurfaceTexture object is not getting the frames from a Surface class
22 April 2016, by Juan Manuel González Otero
On the one hand, I have a Surface class which, when instantiated, automatically initializes a new thread and starts grabbing frames from a streaming source via native code based on FFMPEG. Here are the main parts of the code for the aforementioned Surface class:
public class StreamingSurface extends Surface implements Runnable {
...
public StreamingSurface(SurfaceTexture surfaceTexture, int width, int height) {
super(surfaceTexture);
screenWidth = width;
screenHeight = height;
init();
}
public void init() {
mDrawTop = 0;
mDrawLeft = 0;
mVideoCurrentFrame = 0;
this.setVideoFile();
this.startPlay();
}
public void setVideoFile() {
// Initialise FFMPEG
naInit("");
// Get stream video res
int[] res = naGetVideoRes();
mDisplayWidth = (int)(res[0]);
mDisplayHeight = (int)(res[1]);
// Prepare Display
mBitmap = Bitmap.createBitmap(mDisplayWidth, mDisplayHeight, Bitmap.Config.ARGB_8888);
naPrepareDisplay(mBitmap, mDisplayWidth, mDisplayHeight);
}
public void startPlay() {
thread = new Thread(this);
thread.start();
}
@Override
public void run() {
while (true) {
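// Decode loop: fetch the next frame via native FFMPEG, then draw the shared
// bitmap onto this Surface's canvas; mStatus == 2 means paused.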
while (2 == mStatus) {
//pause
SystemClock.sleep(100);
}
mVideoCurrentFrame = naGetVideoFrame();
if (0 < mVideoCurrentFrame) {
//success, redraw
if(isValid()){
Canvas canvas = lockCanvas(null);
if (null != mBitmap) {
canvas.drawBitmap(mBitmap, mDrawLeft, mDrawTop, prFramePaint);
}
unlockCanvasAndPost(canvas);
}
} else {
//failure, probably end of video, break
naFinish(mBitmap);
mStatus = 0;
break;
}
}
}
}
In my MainActivity class, I instantiate this class in the following way:
public void startCamera(int texture)
{
mSurface = new SurfaceTexture(texture);
mSurface.setOnFrameAvailableListener(this);
Surface surface = new StreamingSurface(mSurface, 640, 360);
surface.release();
}
I read the following line on the Android developer page regarding the Surface class constructor:
"Images drawn to the Surface will be made available to the SurfaceTexture, which can attach them to an OpenGL ES texture via updateTexImage()."
That is exactly what I want to do, and I have everything ready for the subsequent rendering. But with the above code, the frames captured in the Surface class are never made available to the corresponding SurfaceTexture. I know this because the debugger, for instance, never hits the onFrameAvailable callback associated with that SurfaceTexture.
Any ideas? Could the fact that I am using a thread to call the drawing functions be breaking things? If so, what alternatives do I have to grab the frames?
Thanks in advance
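Two details in the snippets above are worth a second look; take this as a sketch under assumptions rather than a confirmed diagnosis. First, startCamera() releases the StreamingSurface right after constructing it, so the native surface backing lockCanvas() goes away while the drawing thread is still running. Second, a SurfaceTexture only keeps delivering onFrameAvailable callbacks if each frame is consumed with updateTexImage() on the thread that owns the GL context. A minimal pattern along those lines:
private StreamingSurface mStreamingSurface; // keep the reference instead of releasing it

public void startCamera(int texture) {
    mSurface = new SurfaceTexture(texture);
    mSurface.setOnFrameAvailableListener(this);
    mStreamingSurface = new StreamingSurface(mSurface, 640, 360);
    // no release() here; release it in onDestroy()/cleanup instead
}

@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // must run on the thread that owns the GL context the texture is bound to
    surfaceTexture.updateTexImage();
}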
-
How to make a video from an ArrayList of images with music
6 April 2018, by hardik sojitra
I am making an app which builds a video from selected images stored on the SD card. I have added the dependency group: ’com.github.hiteshsondhi88.libffmpeg’, name: ’FFmpegAndroid’, version: ’0.2.5’ and written the classes below, but it does not work.
Utility.java
public class Utility {
private final static String TAG = Utility.class.getName();
private static Context mContext;
public Utility(Context context) {
mContext = context;
}
public static String excuteCommand(String command)
{
try {
Log.d(TAG, "execute command : " + command);
Process process = Runtime.getRuntime().exec(command);
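// Note: only stdout is drained below; ffmpeg logs its progress to stderr,
// so a chatty run can fill the stderr pipe and block the child process
// before waitFor() ever returns.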
BufferedReader reader = new BufferedReader(
new InputStreamReader(process.getInputStream()));
int read;
char[] buffer = new char[4096];
StringBuffer output = new StringBuffer();
while ((read = reader.read(buffer)) > 0) {
output.append(buffer, 0, read);
}
reader.close();
process.waitFor();
Log.d(TAG, "command result: " + output.toString());
return output.toString();
} catch (IOException e) {
Log.e(TAG, e.getMessage(), e);
} catch (InterruptedException e) {
Log.e(TAG, e.getMessage(), e);
}
return "";
}
public String getPathOfAppInternalStorage()
{
return Environment.getExternalStorageDirectory().getAbsolutePath();
}
public void saveFileToAppInternalStorage(InputStream inputStream, String fileName)
{
File file = new File(getPathOfAppInternalStorage() + "/" + fileName);
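// Caveat: this path points at external storage, while openFileOutput() below
// writes to the app's internal files directory, so the existence check and
// the write do not refer to the same file.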
if (file.exists())
{
Log.d(TAG, "SaveRawToAppDir Delete Exsisted File");
file.delete();
}
FileOutputStream outputStream;
try {
outputStream = mContext.openFileOutput(fileName, Context.MODE_PRIVATE);
byte[] buffer = new byte[1024];
int length;
while ((length = inputStream.read(buffer)) > 0)
{
outputStream.write(buffer, 0, length);
}
outputStream.close();
inputStream.close();
} catch (Exception e) {
Log.e(TAG, e.getMessage(), e);
}
}
public static boolean isFileExsisted(String filePath)
{
File file = new File(filePath);
return file.exists();
}
public static void deleteFileAtPath(String filePath)
{
File file = new File(filePath);
file.delete();
}
}
FfmpegController.java
package com.example.admin.imagetovideowithnative;
import android.content.Context;
import android.util.Log;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
public class FfmpegController {
private static Context mContext;
private static Utility mUtility;
private static String mFfmpegBinaryPath;
public FfmpegController(Context context) {
mContext = context;
mUtility = new Utility(context);
initFfmpeg();
}
private void initFfmpeg()
{
mFfmpegBinaryPath = mContext.getApplicationContext().getFilesDir().getAbsolutePath() + "/ffmpeg";
if (Utility.isFileExsisted(mFfmpegBinaryPath))
return;
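// First run: copy the ffmpeg binary bundled as res/raw/ffmpeg into the app's
// files directory and chmod it so the shell can execute it.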
InputStream inputStream = mContext.getResources().openRawResource(R.raw.ffmpeg);
mUtility.saveFileToAppInternalStorage(inputStream, "ffmpeg");
Utility.excuteCommand(CommandHelper.commandChangeFilePermissionForExecuting(mFfmpegBinaryPath));
}
public void convertImageToVideo(String inputImgPath)
{
Log.e("Image Parth", "inputImgPath - "+inputImgPath);
if (Utility.isFileExsisted(pathOuputVideo()))
Utility.deleteFileAtPath(pathOuputVideo());
saveShellCommandImg2VideoToAppDir(inputImgPath);
Utility.excuteCommand("sh" + " " + pathShellScriptImg2Video());
}
public String pathOuputVideo()
{
return mUtility.getPathOfAppInternalStorage() + "/out.mp4";
}
private String pathShellScriptImg2Video()
{
return mUtility.getPathOfAppInternalStorage() + "/img2video.sh";
}
private void saveShellCommandImg2VideoToAppDir(String inputImgPath)
{
String command = CommandHelper.commandConvertImgToVideo(mFfmpegBinaryPath, inputImgPath, pathOuputVideo());
InputStream is = new ByteArrayInputStream(command.getBytes());
mUtility.saveFileToAppInternalStorage(is, "img2video.sh");
}
}
CommandHelper.java
package com.example.admin.imagetovideowithnative;
import android.util.Log;
public class CommandHelper {
public static String commandConvertImgToVideo(String ffmpegBinaryPath, String inputImgPath, String outputVideoPath) {
Log.e("ffmpegBinaryPath", "ffmpegBinaryPath - " + ffmpegBinaryPath);
Log.e("inputImgPath", "inputImgPath - " + inputImgPath);
Log.e("outputVideoPath", "outputVideoPath - " + outputVideoPath);
return ffmpegBinaryPath + " -r 1/1 -i " + inputImgPath + " -c:v libx264 -crf 23 -pix_fmt yuv420p -s 640x480 " + outputVideoPath;
}
public static String commandChangeFilePermissionForExecuting(String filePath) {
return "chmod 777 " + filePath;
}
}
AsynckTask
class Asynck extends AsyncTask<Object, Void, Void> {
FfmpegController mFfmpegController = new FfmpegController(PhotosActivity.this);
Utility mUtility = new Utility(PhotosActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
Log.e("Video Process Start", "======================== Video Process Start ======================================");
}
@Override
protected Void doInBackground(Object... objects) {
mFfmpegController.convertImageToVideo("");
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
Log.e("Video Process Complete", "======================== Video Process Complete ======================================");
Log.e("Video Path", "Path - " + mFfmpegController.pathOuputVideo());
Toast.makeText(PhotosActivity.this, "Video Process Complete", Toast.LENGTH_LONG).show();
}
}
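One last observation on the command built in CommandHelper: with -r 1/1 -i pointing at a single image, ffmpeg encodes exactly one picture, and no audio input is ever supplied, so the result can never be a slideshow with music. A sketch of a command that would produce one, assuming the selected images are first copied to a sequentially numbered pattern such as img%03d.jpg and the track to /sdcard/music.mp3 (helper name and both paths are hypothetical):
public static String commandImgSequenceWithAudioToVideo(String ffmpegBinaryPath,
        String imgPattern, String audioPath, String outputVideoPath) {
    // -framerate 1 shows each image for one second; -shortest ends the video
    // when the shorter input runs out; yuv420p keeps the file widely playable.
    return ffmpegBinaryPath
            + " -framerate 1 -i " + imgPattern
            + " -i " + audioPath
            + " -c:v libx264 -pix_fmt yuv420p -c:a aac -shortest "
            + outputVideoPath;
}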