
Recherche avancée
Autres articles (39)
-
(Dés)Activation de fonctionnalités (plugins)
18 février 2011 — Pour gérer l’ajout et la suppression de fonctionnalités supplémentaires (ou plugins), MediaSPIP utilise à partir de la version 0.2 SVP.
SVP permet l’activation facile de plugins depuis l’espace de configuration de MediaSPIP.
Pour y accéder, il suffit de se rendre dans l’espace de configuration puis de se rendre sur la page "Gestion des plugins".
MediaSPIP est fourni par défaut avec l’ensemble des plugins dits "compatibles", ils ont été testés et intégrés afin de fonctionner parfaitement avec chaque (...) -
Le plugin : Podcasts.
14 juillet 2010 — Le problème du podcasting est à nouveau un problème révélateur de la normalisation des transports de données sur Internet.
Deux formats intéressants existent : Celui développé par Apple, très axé sur l’utilisation d’iTunes dont la SPEC est ici ; Le format "Media RSS Module" qui est plus "libre" notamment soutenu par Yahoo et le logiciel Miro ;
Types de fichiers supportés dans les flux
Le format d’Apple n’autorise que les formats suivants dans ses flux : .mp3 audio/mpeg .m4a audio/x-m4a .mp4 (...) -
Les autorisations surchargées par les plugins
27 avril 2010 — Mediaspip core
autoriser_auteur_modifier() afin que les visiteurs soient capables de modifier leurs informations sur la page d’auteurs
Sur d’autres sites (6194)
-
fatal singal 11 error on FFMPEG streaming for RTSP
2 mars 2018, par Anuran Barman — I have added rtsp streaming with FFMPEG in my app. What I am trying to do is add two fragments (each will load different rtsp streaming) into a single activity.
While the first fragment in first LinearLayout is working fine second one is giving error as mentioned in the question. What can be the possible reason for this ? I am loading the library in static block , can that be the reason ?My code for single fragment which is added twice in two different linear.
layouts.

@SuppressWarnings("JniMissingFunction")
// Fragment that plays a single RTSP stream by delegating decode/render to native
// FFmpeg code (see the na* declarations below). Despite its name this is a Fragment,
// not an Activity.
public class StreamingActivity extends Fragment implements SurfaceHolder.Callback {
// Set true once all native libraries load; never read afterwards, so a load
// failure is only visible through the printed stack trace.
private static boolean loadedLibraries;
// Load the native FFmpeg libraries once per process, in dependency order
// (avutil first, the JNI glue last).
// NOTE(review): loading is per-class, not per-instance — every StreamingActivity
// shares whatever state the native layer keeps (see static na* methods).
static {
try {
System.loadLibrary("avutil");
System.loadLibrary("avcodec");
System.loadLibrary("avformat");
System.loadLibrary("swscale");
System.loadLibrary("avfilter");
System.loadLibrary("ffmpeg-jni");
loadedLibraries = true;
} catch (Throwable e) {
e.printStackTrace();
}
}
// Surface the native renderer draws decoded frames into.
private SurfaceView surfaceView;
// Spinner shown while the stream is being initialized.
private ProgressBar progressBar;
private boolean isPlaying;
// Result of naInit(): true when the native player initialized successfully.
private boolean isInitialized;
// RTSP URL, supplied via the fragment arguments under the key "url".
private String url;
private final String TAG=StreamingActivity.class.getSimpleName();
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
    // Inflate against the parent container (attachToRoot=false) so the inflated
    // root keeps its XML layout_* attributes; the original passed a null parent,
    // which silently discards those LayoutParams.
    View view = inflater.inflate(R.layout.activity_streaming, container, false);
    surfaceView = (SurfaceView) view.findViewById(R.id.surfaceView);
    progressBar = (ProgressBar) view.findViewById(R.id.progressBar);
    // Register for surface lifecycle callbacks so the native renderer can be
    // attached/detached (see surfaceChanged / surfaceDestroyed).
    surfaceView.getHolder().addCallback(this);
    // RTSP URL handed over by the hosting activity via fragment arguments.
    url = getArguments().getString("url");
    return view;
}
@Override
public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// Kick off native initialization off the UI thread; PlayVideo.onPostExecute
// calls postInit() when it finishes.
new PlayVideo().execute();
}
// Runs on the UI thread after naInit() completes: start playback when init
// succeeded, otherwise finish the hosting activity.
@DebugLog
private void postInit() {
if (isInitialized) {
initPlay();
progressBar.setVisibility(View.GONE);
} else {
getActivity().finish();
}
}
// Query the stream resolution from native code, scale it to fit the screen while
// preserving aspect ratio, size the SurfaceView accordingly, configure the native
// decoder output, and start playback. Call order matters: updateSurfaceView()
// must run before naSetup() so the native window matches the chosen dimensions.
private void initPlay() {
int[] res = naGetVideoRes();
Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
// Fall back to 480x320 when the native side could not determine a dimension.
if (res[0] <= 0) {
res[0] = 480;
}
if (res[1] <= 0) {
res[1] = 320;
}
int[] screenRes = getScreenRes();
int width, height;
float widthScaledRatio = screenRes[0] * 1.0f / res[0];
float heightScaledRatio = screenRes[1] * 1.0f / res[1];
// Use the smaller scale factor so the video fits entirely on screen (letterbox,
// no cropping).
if (widthScaledRatio > heightScaledRatio) {
//use heightScaledRatio
width = (int) (res[0] * heightScaledRatio);
height = screenRes[1];
} else {
//use widthScaledRatio
width = screenRes[0];
height = (int) (res[1] * widthScaledRatio);
}
Log.d(TAG, "width " + width + ",height:" + height);
updateSurfaceView(width, height);
naSetup(width, height);
playMedia();
}
// Start native playback, dismissing the loading spinner if it is still visible.
private void playMedia() {
    final boolean spinnerShowing = progressBar.getVisibility() == View.VISIBLE;
    if (spinnerShowing) {
        progressBar.setVisibility(View.GONE);
    }
    naPlay();
    isPlaying = true;
}
@DebugLog
// Resize the SurfaceView to the computed video dimensions; the resulting surface
// change propagates to the native window via surfaceChanged().
private void updateSurfaceView(int pWidth, int pHeight) {
//update surfaceview dimension, this will cause the native window to change
Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
// Assumes the SurfaceView's parent is a FrameLayout — TODO confirm against the layout XML.
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
params.width = pWidth;
params.height = pHeight;
surfaceView.setLayoutParams(params);
}
@DebugLog
@SuppressLint("NewApi")
// Returns the default display's current size in pixels as {width, height}.
private int[] getScreenRes() {
    final Point displaySize = new Point();
    getActivity().getWindowManager().getDefaultDisplay().getSize(displaySize);
    return new int[]{displaySize.x, displaySize.y};
}
// Stop native playback as soon as the fragment is no longer visible.
// NOTE(review): finishing the host activity from a fragment's onStop() tears down
// BOTH streaming fragments whenever either one stops — confirm this is intended.
@Override
public void onStop() {
super.onStop();
Toast.makeText(getActivity(),"onStop called",Toast.LENGTH_SHORT).show();
stopPlaying();
getActivity().finish();
}
// @Override
// public void onBackPressed() {
// stopPlaying();
// getActivity().finish();
// }
// Flag playback stopped and halt the native decoder.
// NOTE(review): naStop() is static — stopping one fragment instance stops
// playback for every instance in the process.
private void stopPlaying() {
isPlaying = false;
naStop();
}
@Override
public void onDestroy() {
super.onDestroy();
stopPlaying();
getActivity().finish();
}
@Override
public void onResume() {
super.onResume();
// Re-show the spinner; it is hidden again when playback (re)starts.
progressBar.setVisibility(View.VISIBLE);
}
// JNI entry points implemented in libffmpeg-jni. All are STATIC, so every
// StreamingActivity instance shares whatever player state the native layer keeps —
// presumably a single global player, which would explain why adding a second
// simultaneous fragment crashes with SIGSEGV (confirm in the JNI source).
private static native int naInit(String pFileName);
private static native int[] naGetVideoRes();
private static native void naSetSurface(Surface pSurface);
private static native int naSetup(int pWidth, int pHeight);
private static native void naPlay();
private static native void naStop();
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
    // Timber.d(String, Object...) treats its FIRST argument as the format string;
    // the original passed TAG there, silently dropping the real message.
    Timber.tag(TAG).d("surfacechanged: %d:%d", width, height);
    // Point the native renderer at the (new) surface.
    naSetSurface(holder.getSurface());
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
    // Nothing to do: surfaceChanged() always follows creation and attaches the surface.
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    Timber.tag(TAG).d("surfaceDestroyed");
    // Detach the native renderer so it stops drawing into a dead surface.
    naSetSurface(null);
}
// Background task that runs naInit() off the UI thread and triggers postInit()
// on completion. The task MUST be parameterized: a raw `extends AsyncTask` whose
// doInBackground takes Void... does not override the abstract
// doInBackground(Params...) and fails to compile.
private class PlayVideo extends AsyncTask<Void, Void, Void> {
    @Override
    protected Void doInBackground(Void... voids) {
        // naInit() returns 0 on success.
        isInitialized = (naInit(url) == 0);
        return null;
    }

    @Override
    protected void onPostExecute(Void aVoid) {
        super.onPostExecute(aVoid);
        // Back on the UI thread: start playback or finish the activity.
        postInit();
        // No-op here (the task has already completed); kept from the original.
        this.cancel(true);
    }
}
}

My activity which is loading the two fragments :
// Host activity that embeds two StreamingActivity fragments side by side.
public class CamerasActivity extends BaseActivity {
FragmentManager fragmentManager;
FragmentTransaction fragmentTransaction;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_cameras);
fragmentManager=getSupportFragmentManager();
fragmentTransaction=fragmentManager.beginTransaction();
StreamingActivity streamingFragment=new StreamingActivity();
Bundle bundle=new Bundle();
// NOTE(review): both fragments receive the SAME url extra, so they open the same
// stream; and StreamingActivity's na* methods are static, so both instances share
// one native player — a likely cause of the reported SIGSEGV on the second fragment.
bundle.putString("url",getIntent().getStringExtra(Constants.IntentExtras.DATA));
streamingFragment.setArguments(bundle);
StreamingActivity streamingFragment2=new StreamingActivity();
Bundle bundle2=new Bundle();
bundle2.putString("url",getIntent().getStringExtra(Constants.IntentExtras.DATA));
streamingFragment2.setArguments(bundle2);
// Place one fragment in each container of activity_cameras.
fragmentTransaction.replace(R.id.frame1,streamingFragment);
fragmentTransaction.replace(R.id.frame2,streamingFragment2);
fragmentTransaction.commit();
}
}

Layout for CamerasActivity.java:
<?xml version="1.0" encoding="utf-8"?>
<linearlayout>
<linearlayout></linearlayout>
<linearlayout></linearlayout>
</linearlayout>

Why is the second fragment crashing ?
As we mention in the JNI wrapper which Java class will be calling the native functions can there only one instance of that class in whole scope ? can that be the reason ? -
Convert Audio Files using FFMPEG in android
25 avril 2018, par Venkat Maddy — I'm developing an Android application, where I’ve been asked to record the calls, convert them to WAVE (.wav) format and send them to the server.
I am able to successfully record and save the audio in my mobile. The recorded file mime type is 3gpp, now I want to convert this 3gpp file to wav. I tried using FFMPEG but unable to convert it. I am posting my code here, Any help would be grateful.
While Converting the recorded file from aac to wav, I’m getting below error
Exception while trying to run: [Ljava.lang.String;@64075a0
java.io.IOException: Cannot run program "/data/user/0/com.sms.example.example/files/ffmpeg": error=2, No such file or directory
at java.lang.ProcessBuilder.start(ProcessBuilder.java:983)
at java.lang.Runtime.exec(Runtime.java:691)
at java.lang.Runtime.exec(Runtime.java:559)
at com.github.hiteshsondhi88.libffmpeg.ShellCommand.run(ShellCommand.java:10)
at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:38)
at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:10)
at android.os.AsyncTask$2.call(AsyncTask.java:305)
at java.util.concurrent.FutureTask.run(FutureTask.java:237)
at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:243)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1133)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:607)
at java.lang.Thread.run(Thread.java:761)
Caused by: java.io.IOException: error=2, No such file or directory
at java.lang.UNIXProcess.forkAndExec(Native Method)
at java.lang.UNIXProcess.<init>(UNIXProcess.java:133)
at java.lang.ProcessImpl.start(ProcessImpl.java:128)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:964)
Recorder Service
// Started service that registers a broadcast receiver for phone-state changes,
// records calls with MediaRecorder, converts them with FFmpeg and uploads them.
public class CallService extends Service {
//util object creation
NetworkUtils networkUtils;
SharedPref sharedPref;
MediaRecorder recorder;
// NOTE(review): audiofile is never assigned (the assignments are commented out in
// CallBr.onReceive) but is still dereferenced in the outgoing-call branch — see there.
File audiofile;
String name, phonenumber;
String audio_format;
public String Audio_Type;
int audioSource;
Context context;
private Handler handler;
Timer timer;
Boolean offHook = false, ringing = false;
Toast toast;
Boolean isOffHook = false;
// True while MediaRecorder is actively recording a call.
private boolean recordstarted = false;
// Broadcast actions: incoming call state changes and outgoing call placement.
private static final String ACTION_IN = "android.intent.action.PHONE_STATE";
private static final String ACTION_OUT = "android.intent.action.NEW_OUTGOING_CALL";
private CallBr br_call;
public String file_name="", recordedFileName="",uploadedFileName="", base64_file="", statusMessage="";
public int statusCode, file_size=0, delay = 1000,period = 5000;
public FFmpeg fFmpeg;
// Not a bound service — clients cannot bind.
@Override
public IBinder onBind(Intent arg0) {
// TODO Auto-generated method stub
return null;
}
@Override
public void onDestroy() {
    Log.d("service", "destroy");
    // Unregister the call-state receiver registered in onStartCommand(); the
    // original never unregistered it, leaking the receiver past the service's life.
    if (br_call != null) {
        try {
            unregisterReceiver(br_call);
        } catch (IllegalArgumentException ignored) {
            // Receiver was never registered (onStartCommand not reached) — nothing to do.
        }
    }
    super.onDestroy();
}
// Registers the call-state receiver and starts a periodic connectivity poll.
// NOTE(review): a new Timer is created on EVERY onStartCommand() call and none is
// ever cancelled; the poll body is also empty on both branches — confirm whether
// the timer is still needed at all.
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
sharedPref = new SharedPref(this);
networkUtils = new NetworkUtils(this);
final IntentFilter filter = new IntentFilter();
filter.addAction(ACTION_OUT);
filter.addAction(ACTION_IN);
this.br_call = new CallBr();
this.registerReceiver(this.br_call, filter);
fFmpeg = FFmpeg.getInstance(CallService.this);
Timer task = new Timer();
task.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
if (networkUtils.checkConnection()){
}else{
// Toast.makeText(CallService.this, getResources().getString(R.string.no_connection), Toast.LENGTH_SHORT).show();
}
}
}, delay, period);
// if(terminate != null) {
// stopSelf();
// }
// START_STICKY: the system recreates the service if it is killed.
return START_STICKY;
}
// Receiver driving the record/stop cycle from telephony broadcasts:
// RINGING -> remember; OFFHOOK after ringing -> start recording (AAC);
// IDLE -> stop and convert/upload. Outgoing calls start recording immediately.
public class CallBr extends BroadcastReceiver {
Bundle bundle;
String state;
String inCall, outCall;
public boolean wasRinging = false;
// Directory all recordings are written to: <external storage>/OfficeRecordings/.
public File sampleDir;
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(ACTION_IN)) {
if ((bundle = intent.getExtras()) != null) {
state = bundle.getString(TelephonyManager.EXTRA_STATE);
if (state.equals(TelephonyManager.EXTRA_STATE_RINGING)) {
// Incoming call ringing: remember it so OFFHOOK means "answered".
inCall = bundle.getString(TelephonyManager.EXTRA_INCOMING_NUMBER);
wasRinging = true;
Toast.makeText(context, "IN : " + inCall, Toast.LENGTH_LONG).show();
} else if (state.equals(TelephonyManager.EXTRA_STATE_OFFHOOK)) {
if (wasRinging == true) {
// Call answered: build the output path and start recording.
Toast.makeText(context, "ANSWERED", Toast.LENGTH_LONG).show();
// NOTE(review): `out` is computed but never used; SimpleDateFormat without an
// explicit Locale is locale-sensitive.
String out = new SimpleDateFormat("dd-MM-yyyy hh-mm-ss").format(new Date());
sampleDir = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), "/OfficeRecordings/");
if (!sampleDir.exists()) {
sampleDir.mkdirs();
}
Log.d("TService", "onReceive: "+sampleDir);
file_name = "Incoming";
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
// try {
// audiofile = File.createTempFile(file_name, ".wav", sampleDir);
// recordedFileName = sampleDir + String.valueOf(System.currentTimeMillis() + ".wav");
//
// } catch (IOException e) {
// e.printStackTrace();
// }
// recordedFileName = sampleDir+"_"+ file_name +String.valueOf(System.currentTimeMillis() + ".wav");
// File name: <userId>_<mobile>_Incoming<millis>.aac
uploadedFileName = sharedPref.getStringValue("userId")+"_"+sharedPref.getStringValue("mobile_number_prefs")+"_" +file_name +String.valueOf(System.currentTimeMillis()+ ".aac");
recordedFileName = sampleDir+"/"+uploadedFileName;
// Record from the microphone into an AAC/ADTS file.
recorder = new MediaRecorder();
// recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
// recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
// recorder.setOutputFile(audiofile.getAbsolutePath());
recorder.setOutputFile(recordedFileName);
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
recorder.start();
recordstarted = true;
}
} else if (state.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
// Call ended (or rejected): stop recording and hand the file to conversion.
wasRinging = false;
Toast.makeText(context, "REJECT || DISCO", Toast.LENGTH_LONG).show();
if (recordstarted) {
recorder.stop();
recordstarted = false;
if (recordedFileName.isEmpty()){
}else{
convertFileToBase64(context);
}
}
}
}
} else if (intent.getAction().equals(ACTION_OUT)) {
// Outgoing call placed: start recording immediately (AMR).
if ((bundle = intent.getExtras()) != null) {
outCall = intent.getStringExtra(Intent.EXTRA_PHONE_NUMBER);
Toast.makeText(context, "OUT : " + outCall, Toast.LENGTH_LONG).show();
sampleDir = new File(Environment.getExternalStorageDirectory().getAbsolutePath(), "/OfficeRecordings/");
if (!sampleDir.exists()) {
sampleDir.mkdirs();
}
Log.d("TService", "onReceive: "+sampleDir);
file_name = "Outgoing";
String path = Environment.getExternalStorageDirectory().getAbsolutePath();
uploadedFileName = sharedPref.getStringValue("userId")+"_"+sharedPref.getStringValue("mobile_number_prefs")+"_" +file_name +String.valueOf(System.currentTimeMillis() + ".amr");
recordedFileName = sampleDir+"/"+uploadedFileName;
// try {
// audiofile = File.createTempFile(file_name, ".wav", sampleDir);
// recordedFileName = audiofile.getName();
//
// } catch (IOException e) {
// e.printStackTrace();
// }
recorder = new MediaRecorder();
// recorder.setAudioSource(MediaRecorder.AudioSource.VOICE_CALL);
// recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
recorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
// BUG(review): `audiofile` is never assigned (its assignment above is commented
// out), so this line throws NullPointerException — it should use recordedFileName
// like the incoming branch does.
recorder.setOutputFile(audiofile.getAbsolutePath());
try {
recorder.prepare();
} catch (IllegalStateException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
recorder.start();
recordstarted = true;
// NOTE(review): dead branch — `state` is only assigned in the ACTION_IN path, so
// inside ACTION_OUT it holds a stale value; the outgoing recording is never
// stopped here. Confirm where outgoing calls are supposed to stop recording.
if (state.equals(TelephonyManager.EXTRA_STATE_IDLE)) {
wasRinging = false;
Toast.makeText(context, "REJECT || DISCO", Toast.LENGTH_LONG).show();
if (recordstarted) {
recorder.stop();
recordstarted = false;
if (recordedFileName.isEmpty()){
}else{
convertFileToBase64(context);
}
}
}
}
}
// LocalBroadcastManager.getInstance(context).sendBroadcast(new Intent("callRecord"));
}
// Convert the freshly recorded call to WAV via FFmpeg, then Base64-encode the
// recording and upload it (or save locally when offline).
public void convertFileToBase64(Context context){
    File uploadedFile = new File(recordedFileName);
    Log.d("CallService", "convertFileToBase64: "+uploadedFile);
    // Each array element is ONE argv entry. The ffmpeg-android library prepends
    // its bundled binary itself, so the command must NOT start with "ffmpeg"; the
    // original also embedded stray spaces inside the elements ("ffmpeg ", "-i ",
    // "<file> "), which corrupted every argument and made execution fail.
    String[] cmd = {"-i", recordedFileName, sampleDir + "/" + "sampleAudio.wav"};
    execFFmpegBinary(cmd);
    // NOTE(review): execFFmpegBinary() runs asynchronously, yet the code below
    // immediately encodes the ORIGINAL recording, not the converted WAV — confirm
    // which file the server actually expects.
    file_size = Integer.parseInt(String.valueOf(uploadedFile.length()/1024));
    Log.d("CallService", "onReceive: "+file_size);
    byte[] byteArray = new byte[1024*11];
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    Uri path = Uri.fromFile(uploadedFile);
    Log.d("CallService", "convertFileToBase64: "+path);
    try {
        InputStream inputStream = context.getContentResolver().openInputStream(path);
        try {
            byte[] b = new byte[1024 * 11];
            int bytesRead;
            while ((bytesRead = inputStream.read(b)) != -1) {
                byteArrayOutputStream.write(b, 0, bytesRead);
            }
            byteArray = byteArrayOutputStream.toByteArray();
            Log.e("Byte array", ">" + byteArray);
        } finally {
            // The original leaked this stream on every call.
            inputStream.close();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    base64_file = Base64.encodeToString(byteArray, Base64.DEFAULT);
    Log.d("CallRecorder", "base64File: "+base64_file);
    if (!(base64_file.isEmpty())){
        Log.d("CallService", "convertFileToBase64: base64 not empty");
        if (networkUtils.checkConnection()){
            uploadRecordedFileToServer();
        }else{
            saveFileToLocalDB();
            Toast.makeText(context, getString(R.string.no_connection), Toast.LENGTH_SHORT).show();
        }
    }
}
// Execute the given FFmpeg argv through the bundled binary; the library runs it
// asynchronously and delivers the callbacks below on the UI thread.
private void execFFmpegBinary(final String[] command) {
    // Human-readable command for logging — concatenating the array itself only
    // prints its identity hash (e.g. "[Ljava.lang.String;@...").
    final String printable = "ffmpeg " + String.join(" ", command);
    try {
        fFmpeg.execute(command, new ExecuteBinaryResponseHandler() {
            @Override
            public void onFailure(String s) {
                Log.d("CallService", "onFailure: " + s);
                Toast.makeText(CallService.this, "Failed to convert", Toast.LENGTH_SHORT).show();
            }
            @Override
            public void onSuccess(String s) {
                Log.d("CallService", "onSuccess: " + s);
            }
            @Override
            public void onProgress(String s) {
                Log.d("CallService", "Started command : " + printable);
                Log.d("CallService", "progress : " + s);
                // progressDialog.setMessage("Processing\n"+s);
            }
            @Override
            public void onStart() {
                Log.d("CallService", "Started command : " + printable);
                // progressDialog.setMessage("Processing...");
                // progressDialog.show();
            }
            @Override
            public void onFinish() {
                Log.d("CallService", "Finished command : " + printable);
                // progressDialog.dismiss();
            }
        });
    } catch (FFmpegCommandAlreadyRunningException e) {
        // A previous conversion is still running; this one is silently skipped.
    }
}

Any help would be really very grateful... Thanks in advance !!!
-
NoMethodFoundException when trying to load native methods from jar file
20 février 2018, par Anuran Barman — I am trying to load ffmpeg methods in android. My requirement is that I don't want to pack the .so files within the apk. Only if the user wants, then only I will download the jar file and load the ffmpeg native methods. After searching I think that loading .so files at run time is not possible. So what I did is that I made the .so file and created a different android application with only one class, FFMPEG.java, whose only duty is to call the native methods from the ffmpeg library. So I made the jar file and loaded that into my main application with ClassLoader. The constructor is getting called, so it means the class is loaded, but the methods are not getting loaded though they are declared public in the jar file. I am trying to stream RTSP video with FFMPEG. Below are my jar file and main application codes.
// Thin wrapper around the native FFmpeg JNI bindings, shipped as a separate jar
// so the host app can load it at runtime via DexClassLoader. The lib* instance
// methods exist so callers that only hold a reflective instance can invoke the
// static natives. JNI binding is sensitive to exact class/method names and
// signatures — do not rename anything here without updating the native side.
public class FFMPEG {
public FFMPEG(){
Log.d(FFMPEG.class.getSimpleName(),"constructor called");
}
// Native entry points implemented in libffmpeg-jni.
public static native int naInit(String pFileName);
public static native int[] naGetVideoRes();
public static native void naSetSurface(Surface pSurface);
public static native int naSetup(int pWidth, int pHeight);
public static native void naPlay();
public static native void naStop();
// True once all native libraries loaded successfully.
public static boolean loadedLibraries;
// Load the native libraries once per process, in dependency order.
static {
try {
System.loadLibrary("avutil");
System.loadLibrary("avcodec");
System.loadLibrary("avformat");
System.loadLibrary("swscale");
System.loadLibrary("avfilter");
System.loadLibrary("ffmpeg-jni");
loadedLibraries = true;
} catch (Throwable e) {
e.printStackTrace();
}
}
// Instance-level wrappers over the static natives; naInit returns 0 on success.
public int libInit(String filename){
return naInit(filename);
}
public int[] libGetVideoRes(){
return naGetVideoRes();
}
public void libSetSurface(Surface surface){
naSetSurface(surface);
}
public int libSetup(int width,int height){
return naSetup(width,height);
}
public void libPlay(){
naPlay();
}
public void libStop(){
naStop();
}
}

My main application activity code. The jar file is located on my sdcard and named camlib.jar:
@SuppressWarnings("JniMissingFunction")
// Activity that loads the FFMPEG wrapper class from an external jar at runtime
// and drives RTSP playback through reflective calls to its lib* methods.
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {
private SurfaceView surfaceView;
private ProgressBar progressBar;
private final String TAG=MainActivity.class.getSimpleName();
private boolean isPlaying;
// True once loadClass() resolved the FFMPEG class and its Method handles.
private boolean isClassLoaded;
// Result of libInit: true when the native player initialized successfully.
private boolean isInitialized;
// NOTE(review): the stream URL is empty here — confirm where it is supposed to be set.
private String url="";
// Reflective handles into the dynamically loaded FFMPEG class.
Method libInit,libGetVideoRes,libSetSurface,libSetup,libPlay,libStop;
// Instance of the dynamically loaded FFMPEG class.
Object myInstance;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
surfaceView = (SurfaceView) findViewById(R.id.surfaceView);
progressBar = ((ProgressBar) findViewById(R.id.progressBar));
surfaceView.getHolder().addCallback(this);
// The jar lives on external storage, so READ_EXTERNAL_STORAGE must be granted
// before the class can be loaded.
int permission= ActivityCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE);
if(permission== PackageManager.PERMISSION_GRANTED){
loadClass();
}else{
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},200);
}
}
// Retry loadClass() once storage permission is granted; re-prompt on denial.
// NOTE(review): re-requesting immediately after a denial can loop if the user
// selected "don't ask again" — consider a rationale/fallback path.
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if(requestCode==200){
if(grantResults.length>0){
if (grantResults[0]==PackageManager.PERMISSION_GRANTED){
loadClass();
}else{
ActivityCompat.requestPermissions(this,new String[]{Manifest.permission.READ_EXTERNAL_STORAGE},200);
}
}
}
}
// Load the FFMPEG wrapper class from the external jar via DexClassLoader and
// cache reflective handles to its lib* methods, then start playback.
public void loadClass(){
    try {
        final String libPath = Environment.getExternalStorageDirectory() + "/camlib.jar";
        final File tmpDir = getDir("dex", 0);
        final DexClassLoader classloader = new DexClassLoader(libPath, tmpDir.getAbsolutePath(), null, this.getClass().getClassLoader());
        final Class classToLoad = (Class) classloader.loadClass("com.myeglu.obbapplication.FFMPEG");
        myInstance = classToLoad.newInstance();
        // getMethod() needs the EXACT parameter types. The original passed none,
        // so every lookup of a method that takes arguments (libInit, libSetSurface,
        // libSetup) threw NoSuchMethodException — swallowed by the catch below,
        // leaving those Method fields null. That is why the "methods are not
        // getting loaded" even though the constructor ran.
        libInit = classToLoad.getMethod("libInit", String.class);
        libGetVideoRes = classToLoad.getMethod("libGetVideoRes");
        libSetSurface = classToLoad.getMethod("libSetSurface", Surface.class);
        libSetup = classToLoad.getMethod("libSetup", int.class, int.class);
        libPlay = classToLoad.getMethod("libPlay");
        libStop = classToLoad.getMethod("libStop");
        isClassLoaded = true;
        new PlayVideo().execute();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
// Runs on the UI thread after libInit completes: start playback on success,
// otherwise close the activity.
private void postInit() {
if (isInitialized) {
initPlay();
progressBar.setVisibility(View.GONE);
} else {
finish();
}
}
// Query stream resolution reflectively, fit it to the screen preserving aspect
// ratio, size the SurfaceView, configure the native decoder, start playback.
// NOTE(review): the empty catch below swallows every reflection failure —
// including the NPE caused when loadClass() failed to resolve a Method — making
// failures completely silent.
private void initPlay() {
try {
int[] res = (int[])libGetVideoRes.invoke(myInstance);
Log.d("ANURAN", "res width " + res[0] + ": height " + res[1]);
// Fall back to 480x320 when a dimension could not be determined.
if (res[0] <= 0) {
res[0] = 480;
}
if (res[1] <= 0) {
res[1] = 320;
}
int[] screenRes = getScreenRes();
int width, height;
float widthScaledRatio = screenRes[0] * 1.0f / res[0];
float heightScaledRatio = screenRes[1] * 1.0f / res[1];
// Use the smaller scale factor so the video fits without cropping.
if (widthScaledRatio > heightScaledRatio) {
//use heightScaledRatio
width = (int) (res[0] * heightScaledRatio);
height = screenRes[1];
} else {
//use widthScaledRatio
width = screenRes[0];
height = (int) (res[1] * widthScaledRatio);
}
Log.d(TAG, "width " + width + ",height:" + height);
updateSurfaceView(width, height);
libSetup.invoke(myInstance,width,height);
playMedia();
}catch (Exception e){
}
}
// Start playback via reflection, hiding the loading spinner first.
// NOTE(review): the empty catch swallows all reflection failures silently.
private void playMedia() {
try {
if (progressBar.getVisibility() == View.VISIBLE) {
progressBar.setVisibility(View.GONE);
}
libPlay.invoke(myInstance);
isPlaying = true;
}catch (Exception e){
}
}
// Resize the SurfaceView to the computed video dimensions; the surface change
// propagates to the native window via surfaceChanged().
private void updateSurfaceView(int pWidth, int pHeight) {
//update surfaceview dimension, this will cause the native window to change
Log.d("ANURAN UPDATE SURFACE", "width " + pWidth + ",height:" + pHeight);
// Assumes the SurfaceView's parent is a FrameLayout — TODO confirm against the layout XML.
FrameLayout.LayoutParams params = (FrameLayout.LayoutParams) surfaceView.getLayoutParams();
params.width = pWidth;
params.height = pHeight;
surfaceView.setLayoutParams(params);
}
// Returns the default display's current size in pixels as {width, height}.
@SuppressLint("NewApi")
private int[] getScreenRes() {
int[] res = new int[2];
Display display = getWindowManager().getDefaultDisplay();
Point size = new Point();
display.getSize(size);
res[0] = size.x;
res[1] = size.y;
return res;
}
// Stop playback and close when the activity leaves the foreground.
// NOTE(review): finishing from onStop() means the activity cannot survive being
// backgrounded — confirm that is intended.
@Override
protected void onStop() {
super.onStop();
Toast.makeText(MainActivity.this,"onStop called",Toast.LENGTH_SHORT).show();
stopPlaying();
finish();
}
@Override
public void onBackPressed() {
stopPlaying();
finish();
}
// Halt native playback via reflection; failures are swallowed.
private void stopPlaying() {
isPlaying = false;
try{
libStop.invoke(myInstance);
}catch (Exception e){
}
}
@Override
protected void onDestroy() {
super.onDestroy();
stopPlaying();
finish();
}
@Override
protected void onRestart() {
super.onRestart();
Toast.makeText(MainActivity.this,"onRestart called",Toast.LENGTH_SHORT).show();
// Re-show the spinner; it is hidden again once playback restarts.
progressBar.setVisibility(View.VISIBLE);
}
// Hand the (new) surface to the native renderer — only once the reflective
// handles exist, since these callbacks can fire before loadClass() completes.
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
if(isClassLoaded){
try {
libSetSurface.invoke(myInstance,holder.getSurface());
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
}
// Detach the native renderer from the dead surface.
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if(isClassLoaded) {
try {
libSetSurface.invoke(myInstance, null);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
}
}
// Background task that invokes libInit reflectively off the UI thread and then
// triggers postInit(). Must be parameterized: a raw `extends AsyncTask` whose
// doInBackground takes Void... does not override the abstract
// doInBackground(Params...) and fails to compile.
private class PlayVideo extends AsyncTask<Void, Void, Void> {
    @Override
    protected Void doInBackground(Void... voids) {
        try {
            // libInit returns 0 on success.
            int temp = (int) libInit.invoke(myInstance, url);
            isInitialized = (temp == 0);
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }

    @Override
    protected void onPostExecute(Void aVoid) {
        super.onPostExecute(aVoid);
        postInit();
        // No-op here (the task has already completed); kept from the original.
        this.cancel(true);
    }
}
}