
Media (1)
-
Bee video in portrait
14 May 2011
Updated: February 2012
Language: French
Type: Video
Other articles (54)
-
Publishing on MédiaSpip
13 June 2013 - Can I post content from an iPad tablet?
Yes, if your MédiaSpip installation is version 0.2 or higher. If needed, contact the administrator of your MédiaSpip to find out.
-
The accepted formats
28 January 2010 - The following commands give information about the formats and codecs supported by the local ffmpeg installation:
ffmpeg -codecs
ffmpeg -formats
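As an aside (not part of the original article), a minimal Java sketch along the following lines can run these commands from code and check whether a given format name appears in the output; the class and method names are illustrative only, and an ffmpeg binary is assumed to be available on the PATH:

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class FfmpegFormatCheck {
    // Rough check: returns true if "ffmpeg -formats" lists the given format name.
    static boolean supports(String format) throws Exception {
        Process p = new ProcessBuilder("ffmpeg", "-formats").redirectErrorStream(true).start();
        boolean found = false;
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                // Each supported format is listed as " DE  name  description"; a crude contains() is enough here.
                if (line.contains(" " + format + " ")) {
                    found = true;
                }
            }
        }
        p.waitFor();
        return found;
    }

    public static void main(String[] args) throws Exception {
        System.out.println("flv supported: " + supports("flv"));
        System.out.println("mp4 supported: " + supports("mp4"));
    }
}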
Accepted input video formats
This list is not exhaustive; it highlights the main formats used: h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10; m4v: raw MPEG-4 video format; flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263; Theora; wmv:
Possible output video formats
To begin with, we (...)
-
Adding notes and captions to images
7 February 2011 - To be able to add notes and captions to images, the first step is to install the "Légendes" plugin.
Once the plugin is activated, you can configure it in the configuration area to change the rights for creating, editing and deleting notes. By default, only site administrators can add notes to images.
Changes when adding a media item
When adding a media item of type "image", a new button appears above the preview (...)
On other sites (10427)
-
AVIO open error - RTSP protocol
22 February 2016, by Muthukumar S
I am new to JavaCV. I am using JavaCV 0.8. I used RecordActivity.java from the samples folder and made a few modifications to it. I tried to record video in the FLV and MP4 formats, but I got an exception like this:
org.bytedeco.javacv.FrameRecorder$Exception: avio_open error() error -1330794744: Could not open 'rtsp://10.0.0.136:1935/live/myStream'
package com.example.javacv10;
import java.io.IOException;
import java.nio.ShortBuffer;
import org.bytedeco.javacpp.opencv_core.IplImage;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
public class HomeActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "rtsp://10.0.0.136:1935/live/myStream";
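// Note on the error above (suggestion, not part of the original post): -1330794744 is
// AVERROR_PROTOCOL_NOT_FOUND, i.e. avio_open() has no output protocol handler for an
// rtsp:// URL. Since port 1935 is Wowza's RTMP port, publishing over RTMP instead,
// e.g. "rtmp://10.0.0.136:1935/live/myStream", together with the "flv" format set below,
// is the usual fix.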
long startTime = 0;
boolean recording = false;
private volatile FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private IplImage yuvIplimage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_home);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
initRecorder();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_home, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
if (yuvIplimage == null) {
yuvIplimage = IplImage.create(imageWidth, imageHeight, 8, 2);
Log.i(LOG_TAG, "create yuvIplimage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("flv");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
short[] audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_OUT_DEFAULT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.CHANNEL_OUT_DEFAULT, bufferSize);
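// Note: the third argument of getMinBufferSize()/AudioRecord() above is an audio encoding;
// the second listing further down this page (and the stock javacv RecordActivity sample)
// passes AudioFormat.ENCODING_PCM_16BIT here rather than AudioFormat.CHANNEL_OUT_DEFAULT.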
audioData = new short[bufferSize];
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData, 0, audioData.length);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.record(ShortBuffer.wrap(audioData, 0, bufferReadResult));
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
Camera.Parameters camParams = mCamera.getParameters();
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
camParams.setPreviewFrameRate(frameRate);
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
/* get video data */
if (yuvIplimage != null && recording) {
yuvIplimage.getByteBuffer().put(data);
Log.v(LOG_TAG,"Writing Frame");
try {
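// System.currentTimeMillis() returns milliseconds; FFmpegFrameRecorder timestamps are in
// microseconds, hence the factor of 1000 below.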
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvIplimage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
-
Live streaming video on server side not responding (Wowza, ffmpeg, JavaCV 1.1)
12 February 2016, by Muthukumar S
I am new to JavaCV. I tried to record MP4 and FLV videos using JavaCV 1.1; on the mobile side this works fine, but the stream does not show up on the server side. I am using Wowza Streaming Engine on the server. Thanks for any help.
package com.example.javacv11;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.Display;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
public class FirstActivity extends Activity implements OnClickListener {
private final static String CLASS_LABEL = "RecordActivity";
private final static String LOG_TAG = CLASS_LABEL;
private PowerManager.WakeLock mWakeLock;
private String ffmpeg_link = "rtmp://10.0.0.136:1935/live/myStream";
long startTime = 0;
boolean recording = false;
private FFmpegFrameRecorder recorder;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 320;
private int imageHeight = 240;
private int frameRate = 30;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
/* video data getting thread */
private Camera cameraDevice;
private CameraView cameraView;
private Frame yuvImage = null;
/* layout setting */
private final int bg_screen_bx = 232;
private final int bg_screen_by = 128;
private final int bg_screen_width = 700;
private final int bg_screen_height = 500;
private final int bg_width = 1123;
private final int bg_height = 715;
private final int live_width = 640;
private final int live_height = 480;
private int screenWidth, screenHeight;
private Button btnRecorderControl;
/* The number of seconds in the continuous record loop (or 0 to disable loop). */
final int RECORD_LENGTH = 10;
Frame[] images;
long[] timestamps;
ShortBuffer[] samples;
int imagesIndex, samplesIndex;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
setContentView(R.layout.activity_first);
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
initLayout();
}
@Override
protected void onResume() {
super.onResume();
if (mWakeLock == null) {
PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, CLASS_LABEL);
mWakeLock.acquire();
}
}
@Override
protected void onPause() {
super.onPause();
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
@Override
protected void onDestroy() {
super.onDestroy();
recording = false;
if (cameraView != null) {
cameraView.stopPreview();
}
if(cameraDevice != null) {
cameraDevice.stopPreview();
cameraDevice.release();
cameraDevice = null;
}
if (mWakeLock != null) {
mWakeLock.release();
mWakeLock = null;
}
}
private void initLayout() {
/* get size of screen */
Display display = ((WindowManager) getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth = display.getWidth();
screenHeight = display.getHeight();
RelativeLayout.LayoutParams layoutParam = null;
LayoutInflater myInflate = null;
myInflate = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
RelativeLayout topLayout = new RelativeLayout(this);
setContentView(topLayout);
LinearLayout preViewLayout = (LinearLayout) myInflate.inflate(R.layout.activity_first, null);
layoutParam = new RelativeLayout.LayoutParams(screenWidth, screenHeight);
topLayout.addView(preViewLayout, layoutParam);
/* add control button: start and stop */
btnRecorderControl = (Button) findViewById(R.id.recorder_control);
btnRecorderControl.setText("Start");
btnRecorderControl.setOnClickListener(this);
/* add camera view */
int display_width_d = (int) (1.0 * bg_screen_width * screenWidth / bg_width);
int display_height_d = (int) (1.0 * bg_screen_height * screenHeight / bg_height);
int prev_rw, prev_rh;
if (1.0 * display_width_d / display_height_d > 1.0 * live_width / live_height) {
prev_rh = display_height_d;
prev_rw = (int) (1.0 * display_height_d * live_width / live_height);
} else {
prev_rw = display_width_d;
prev_rh = (int) (1.0 * display_width_d * live_height / live_width);
}
layoutParam = new RelativeLayout.LayoutParams(prev_rw, prev_rh);
layoutParam.topMargin = (int) (1.0 * bg_screen_by * screenHeight / bg_height);
layoutParam.leftMargin = (int) (1.0 * bg_screen_bx * screenWidth / bg_width);
cameraDevice = Camera.open();
Log.i(LOG_TAG, "cameara open");
cameraView = new CameraView(this, cameraDevice);
topLayout.addView(cameraView, layoutParam);
Log.i(LOG_TAG, "cameara preview start: OK");
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(LOG_TAG,"init recorder");
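// With RECORD_LENGTH > 0 this sample runs in "continuous record loop" mode: preview frames
// and audio buffers are kept in in-memory circular buffers and only flushed to the recorder
// in stopRecording().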
if (RECORD_LENGTH > 0) {
imagesIndex = 0;
images = new Frame[RECORD_LENGTH * frameRate];
timestamps = new long[images.length];
for (int i = 0; i < images.length; i++) {
images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
timestamps[i] = -1;
}
} else if (yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(LOG_TAG, "create yuvImage");
}
Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
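// Note (suggestion, not part of the original post): for an rtmp:// target such as the Wowza
// live application above, the stream is normally wrapped in FLV, so recorder.setFormat("flv")
// is the usual choice here; "mp4" is typically used only when writing to a local file.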
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(LOG_TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
if (RECORD_LENGTH > 0) {
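// When RECORD_LENGTH > 0, nothing was written while recording: frames and audio were only
// collected into the circular buffers images[] and samples[]. This block rebases startTime
// so that at most the last RECORD_LENGTH seconds are kept, then replays the buffered video
// frames and audio samples into the recorder in order.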
Log.v(LOG_TAG,"Writing frames");
try {
int firstIndex = imagesIndex % samples.length;
int lastIndex = (imagesIndex - 1) % images.length;
if (imagesIndex <= images.length) {
firstIndex = 0;
lastIndex = imagesIndex - 1;
}
if ((startTime = timestamps[lastIndex] - RECORD_LENGTH * 1000000L) < 0) {
startTime = 0;
}
if (lastIndex < firstIndex) {
lastIndex += images.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
long t = timestamps[i % timestamps.length] - startTime;
if (t >= 0) {
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(images[i % images.length]);
}
}
firstIndex = samplesIndex % samples.length;
lastIndex = (samplesIndex - 1) % samples.length;
if (samplesIndex <= samples.length) {
firstIndex = 0;
lastIndex = samplesIndex - 1;
}
if (lastIndex < firstIndex) {
lastIndex += samples.length;
}
for (int i = firstIndex; i <= lastIndex; i++) {
recorder.recordSamples(samples[i % samples.length]);
}
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
recording = false;
Log.v(LOG_TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
if (RECORD_LENGTH > 0) {
samplesIndex = 0;
samples = new ShortBuffer[RECORD_LENGTH * sampleAudioRateInHz * 2 / bufferSize + 1];
for (int i = 0; i < samples.length; i++) {
samples[i] = ShortBuffer.allocate(bufferSize);
}
} else {
audioData = ShortBuffer.allocate(bufferSize);
}
Log.d(LOG_TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
if (RECORD_LENGTH > 0) {
audioData = samples[samplesIndex++ % samples.length];
audioData.position(0).limit(0);
}
//Log.v(LOG_TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
Log.v(LOG_TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
if (RECORD_LENGTH <= 0) try {
recorder.recordSamples(audioData);
//Log.v(LOG_TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(LOG_TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(LOG_TAG,"audioRecord released");
}
}
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
Log.w("camera","camera view");
mCamera = camera;
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
Camera.Parameters camParams = mCamera.getParameters();
List sizes = camParams.getSupportedPreviewSizes();
// Sort the list in ascending order
Collections.sort(sizes, new Comparator() {
public int compare(final Camera.Size a, final Camera.Size b) {
return a.width * a.height - b.width * b.height;
}
});
// Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
// reach the initial settings of imageWidth/imageHeight.
for (int i = 0; i < sizes.size(); i++) {
if ((sizes.get(i).width >= imageWidth && sizes.get(i).height >= imageHeight) || i == sizes.size() - 1) {
imageWidth = sizes.get(i).width;
imageHeight = sizes.get(i).height;
Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth + "x" + imageHeight);
break;
}
}
camParams.setPreviewSize(imageWidth, imageHeight);
Log.v(LOG_TAG,"Setting imageWidth: " + imageWidth + " imageHeight: " + imageHeight + " frameRate: " + frameRate);
camParams.setPreviewFrameRate(frameRate);
Log.v(LOG_TAG,"Preview Framerate: " + camParams.getPreviewFrameRate());
mCamera.setParameters(camParams);
startPreview();
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
if (RECORD_LENGTH > 0) {
int i = imagesIndex++ % images.length;
yuvImage = images[i];
timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
}
/* get video data */
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
if (RECORD_LENGTH <= 0) try {
Log.v(LOG_TAG,"Writing Frame");
long t = 1000 * (System.currentTimeMillis() - startTime);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
Log.v(LOG_TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
@Override
public void onClick(View v) {
if (!recording) {
startRecording();
Log.w(LOG_TAG, "Start Button Pushed");
btnRecorderControl.setText("Stop");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
Log.w(LOG_TAG, "Stop Button Pushed");
btnRecorderControl.setText("Start");
}
}
}
-
VLC command line: RTSP to MP4 file (video + audio)
8 February 2016, by Dmitriy Gerashenko
Windows 7 x64.
When I watch this link in the VLC player everything is fine:
rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov
When I try to capture the link to a file and display it at the same time:
- The video is displayed, but without sound.
- The created file is not playable.
Command:
vlc.exe -vvv "rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov" --sout="#transcode{venc=ffmpeg,vcodec=mp4v,vfilter=canvas{width=800,height=600},aenc=ffmpeg{strict=-2},acodec=mp4a}:duplicate{dst=display,dst=standard{access=file,mux=mp4,dst=video.mp4}"
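For reference, the same transcode/duplicate chain can be passed as a media option through vlcj, which is where the log below comes from. This is a minimal sketch, assuming vlcj 3.x and a VLC installation discoverable by NativeDiscovery; it is not the poster's actual code:

import uk.co.caprica.vlcj.discovery.NativeDiscovery;
import uk.co.caprica.vlcj.player.MediaPlayer;
import uk.co.caprica.vlcj.player.MediaPlayerFactory;

public class RtspToMp4 {
    public static void main(String[] args) throws Exception {
        new NativeDiscovery().discover(); // locate libvlc
        MediaPlayerFactory factory = new MediaPlayerFactory();
        MediaPlayer player = factory.newHeadlessMediaPlayer();
        // Same sout chain as the command line above, with both braces of duplicate{} closed.
        String sout = ":sout=#transcode{venc=ffmpeg,vcodec=mp4v,vfilter=canvas{width=800,height=600},"
                + "aenc=ffmpeg{strict=-2},acodec=mp4a}"
                + ":duplicate{dst=display,dst=standard{access=file,mux=mp4,dst=video.mp4}}";
        player.playMedia("rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov", sout);
        Thread.sleep(60000); // let the stream run for a minute
        player.stop();
        player.release();
        factory.release();
    }
}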
There are some errors in the log, for example:
MPEG4GenericRTPSource Warning: Unknown or unsupported "mode": AAC-hbr
Is this the cause of my problems? How can I solve my issue?
Complete log (from VLCJ):
cd C:\Users\gda\Documents\NetBeansProjects\Video; "JAVA_HOME=C:\\Program Files (x86)\\Java\\jdk1.8.0_71" cmd /c "\"\"C:\\Users\\gda\\AppData\\Roaming\\NetBeans\\8.0.2\\maven\\bin\\mvn.bat\" -Dexec.args=\"-classpath %classpath ru.cherezweb.app.video.Video\" -Dexec.executable=\"C:\\Program Files (x86)\\Java\\jdk1.8.0_71\\bin\\java.exe\" -Dmaven.ext.class.path=C:\\Users\\gda\\AppData\\Roaming\\NetBeans\\8.0.2\\maven-nblib\\netbeans-eventspy.jar -Dfile.encoding=UTF-8 process-classes org.codehaus.mojo:exec-maven-plugin:1.2.1:exec\""
Scanning for projects...
------------------------------------------------------------------------
Building Video 1.0-SNAPSHOT
------------------------------------------------------------------------
--- maven-resources-plugin:2.5:resources (default-resources) @ Video ---
[debug] execute contextualize
Using 'UTF-8' encoding to copy filtered resources.
skip non existing resourceDirectory C:\Users\gda\Documents\NetBeansProjects\Video\src\main\resources
--- maven-compiler-plugin:2.3.2:compile (default-compile) @ Video ---
Nothing to compile - all classes are up to date
--- exec-maven-plugin:1.2.1:exec (default-cli) @ Video ---
20:37:29.097 [main] INFO uk.co.caprica.vlcj.Info - vlcj: 3.10.1
20:37:29.101 [main] INFO uk.co.caprica.vlcj.Info - java: 1.8.0_71 Oracle Corporation
20:37:29.101 [main] INFO uk.co.caprica.vlcj.Info - java home: C:\Program Files (x86)\Java\jdk1.8.0_71\jre
20:37:29.101 [main] INFO uk.co.caprica.vlcj.Info - os: Windows 10 10.0 x86
20:37:29.103 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - discover()
20:37:29.103 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - jnaLibraryPath=null
20:37:29.103 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - discoveryStrategy=uk.co.caprica.vlcj.discovery.linux.DefaultLinuxNativeDiscoveryStrategy@176c05c
20:37:29.104 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - supported=false
20:37:29.104 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - discoveryStrategy=uk.co.caprica.vlcj.discovery.windows.DefaultWindowsNativeDiscoveryStrategy@1eb6432
20:37:29.104 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - supported=true
20:37:29.104 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - discover()
20:37:29.105 [main] DEBUG u.c.c.v.r.windows.WindowsRuntimeUtil - getVlcInstallDir()
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - directoryNames=[C:\Program Files (x86)\VideoLAN\VLC, C:\Users\gda\Documents\NetBeansProjects\Video, C:\Program Files\Broadcom\Broadcom 802.11 Network Adapter, C:\Program Files (x86)\Intel\iCLS Client\, C:\Program Files\Intel\iCLS Client\, C:\PROGRAMDATA\ORACLE\JAVA\JAVAPATH, C:\Windows\SYSTEM32, C:\Windows, C:\Windows\SYSTEM32\WBEM, C:\Windows\SYSTEM32\WINDOWSPOWERSHELL\V1.0\, C:\PROGRAM FILES\INTEL\INTEL(R) MANAGEMENT ENGINE COMPONENTS\DAL, C:\PROGRAM FILES\INTEL\INTEL(R) MANAGEMENT ENGINE COMPONENTS\IPT, C:\PROGRAM FILES (X86)\INTEL\INTEL(R) MANAGEMENT ENGINE COMPONENTS\DAL, C:\PROGRAM FILES (X86)\INTEL\INTEL(R) MANAGEMENT ENGINE COMPONENTS\IPT, C:\Program Files\Intel\Intel(R) Management Engine Components\DAL, C:\Program Files\Intel\Intel(R) Management Engine Components\IPT, C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL, C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT, C:\Program Files\WIDCOMM\Bluetooth Software\, C:\Program Files\WIDCOMM\Bluetooth Software\syswow64, C:\Program Files\TortoiseSVN\bin, C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common, C:\Program Files\OpenVPN\bin, C:\WINDOWS\system32, C:\WINDOWS, C:\WINDOWS\System32\Wbem, C:\WINDOWS\System32\WindowsPowerShell\v1.0\, C:\Program Files\TortoiseGit\bin, C:\Program Files (x86)\Skype\Phone\]
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - directoryName=C:\Program Files (x86)\VideoLAN\VLC
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - Matched 'libvlc.dll' in 'C:\Program Files (x86)\VideoLAN\VLC'
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - Matched 'libvlccore.dll' in 'C:\Program Files (x86)\VideoLAN\VLC'
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - Matched all required files
20:37:29.430 [main] DEBUG u.c.c.v.d.AbstractNativeDiscoveryStrategy - result=C:\Program Files (x86)\VideoLAN\VLC
20:37:29.430 [main] DEBUG u.c.c.vlcj.discovery.NativeDiscovery - path=C:\Program Files (x86)\VideoLAN\VLC
20:37:29.431 [main] INFO u.c.c.vlcj.discovery.NativeDiscovery - Discovery found libvlc at 'C:\Program Files (x86)\VideoLAN\VLC'
true
2.2.1 Terry Pratchett (Weatherwax)
20:37:29.490 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - initX=null
20:37:29.494 [main] INFO u.c.c.vlcj.binding.LibVlcFactory - vlc: 2.2.1 Terry Pratchett (Weatherwax), changeset 2.2.1-0-ga425c42
20:37:29.494 [main] INFO u.c.c.vlcj.binding.LibVlcFactory - libvlc: C:\Program Files (x86)\VideoLAN\VLC\libvlc.dll
20:37:29.494 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - MediaPlayerFactory(libvlc=Proxy interface to Native Library ,libvlcArgs=[])
20:37:29.494 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - jna.library.path=null
20:37:29.494 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - VLC_PLUGIN_PATH=null
20:37:29.529 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - instance=native@0x1195650 (uk.co.caprica.vlcj.binding.internal.libvlc_instance_t@1195650)
20:37:29.529 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - equalizerAvailable=true
20:37:29.529 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - createEqualizerBandFrequencies()
20:37:29.529 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - numBands=10
20:37:29.530 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - result=[31.25, 62.5, 125.0, 250.0, 500.0, 1000.0, 2000.0, 4000.0, 8000.0, 16000.0]
20:37:29.530 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - createEqualizerPresetNames()
20:37:29.530 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - numPresets=18
20:37:29.530 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - result=[Flat, Classical, Club, Dance, Full bass, Full bass and treble, Full treble, Headphones, Large Hall, Live, Party, Pop, Reggae, Rock, Ska, Soft, Soft rock, Techno]
20:37:29.530 [main] DEBUG u.c.c.vlcj.player.MediaPlayerFactory - newHeadlessMediaPlayer()
20:37:29.538 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - DefaultMediaPlayer(libvlc=Proxy interface to Native Library , instance=native@0x1195650 (uk.co.caprica.vlcj.binding.internal.libvlc_instance_t@1195650))
20:37:29.538 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - createInstance()
20:37:29.543 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mediaPlayerInstance=native@0x16853ac4 (uk.co.caprica.vlcj.binding.internal.libvlc_media_player_t@16853ac4)
20:37:29.544 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mediaPlayerEventManager=native@0x16878818 (uk.co.caprica.vlcj.binding.internal.libvlc_event_manager_t@16878818)
20:37:29.544 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - registerEventListener()
20:37:29.546 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerMediaChanged
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerNothingSpecial
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerOpening
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerBuffering
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerPlaying
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.567 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerPaused
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerStopped
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerForward
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerBackward
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerEndReached
20:37:29.568 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerEncounteredError
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerTimeChanged
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerPositionChanged
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerSeekableChanged
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerPausableChanged
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerTitleChanged
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.569 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerSnapshotTaken
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerLengthChanged
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerVout
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaPlayerScrambledChanged
20:37:29.570 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.572 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - playMedia(mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov,mediaOptions=[:sout=#transcode{venc=ffmpeg,vcodec=mp4v,vfilter=canvas{width=800,height=600},aenc=ffmpeg{strict=-2},acodec=mp4a}:duplicate{dst=display,dst=standard{access=file,mux=mp4,dst=yahoo.m4v}}])
20:37:29.573 [main] DEBUG u.c.c.v.player.MediaResourceLocator - encodeMrl(mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov)
20:37:29.573 [main] DEBUG u.c.c.v.player.MediaResourceLocator - MRL does not contain any Unicode characters
20:37:29.573 [main] DEBUG u.c.c.v.player.MediaResourceLocator - result=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - playMedia(media=SimpleMedia[mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov,mediaOptions=[Ljava.lang.String;@17c74e5])
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - prepareMedia(media=SimpleMedia[mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov,mediaOptions=[Ljava.lang.String;@17c74e5])
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - setMedia(media=SimpleMedia[mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov,mediaOptions=[Ljava.lang.String;@17c74e5])
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - createMediaInstance(media=SimpleMedia[mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov,mediaOptions=[Ljava.lang.String;@17c74e5])
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - Treating mrl as a location
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mediaInstance=native@0x16876500 (uk.co.caprica.vlcj.binding.internal.libvlc_media_t@16876500)
20:37:29.574 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mediaOption=:sout=#transcode{venc=ffmpeg,vcodec=mp4v,vfilter=canvas{width=800,height=600},aenc=ffmpeg{strict=-2},acodec=mp4a}:duplicate{dst=display,dst=standard{access=file,mux=mp4,dst=yahoo.m4v}}
20:37:29.575 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - registerMediaEventListener()
20:37:29.575 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaMetaChanged
20:37:29.575 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.575 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaSubItemAdded
20:37:29.575 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaDurationChanged
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaParsedChanged
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaFreed
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaStateChanged
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - event=libvlc_MediaSubItemTreeAdded
20:37:29.576 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=0
20:37:29.585 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mrl(mediaInstance=native@0x16876500 (uk.co.caprica.vlcj.binding.internal.libvlc_media_t@16876500))
20:37:29.587 [pool-2-thread-1] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - mediaChanged(mediaPlayer=uk.co.caprica.vlcj.player.headless.DefaultHeadlessMediaPlayer@1727f27,media=native@0x16876500 (uk.co.caprica.vlcj.binding.internal.libvlc_media_t@16876500),mrl=rtsp://184.72.239.149/vod/mp4:BigBuckBunny_175k.mov)
20:37:29.588 [pool-2-thread-1] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - Raising event for new media
20:37:29.588 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - result=true
20:37:29.588 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - play()
20:37:29.593 [main] DEBUG u.c.c.vlcj.player.DefaultMediaPlayer - after play
MPEG4GenericRTPSource Warning: Unknown or unsupported "mode": AAC-hbr
[1682c9cc] core input error: ES_OUT_RESET_PCR called
[16f79744] core decoder error: cannot continue streaming due to errors
[1682c9cc] core input error: ES_OUT_RESET_PCR called
[h264 @ 1702ba80] decode_slice_header error
[h264 @ 1702ba80] decode_slice_header error
[16828134] stream_out_transcode stream out: input interval 41666 (base 2)
[16828134] stream_out_transcode stream out: output interval 41666 (base 1)