
Other articles (50)
-
APPENDIX: The plugins used specifically for the farm
5 March 2010
The central/master site of the farm needs several additional plugins, beyond those of the channel sites, to work properly: the Gestion de la mutualisation plugin; the inscription3 plugin, to manage registrations and requests to create a mutualisation instance when users sign up; the verifier plugin, which provides a field-validation API (used by inscription3); the champs extras v2 plugin, required by inscription3 (...)
-
MediaSPIP v0.2
21 June 2013
MediaSPIP 0.2 is the first stable MediaSPIP release.
Its official release date is June 21, 2013, and it is announced here.
The zip file provided here contains only the sources of MediaSPIP in its standalone version.
To get a working installation, you must manually install all software dependencies on the server.
If you want to use this archive for an installation in "farm mode", you will also need to carry out other manual (...)
-
Creating farms of unique websites
13 April 2011
MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
This allows (among other things): implementation costs to be shared between several different projects/individuals; rapid deployment of multiple unique sites; creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)
On other sites (7562)
-
javax.media.NoDataSinkException
23 November 2022, by Divya
I am trying to convert JPEG images into a .mov video file.



package com.ecomm.pl4mms.test;

import java.io.*;
import java.util.*;
import java.awt.Dimension;

import javax.media.*;
import javax.media.control.*;
import javax.media.protocol.*;
import javax.media.protocol.DataSource;
import javax.media.datasink.*;
import javax.media.format.VideoFormat;
import javax.media.format.JPEGFormat;

public class JpegImagesToMovie implements ControllerListener, DataSinkListener {

 public boolean doItPath(int width, int height, int frameRate, Vector inFiles, String outputURL) {
 // Check for output file extension.
 if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
 // System.err.println("The output file extension should end with a
 // .mov extension");
 prUsage();
 }

 // Generate the output media locators.
 MediaLocator oml;

 if ((oml = createMediaLocator("file:" + outputURL)) == null) {
 System.err.println("Cannot build media locator from: " + outputURL);
 return false;
 }

 boolean success = doIt(width, height, frameRate, inFiles, oml);

 System.gc();
 return success;
 }

 public boolean doIt(int width, int height, int frameRate, Vector inFiles, MediaLocator outML) {
 try {
 System.out.println(inFiles.size());
 ImageDataSource ids = new ImageDataSource(width, height, frameRate, inFiles);

 Processor p;

 try {
 // System.err.println("- create processor for the image
 // datasource ...");
 System.out.println("processor");
 p = Manager.createProcessor(ids);
 System.out.println("success");
 } catch (Exception e) {
 // System.err.println("Yikes! Cannot create a processor from the
 // data source.");
 return false;
 }

 p.addControllerListener(this);

 // Put the Processor into configured state so we can set
 // some processing options on the processor.
 p.configure();
 if (!waitForState(p, Processor.Configured)) {
 System.out.println("Issue configuring");
 // System.err.println("Failed to configure the processor.");
 p.close();
 p.deallocate();
 return false;
 }
 System.out.println("Configured");

 // Set the output content descriptor to QuickTime.
 p.setContentDescriptor(new ContentDescriptor(FileTypeDescriptor.QUICKTIME));
System.out.println(outML);
 // Query for the processor for supported formats.
 // Then set it on the processor.
 TrackControl tcs[] = p.getTrackControls();
 Format f[] = tcs[0].getSupportedFormats();
 // Check for supported formats before dereferencing f[0].
 if (f == null || f.length <= 0) {
 System.err.println("The mux does not support the input format: " + tcs[0].getFormat());
 p.close();
 p.deallocate();
 return false;
 }
 System.out.println(f[0].getEncoding());

 tcs[0].setFormat(f[0]);

 // System.err.println("Setting the track format to: " + f[0]);

 // We are done with programming the processor. Let's just
 // realize it.
 p.realize();
 if (!waitForState(p, Processor.Realized)) {
 // System.err.println("Failed to realize the processor.");
 p.close();
 p.deallocate();
 return false;
 }

 // Now, we'll need to create a DataSink.
 DataSink dsink;
 if ((dsink = createDataSink(p, outML)) == null) {
 // System.err.println("Failed to create a DataSink for the given
 // output MediaLocator: " + outML);
 p.close();
 p.deallocate();
 return false;
 }

 dsink.addDataSinkListener(this);
 fileDone = false;

 // System.err.println("start processing...");

 // OK, we can now start the actual transcoding.
 try {
 p.start();
 dsink.start();
 } catch (IOException e) {
 p.close();
 p.deallocate();
 dsink.close();
 // System.err.println("IO error during processing");
 return false;
 }

 // Wait for EndOfStream event.
 waitForFileDone();

 // Cleanup.
 try {
 dsink.close();
 } catch (Exception e) {
 }
 p.removeControllerListener(this);

 // System.err.println("...done processing.");

 p.close();

 return true;
 } catch (NotConfiguredError e) {
 // TODO Auto-generated catch block
 e.printStackTrace();
 }

 return false;
 }

 /**
 * Create the DataSink.
 */
 DataSink createDataSink(Processor p, MediaLocator outML) {
System.out.println("In data sink");
 DataSource ds;

 if ((ds = p.getDataOutput()) == null) {
 System.out.println("Something is really wrong: the processor does not have an output DataSource");
 return null;
 }

 DataSink dsink;

 try {
 System.out.println("- create DataSink for: " + ds.toString()+ds.getContentType());
 dsink = Manager.createDataSink(ds, outML);
 dsink.open();
 System.out.println("Done data sink");
 } catch (Exception e) {
 System.err.println("Cannot create the DataSink: " +e);
 e.printStackTrace();
 return null;
 }

 return dsink;
 }

 Object waitSync = new Object();
 boolean stateTransitionOK = true;

 /**
 * Block until the processor has transitioned to the given state. Return
 * false if the transition failed.
 */
 boolean waitForState(Processor p, int state) {
 synchronized (waitSync) {
 try {
 while (p.getState() < state && stateTransitionOK)
 waitSync.wait();
 } catch (Exception e) {
 }
 }
 return stateTransitionOK;
 }

 /**
 * Controller Listener.
 */
 public void controllerUpdate(ControllerEvent evt) {

 if (evt instanceof ConfigureCompleteEvent || evt instanceof RealizeCompleteEvent
 || evt instanceof PrefetchCompleteEvent) {
 synchronized (waitSync) {
 stateTransitionOK = true;
 waitSync.notifyAll();
 }
 } else if (evt instanceof ResourceUnavailableEvent) {
 synchronized (waitSync) {
 stateTransitionOK = false;
 waitSync.notifyAll();
 }
 } else if (evt instanceof EndOfMediaEvent) {
 evt.getSourceController().stop();
 evt.getSourceController().close();
 }
 }

 Object waitFileSync = new Object();
 boolean fileDone = false;
 boolean fileSuccess = true;

 /**
 * Block until file writing is done.
 */
 boolean waitForFileDone() {
 synchronized (waitFileSync) {
 try {
 while (!fileDone)
 waitFileSync.wait();
 } catch (Exception e) {
 }
 }
 return fileSuccess;
 }

 /**
 * Event handler for the file writer.
 */
 public void dataSinkUpdate(DataSinkEvent evt) {

 if (evt instanceof EndOfStreamEvent) {
 synchronized (waitFileSync) {
 fileDone = true;
 waitFileSync.notifyAll();
 }
 } else if (evt instanceof DataSinkErrorEvent) {
 synchronized (waitFileSync) {
 fileDone = true;
 fileSuccess = false;
 waitFileSync.notifyAll();
 }
 }
 }

 public static void main(String arg[]) {
 try {
 // Arguments must be separate tokens, not a single concatenated string.
 String args[] = { "-w", "100", "-h", "100", "-f", "100", "-o", "F:\\test.mov", "F:\\Text69.jpg", "F:\\Textnew.jpg" };
 if (args.length == 0)
 prUsage();

 // Parse the arguments.
 int i = 0;
 int width = -1, height = -1, frameRate = 1;
 Vector inputFiles = new Vector();
 String outputURL = null;

 while (i < args.length) {

 if (args[i].equals("-w")) {
 i++;
 if (i >= args.length)
 width = new Integer(args[i]).intValue();
 } else if (args[i].equals("-h")) {
 i++;
 if (i >= args.length)
 height = new Integer(args[i]).intValue();
 } else if (args[i].equals("-f")) {
 i++;
 if (i >= args.length)
 frameRate = new Integer(args[i]).intValue();
 } else if (args[i].equals("-o")) {
 System.out.println("in ou");
 i++;
 System.out.println(i);
 if (i >= args.length)
 outputURL = args[i];
 System.out.println(outputURL);
 } else {
 System.out.println("adding"+args[i]);
 inputFiles.addElement(args[i]);
 }
 i++;

 }
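 // NOTE: the hard-coded input file and output URL below override whatever
 // was parsed from args above (left in from testing).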
 inputFiles.addElement("F:\\Textnew.jpg");
 outputURL = "F:\\test.mov";
 System.out.println(inputFiles.size() + outputURL);
 if (outputURL == null || inputFiles.size() == 0)
 prUsage();

 // Check for output file extension.
 if (!outputURL.endsWith(".mov") && !outputURL.endsWith(".MOV")) {
 System.err.println("The output file extension should end with a .mov extension");
 prUsage();
 }
 width = 100;
 height = 100;
 if (width < 0 || height < 0) {
 System.err.println("Please specify the correct image size.");
 prUsage();
 }

 // Check the frame rate.
 if (frameRate < 1)
 frameRate = 1;

 // Generate the output media locators.
 MediaLocator oml;
 oml = createMediaLocator(outputURL);
 System.out.println("Media" + oml);
 if (oml == null) {
 System.err.println("Cannot build media locator from: " + outputURL);
 // //System.exit(0);
 }
 System.out.println("Before change");
System.out.println(inputFiles.size());
 JpegImagesToMovie imageToMovie = new JpegImagesToMovie();
 boolean status = imageToMovie.doIt(width, height, frameRate, inputFiles, oml);
 System.out.println("Status"+status);
 //System.exit(0);
 } catch (Exception e) {
 // TODO Auto-generated catch block
 e.printStackTrace();
 }
 }

 static void prUsage() {
 System.err.println(
 "Usage: java JpegImagesToMovie -w <width> -h <height> -f <frame rate> -o <output URL> <input JPEG file 1> <input JPEG file 2> ...");
 //System.exit(-1);
 }

 /**
 * Create a media locator from the given string.
 */
 static MediaLocator createMediaLocator(String url) {
 System.out.println(url);
 MediaLocator ml;

 if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null)
 return ml;

 if (url.startsWith(File.separator)) {
 if ((ml = new MediaLocator("file:" + url)) != null)
 return ml;
 } else {
 String file = "file:" + System.getProperty("user.dir") + File.separator + url;
 if ((ml = new MediaLocator(file)) != null)
 return ml;
 }

 return null;
 }

 ///////////////////////////////////////////////
 //
 // Inner classes.
 ///////////////////////////////////////////////

 /**
 * A DataSource to read from a list of JPEG image files and turn that into a
 * stream of JMF buffers. The DataSource is not seekable or positionable.
 */
 class ImageDataSource extends PullBufferDataSource {

 ImageSourceStream streams[];

 ImageDataSource(int width, int height, int frameRate, Vector images) {
 streams = new ImageSourceStream[1];
 streams[0] = new ImageSourceStream(width, height, frameRate, images);
 }

 public void setLocator(MediaLocator source) {
 }

 public MediaLocator getLocator() {
 return null;
 }

 /**
 * Content type is of RAW since we are sending buffers of video frames
 * without a container format.
 */
 public String getContentType() {
 return ContentDescriptor.RAW;
 }

 public void connect() {
 }

 public void disconnect() {
 }

 public void start() {
 }

 public void stop() {
 }

 /**
 * Return the ImageSourceStreams.
 */
 public PullBufferStream[] getStreams() {
 return streams;
 }

 /**
 * We could have derived the duration from the number of frames and
 * frame rate. But for the purpose of this program, it's not necessary.
 */
 public Time getDuration() {
 return DURATION_UNKNOWN;
 }

 public Object[] getControls() {
 return new Object[0];
 }

 public Object getControl(String type) {
 return null;
 }
 }

 /**
 * The source stream to go along with ImageDataSource.
 */
 class ImageSourceStream implements PullBufferStream {

 Vector images;
 int width, height;
 VideoFormat format;

 int nextImage = 0; // index of the next image to be read.
 boolean ended = false;

 public ImageSourceStream(int width, int height, int frameRate, Vector images) {
 this.width = width;
 this.height = height;
 this.images = images;

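 // JPEG-encoded frames of the given size at the requested frame rate,
 // with quality factor 75 and 4:2:2 chroma decimation (the q and dec
 // arguments of JPEGFormat).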
 format = new JPEGFormat(new Dimension(width, height), Format.NOT_SPECIFIED, Format.byteArray,
 (float) frameRate, 75, JPEGFormat.DEC_422);
 }

 /**
 * We should never need to block assuming data are read from files.
 */
 public boolean willReadBlock() {
 return false;
 }

 /**
 * This is called from the Processor to read a frame worth of video
 * data.
 */
 public void read(Buffer buf) throws IOException {

 // Check if we've finished all the frames.
 if (nextImage >= images.size()) {
 // We are done. Set EndOfMedia.
 System.err.println("Done reading all images.");
 buf.setEOM(true);
 buf.setOffset(0);
 buf.setLength(0);
 ended = true;
 return;
 }

 String imageFile = (String) images.elementAt(nextImage);
 nextImage++;

 System.err.println(" - reading image file: " + imageFile);

 // Open a random access file for the next image.
 RandomAccessFile raFile;
 raFile = new RandomAccessFile(imageFile, "r");

 byte data[] = null;

 // Check the input buffer type & size.

 if (buf.getData() instanceof byte[])
 data = (byte[]) buf.getData();

 // Check to see the given buffer is big enough for the frame.
 if (data == null || data.length < raFile.length()) {
 data = new byte[(int) raFile.length()];
 buf.setData(data);
 }

 // Read the entire JPEG image from the file.
 raFile.readFully(data, 0, (int) raFile.length());

 System.err.println(" read " + raFile.length() + " bytes.");

 buf.setOffset(0);
 buf.setLength((int) raFile.length());
 buf.setFormat(format);
 buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);

 // Close the random access file.
 raFile.close();
 }

 /**
 * Return the format of each video frame. That will be JPEG.
 */
 public Format getFormat() {
 return format;
 }

 public ContentDescriptor getContentDescriptor() {
 return new ContentDescriptor(ContentDescriptor.RAW);
 }

 public long getContentLength() {
 return 0;
 }

 public boolean endOfStream() {
 return ended;
 }

 public Object[] getControls() {
 return new Object[0];
 }

 public Object getControl(String type) {
 return null;
 }
 }
}



I am getting



Cannot create the DataSink: javax.media.NoDataSinkException: Cannot find a DataSink for: com.sun.media.multiplexer.BasicMux$BasicMuxDataSource@d7b1517
javax.media.NoDataSinkException: Cannot find a DataSink for: com.sun.media.multiplexer.BasicMux$BasicMuxDataSource@d7b1517
 at javax.media.Manager.createDataSink(Manager.java:1894)
 at com.ecomm.pl4mms.test.JpegImagesToMovie.createDataSink(JpegImagesToMovie.java:168)
 at com.ecomm.pl4mms.test.JpegImagesToMovie.doIt(JpegImagesToMovie.java:104)
 at com.ecomm.pl4mms.test.JpegImagesToMovie.main(JpegImagesToMovie.java:330)




Please help me resolve this and let me know what the cause of it might be.



I am using Java 1.8 and trying to create a video from JPEG images using javax.media. I followed http://www.oracle.com/technetwork/java/javase/documentation/jpegimagestomovie-176885.html to write the code.
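javax.media.NoDataSinkException means Manager.createDataSink found no multiplexer/DataSink registered for the requested output. With JMF this usually indicates that only jmf.jar is on the classpath, without a complete JMF installation and its plugin registry (jmf.properties), so the QuickTime multiplexer is never registered. Below is a minimal diagnostic sketch, my own addition rather than code from the original post (the class name ListJmfMuxes is made up), that prints the multiplexers JMF currently knows about:

import java.util.Vector;

import javax.media.PlugInManager;

public class ListJmfMuxes {
 public static void main(String[] args) {
 // null input/output formats match any format; MULTIPLEXER selects muxes.
 Vector muxes = PlugInManager.getPlugInList(null, null, PlugInManager.MULTIPLEXER);
 if (muxes.isEmpty())
 System.out.println("No multiplexers registered - JMF is not fully installed.");
 for (Object name : muxes)
 System.out.println(name); // expect an entry such as com.sun.media.multiplexer.video.QuicktimeMux
 }
}

If no QuickTime multiplexer appears in that list, reinstalling JMF (or re-registering the plugins with the JMFRegistry tool) is the first thing to try.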


-
VP8 Codec SDK "Aylesbury" Release
28 October 2010, by noreply@blogger.com (John Luther)
Today we’re making available "Aylesbury," our first named release of libvpx, the VP8 codec SDK. VP8 is the video codec used in WebM. Note that the VP8 specification has not changed, only the SDK.
What’s an Aylesbury? It’s a breed of duck. We like ducks, so we plan to use duck-related names for each major libvpx release, in alphabetical order. Our goal is to have one named release of libvpx per calendar quarter, each with a theme.
You can download the Aylesbury libvpx release from our Downloads page or check it out of our Git repository and build it yourself. In the coming days Aylesbury will be integrated into all of the WebM project components (DirectShow filters, QuickTime plugins, etc.). We encourage anyone using our components to upgrade to the Aylesbury releases.
For Aylesbury the theme was faster decoder, better encoder. We used our May 19, 2010 launch release of libvpx as the benchmark. We’re very happy with the results (see graphs below):
- 20-40% (average 28%) improvement in libvpx decoder speed
- Over 7% overall PSNR improvement (6.3% SSIM) in VP8 "best" quality encoding mode, and up to 60% improvement on very noisy, still or slow moving source video.
The main improvements to the decoder are :
- Single-core assembly "hot spot" optimizations, including improved vp8_sixtap_predict() and SSE2 loopfilter functions
- Threading improvements for more efficient use of multiple processor cores
- Improved memory handling and reduced footprint
- Combining IDCT and reconstruction steps
- SSSE3 usage in functions where appropriate
On the encoder front, we concentrated on clips in the 30-45 dB range and saw the biggest gains in higher-quality source clips (greater than 38 dB), low to medium-motion clips, and clips with noisy source material. Many code contributions made this possible, but a few of the highlights were:
- Adaptive width and strength alternate reference frame noise suppression filter with optional motion compensation.
- Transform improvements (improved accuracy and reduction in round trip error)
- Trellis-based quantized coefficient optimization
- Two-pass rate control and quantizer changes
- Rate distortion changes
- Zero bin and rounding changes
- Work on MB-level quality control and bit allocation
We’re targeting Q1 2011 for the next named libvpx release, which we’re calling Bali. The theme for that release will be faster encoder. We are constantly working on improvements to video quality in the encoder, so after Aylesbury we won’t tie that work to specific named releases.
WebM at Streaming Media West
Members of the WebM project will discuss Aylesbury during a session at the Streaming Media West conference on November 3rd (session C203: WebM Open Video Project Update). For more information, visit www.streamingmedia.com/west.
John Luther is Product Manager of the WebM Project.
-
Android studio + OpenCV + FFmpeg
11 July 2018, by t0m
I have a problem with code that works on a Genymotion device with Android 4.1.1, but not on a Genymotion device with Android 5.0.1 or on a real Huawei Honor 4C with Android 4.4.2.
I imported OpenCV 3.1 into Android Studio following: https://stackoverflow.com/a/27421494/4244605
I added JavaCV with FFmpeg following: https://github.com/bytedeco/javacv
Android Studio 1.5.1
minSdkVersion 15
compileSdkVersion 23
The code is only for testing.
OpenCVCameraActivity.java:

import android.app.Activity;
import android.hardware.Camera;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SubMenu;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import java.io.File;
import java.nio.ShortBuffer;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity implements
CameraBridgeViewBase.CvCameraViewListener2,
View.OnTouchListener {
//name of activity, for DEBUGGING
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private OpenCVCameraPreview mOpenCvCameraView;
private List<Camera.Size> mResolutionList;
private MenuItem[] mEffectMenuItems;
private SubMenu mColorEffectsMenu;
private MenuItem[] mResolutionMenuItems;
private SubMenu mResolutionMenu;
private static long frameCounter = 0;
long startTime = 0;
private Mat edgesMat;
boolean recording = false;
private int sampleAudioRateInHz = 44100;
private int imageWidth = 1920;
private int imageHeight = 1080;
private int frameRate = 30;
private Frame yuvImage = null;
private File ffmpeg_link;
private FFmpegFrameRecorder recorder;
/* audio data getting thread */
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
volatile boolean runAudioThread = true;
ShortBuffer[] samples;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
Log.i(TAG, "OpenCV loaded successfully");
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(OpenCVCameraActivity.this);
break;
default:
super.onManagerConnected(status);
break;
}
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
try {
setContentView(R.layout.activity_opencv);
mOpenCvCameraView = (OpenCVCameraPreview) findViewById(R.id.openCVCameraPreview);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCvCameraViewListener(this);
//mOpenCvCameraView.enableFpsMeter();
ffmpeg_link = new File(Environment.getExternalStorageDirectory(), "stream.mp4");
} catch (Exception e){
e.printStackTrace();
}
}
@Override
protected void onRestart() {
if (Static.DEBUG) Log.i(TAG, "onRestart()");
super.onRestart();
}
@Override
protected void onStart() {
if (Static.DEBUG) Log.i(TAG, "onStart()");
super.onStart();
}
@Override
protected void onResume() {
if (Static.DEBUG) Log.i(TAG, "onResume()");
super.onResume();
if (!OpenCVLoader.initDebug()) {
Log.i(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_11, this, mLoaderCallback);
} else {
Log.i(TAG, "OpenCV library found inside package. Using it!");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
super.onCreateOptionsMenu(menu);
List<String> effects = mOpenCvCameraView.getEffectList();
if (effects == null) {
Log.e(TAG, "Color effects are not supported by device!");
return true;
}
mColorEffectsMenu = menu.addSubMenu("Color Effect");
mEffectMenuItems = new MenuItem[effects.size()];
int idx = 0;
ListIterator<String> effectItr = effects.listIterator();
while(effectItr.hasNext()) {
String element = effectItr.next();
mEffectMenuItems[idx] = mColorEffectsMenu.add(1, idx, Menu.NONE, element);
idx++;
}
mResolutionMenu = menu.addSubMenu("Resolution");
mResolutionList = mOpenCvCameraView.getResolutionList();
mResolutionMenuItems = new MenuItem[mResolutionList.size()];
ListIterator<Camera.Size> resolutionItr = mResolutionList.listIterator();
idx = 0;
while(resolutionItr.hasNext()) {
Camera.Size element = resolutionItr.next();
mResolutionMenuItems[idx] = mResolutionMenu.add(2, idx, Menu.NONE,
Integer.valueOf(element.width).toString() + "x" + Integer.valueOf(element.height).toString());
idx++;
}
return true;
}
@Override
protected void onPause() {
if (Static.DEBUG) Log.i(TAG, "onPause()");
super.onPause();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
@Override
protected void onStop() {
if (Static.DEBUG) Log.i(TAG, "onStop()");
super.onStop();
}
@Override
protected void onDestroy() {
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
super.onDestroy();
if (mOpenCvCameraView != null)
mOpenCvCameraView.disableView();
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
++frameCounter;
//Log.i(TAG, "Frame number: "+frameCounter);
return inputFrame.rgba();
}
@Override
public void onCameraViewStarted(int width, int height) {
edgesMat = new Mat();
}
@Override
public void onCameraViewStopped() {
if (edgesMat != null)
edgesMat.release();
edgesMat = null;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item.getGroupId() == 1)
{
mOpenCvCameraView.setEffect((String) item.getTitle());
Toast.makeText(this, mOpenCvCameraView.getEffect(), Toast.LENGTH_SHORT).show();
} else if (item.getGroupId() == 2) {
int id = item.getItemId();
Camera.Size resolution = mResolutionList.get(id);
mOpenCvCameraView.setResolution(resolution);
resolution = mOpenCvCameraView.getResolution();
String caption = Integer.valueOf(resolution.width).toString() + "x" + Integer.valueOf(resolution.height).toString();
Toast.makeText(this, caption, Toast.LENGTH_SHORT).show();
}
return true;
}
@Override
public boolean onTouch(View v, MotionEvent event) {
Log.i(TAG,"onTouch event");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
String currentDateandTime = sdf.format(new Date());
String fileName = Environment.getExternalStorageDirectory().getPath() +
"/sample_picture_" + currentDateandTime + ".jpg";
mOpenCvCameraView.takePicture(fileName);
Toast.makeText(this, fileName + " saved", Toast.LENGTH_SHORT).show();
return false;
}
/**
* Click to ImageButton to start recording.
*/
public void onClickBtnStartRecord2(View v) {
if (Static.DEBUG) Log.i(TAG, "onClickBtnStartRecord()");
if(!recording)
startRecording();
else
stopRecording();
}
private void startRecording() {
if (Static.DEBUG) Log.i(TAG, "startRecording()");
initRecorder();
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
private void stopRecording() {
if (Static.DEBUG) Log.i(TAG, "stopRecording()");
runAudioThread = false;
try {
audioThread.join();
} catch(InterruptedException e) {
e.printStackTrace();
}
audioRecordRunnable = null;
audioThread = null;
if(recorder != null && recording) {
recording = false;
Log.v(TAG, "Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch(FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() {
Log.w(TAG, "init recorder");
try {
if (yuvImage == null) {
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
Log.i(TAG, "create yuvImage");
}
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath());
Log.i(TAG, "ffmpeg_url: " + ffmpeg_link.exists());
recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
// Set in the surface changed method
recorder.setFrameRate(frameRate);
Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
} catch (Exception e){
e.printStackTrace();
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
Log.d(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while(runAudioThread) {
//Log.v(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if(bufferReadResult > 0) {
Log.v(TAG, "bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if(recording) {
try {
recorder.recordSamples(audioData);
//Log.v(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch(FFmpegFrameRecorder.Exception e) {
Log.v(TAG, e.getMessage());
e.printStackTrace();
}
}
}
}
Log.v(TAG, "AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if(audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
Log.v(TAG, "audioRecord released");
}
}
}
}
OpenCVCameraPreview.java:

import android.content.Context;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import org.opencv.android.JavaCameraView;
import java.io.FileOutputStream;
import java.util.List;
public class OpenCVCameraPreview extends JavaCameraView implements Camera.PictureCallback {
private static final String TAG = OpenCVCameraPreview.class.getSimpleName();
private String mPictureFileName;
public OpenCVCameraPreview(Context context, AttributeSet attrs) {
super(context, attrs);
}
public List<String> getEffectList() {
return mCamera.getParameters().getSupportedColorEffects();
}
public boolean isEffectSupported() {
return (mCamera.getParameters().getColorEffect() != null);
}
public String getEffect() {
return mCamera.getParameters().getColorEffect();
}
public void setEffect(String effect) {
Camera.Parameters params = mCamera.getParameters();
params.setColorEffect(effect);
mCamera.setParameters(params);
}
public List<Camera.Size> getResolutionList() {
return mCamera.getParameters().getSupportedPreviewSizes();
}
public void setResolution(Camera.Size resolution) {
disconnectCamera();
mMaxHeight = resolution.height;
mMaxWidth = resolution.width;
connectCamera(getWidth(), getHeight());
}
public Camera.Size getResolution() {
return mCamera.getParameters().getPreviewSize();
}
public void takePicture(final String fileName) {
Log.i(TAG, "Taking picture");
this.mPictureFileName = fileName;
// Postview and jpeg are sent in the same buffers if the queue is not empty when performing a capture.
// Clear up buffers to avoid mCamera.takePicture to be stuck because of a memory issue
mCamera.setPreviewCallback(null);
// PictureCallback is implemented by the current class
mCamera.takePicture(null, null, this);
}
@Override
public void onPictureTaken(byte[] data, Camera camera) {
Log.i(TAG, "Saving a bitmap to file");
// The camera preview was automatically stopped. Start it again.
mCamera.startPreview();
mCamera.setPreviewCallback(this);
// Write the image in a file (in jpeg format)
try {
FileOutputStream fos = new FileOutputStream(mPictureFileName);
fos.write(data);
fos.close();
} catch (java.io.IOException e) {
Log.e("PictureDemo", "Exception in photoCallback", e);
}
}
}
Gradle:

apply plugin: 'com.android.application'
android {
compileSdkVersion 23
buildToolsVersion "23.0.2"
defaultConfig {
applicationId "co.example.example"
minSdkVersion 15
targetSdkVersion 23
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
packagingOptions {
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.properties'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/opencv/pom.xml'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.properties'
exclude 'META-INF/maven/org.bytedeco.javacpp-presets/ffmpeg/pom.xml'
}
}
repositories {
mavenCentral()
}
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
testCompile 'junit:junit:4.12'
compile 'com.android.support:appcompat-v7:23.1.1'
compile 'com.google.android.gms:play-services-appindexing:8.1.0'
compile group: 'org.bytedeco', name: 'javacv', version: '1.1'
compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-arm'
compile group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: '3.0.0-1.1', classifier: 'android-x86'
compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-arm'
compile group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: '2.8.1-1.1', classifier: 'android-x86'
compile project(':openCVLibrary310')
}

proguard-rules.pro
Edited by: link

jniLibs:
app/src/main/jniLibs: armeabi, armeabi-v7a, arm64-v8a, mips, mips64, x86, x86_64
Problem
02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: onClickBtnStartRecord()
02-19 11:57:37.684 1759-1759/ I/OpenCVCameraActivity: startRecording()
02-19 11:57:37.684 1759-1759/ W/OpenCVCameraActivity: init recorder
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: create yuvImage
02-19 11:57:37.691 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: /storage/emulated/0/stream.mp4
02-19 11:57:37.696 1759-1759/ I/OpenCVCameraActivity: ffmpeg_url: false
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x1d arg 0x18cc3
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6ffffffe arg 0x21c30
02-19 11:57:37.837 1759-1759/ W/linker: libjniavutil.so: unused DT entry: type 0x6fffffff arg 0x1
02-19 11:57:37.838 1759-1759/co.example.example E/art: dlopen("/data/app/co.example.example-2/lib/x86/libjniavutil.so", RTLD_LAZY) failed: dlopen failed: cannot locate symbol "av_version_info" referenced by "libjniavutil.so"...
02-19 11:57:37.843 1759-1759/co.example.example I/art: Rejecting re-init on previously-failed class java.lang.Class
02-19 11:57:37.844 1759-1759/co.example.example E/AndroidRuntime: FATAL EXCEPTION: main
Process: co.example.example, PID: 1759
java.lang.IllegalStateException: Could not execute method of the activity
at android.view.View$1.onClick(View.java:4020)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
Caused by: java.lang.reflect.InvocationTargetException
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at android.view.View$1.onClick(View.java:4015)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
Caused by: java.lang.UnsatisfiedLinkError: org.bytedeco.javacpp.avutil
at java.lang.Class.classForName(Native Method)
at java.lang.Class.forName(Class.java:309)
at org.bytedeco.javacpp.Loader.load(Loader.java:413)
at org.bytedeco.javacpp.Loader.load(Loader.java:381)
at org.bytedeco.javacpp.avcodec$AVPacket.<clinit>(avcodec.java:1650)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:149)
at org.bytedeco.javacv.FFmpegFrameRecorder.<init>(FFmpegFrameRecorder.java:129)
at co.example.example.OpenCVCameraActivity.initRecorder(OpenCVCameraActivity.java:320)
at co.example.example.OpenCVCameraActivity.startRecording(OpenCVCameraActivity.java:266)
at co.example.example.OpenCVCameraActivity.onClickBtnStartRecord2(OpenCVCameraActivity.java:259)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at android.view.View$1.onClick(View.java:4015)
at android.view.View.performClick(View.java:4780)
at android.view.View$PerformClick.run(View.java:19866)
at android.os.Handler.handleCallback(Handler.java:739)
at android.os.Handler.dispatchMessage(Handler.java:95)
at android.os.Looper.loop(Looper.java:135)
at android.app.ActivityThread.main(ActivityThread.java:5254)
at java.lang.reflect.Method.invoke(Native Method)
at java.lang.reflect.Method.invoke(Method.java:372)
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:903)
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:698)
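The trace shows dlopen failing on libjniavutil.so because the symbol av_version_info cannot be resolved, i.e. the JNI wrapper and the FFmpeg native libraries actually loaded on the device do not match (a typical symptom of mixed library versions, or of ABI folders under jniLibs that the javacpp-presets 2.8.1-1.1 artifacts do not cover). Below is a minimal check, my own sketch rather than code from the question (NativeCheck is a hypothetical helper), that forces the FFmpeg natives to load at startup so the mismatch surfaces before FFmpegFrameRecorder is constructed:

import android.util.Log;

import org.bytedeco.javacpp.Loader;

public final class NativeCheck {
 private static final String TAG = "NativeCheck";

 private NativeCheck() {}

 // Call once, e.g. from Application.onCreate(), before any JavaCV use.
 public static boolean ffmpegLoads() {
 try {
 // Extracts and loads libjniavutil/libjniavcodec plus their
 // dependencies for the ABI the system selected.
 Loader.load(org.bytedeco.javacpp.avutil.class);
 Loader.load(org.bytedeco.javacpp.avcodec.class);
 return true;
 } catch (UnsatisfiedLinkError e) {
 Log.e(TAG, "FFmpeg natives failed to load", e);
 return false;
 }
 }
}

If this fails only on some devices, comparing the ABI folders listed under jniLibs above with the classifiers declared in the Gradle file (android-arm and android-x86 only) is a good place to start.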