
Other articles (21)
-
Accepted formats
28 January 2010
The following commands provide information about the formats and codecs supported by the local ffmpeg installation:
ffmpeg -codecs
ffmpeg -formats
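The same listing can also be captured programmatically by spawning ffmpeg as a child process. A minimal Java sketch, assuming an ffmpeg binary is available on the PATH:

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class ListFfmpegFormats {
    public static void main(String[] args) throws Exception {
        // Run "ffmpeg -formats" (or "-codecs") and print its report line by line.
        Process p = new ProcessBuilder("ffmpeg", "-formats")
                .redirectErrorStream(true) // merge stderr, where ffmpeg prints its banner
                .start();
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                System.out.println(line);
            }
        }
        p.waitFor();
    }
}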
Accepted input video formats
This list is not exhaustive; it highlights the main formats in use:
h264: H.264 / AVC / MPEG-4 AVC / MPEG-4 Part 10
m4v: raw MPEG-4 video format
flv: Flash Video (FLV) / Sorenson Spark / Sorenson H.263
Theora
wmv:
Possible output video formats
At first, we (...)
-
Adding notes and captions to images
7 February 2011
To add notes and captions to images, the first step is to install the "Légendes" plugin.
Once the plugin is activated, you can configure it in the configuration area to change the rights for creating, editing and deleting notes. By default, only site administrators can add notes to images.
Changes when adding a media item
When adding a media item of type "image", a new button appears above the preview (...)
-
Sites built with MediaSPIP
2 May 2011
This page presents some of the sites running MediaSPIP.
You can of course add your own using the form at the bottom of the page.
On other sites (6962)
-
How to create a video from image buffers using fluent-ffmpeg ?
23 June 2016, by Zacky Pickholz
I've been trying to create a slideshow from a series of images using Node.js + fluent-ffmpeg, but it does not work well or consistently. ffmpeg occasionally emits "Error: ffmpeg exited with code 1: pipe:0: Invalid data found when processing input", and when an mp4 is eventually created it seems to be missing images/frames.
The process is as follows: images are loaded into memory, resized to the same dimensions using lwip, and written sequentially into a PassThrough stream, which is fed to ffmpeg as input.
Relevant code snippets:
var lwip = require('lwip');
var ffmpeg = require('fluent-ffmpeg');
var stream = require('stream');
var path = require('path');

var imagesStream = new stream.PassThrough();

...

// resize each image to the target dimensions and append it to the stream fed to ffmpeg
image.batch()
  .contain(options.video.width, options.video.height, 'lanczos')
  .toBuffer(options.frames.format, {quality: 100}, (err, buffer) => {
    if (err) {
      throw ('error converting image to buffer. ' + err);
    }
    imagesStream.write(buffer, 'utf8');
    resolve();
  });

...

// pipe the image stream into ffmpeg, mux it with the audio track and save the result
ffmpeg(imagesStream)
  .inputOptions('-framerate 1/' + options.frames.secsPerImage)
  .input(path.join(AUDIO_ROOT, options.audio.track))
  .save(path.join(path.join(OUTPUT_FOLDER, `${options.video.output.prefix}${timestamp}.${options.video.output.format}`)))
  .size(`${options.video.width}x${options.video.height}`)
  .on('start', () => {
    console.log('creating the clip now...');
  })
  .on('progress', (progress) => {
    var progPercent = Math.round(100 * progress.frames / (numImages * options.frames.secsPerImage * 25));
    progPercent = Math.min(progPercent, 100);
    console.log(`processing: ${progPercent}% done`);
  })
  .on('stderr', (line) => {
    console.error('ffmpeg error: ' + line);
  })
  .on('error', (error) => {
    reject('ffmpeg transcoding error: ' + error);
  })
  .on('end', () => {
    console.log('done!');
    resolve(true);
  })
  .run();

And here is the output:
"C:\Program Files (x86)\JetBrains\WebStorm 2016.1.1\bin\runnerw.exe" "C:\Program Files\nodejs\node.exe" vm2.js
image count: 18
image count: 6
creating the clip now...
ffmpeg error: ffmpeg version N-80335-gcb46b78 Copyright (c) 2000-2016 the FFmpeg developers
ffmpeg error: built with gcc 5.4.0 (GCC)
ffmpeg error: configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-nvenc --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmfx --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-decklink --enable-zlib
ffmpeg error: libavutil 55. 24.100 / 55. 24.100
ffmpeg error: libavcodec 57. 46.100 / 57. 46.100
ffmpeg error: libavformat 57. 38.100 / 57. 38.100
ffmpeg error: libavdevice 57. 0.101 / 57. 0.101
ffmpeg error: libavfilter 6. 46.101 / 6. 46.101
ffmpeg error: libswscale 4. 1.100 / 4. 1.100
ffmpeg error: libswresample 2. 1.100 / 2. 1.100
ffmpeg error: libpostproc 54. 0.100 / 54. 0.100
creating the clip now...
ffmpeg error: ffmpeg version N-80335-gcb46b78 Copyright (c) 2000-2016 the FFmpeg developers
ffmpeg error: built with gcc 5.4.0 (GCC)
ffmpeg error: configuration: --enable-gpl --enable-version3 --disable-w32threads --enable-nvenc --enable-avisynth --enable-bzlib --enable-fontconfig --enable-frei0r --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libbs2b --enable-libcaca --enable-libfreetype --enable-libgme --enable-libgsm --enable-libilbc --enable-libmodplug --enable-libmfx --enable-libmp3lame --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libopus --enable-librtmp --enable-libschroedinger --enable-libsnappy --enable-libsoxr --enable-libspeex --enable-libtheora --enable-libtwolame --enable-libvidstab --enable-libvo-amrwbenc --enable-libvorbis --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxavs --enable-libxvid --enable-libzimg --enable-lzma --enable-decklink --enable-zlib
ffmpeg error: libavutil 55. 24.100 / 55. 24.100
ffmpeg error: libavcodec 57. 46.100 / 57. 46.100
ffmpeg error: libavformat 57. 38.100 / 57. 38.100
ffmpeg error: libavdevice 57. 0.101 / 57. 0.101
ffmpeg error: libavfilter 6. 46.101 / 6. 46.101
ffmpeg error: libswscale 4. 1.100 / 4. 1.100
ffmpeg error: libswresample 2. 1.100 / 2. 1.100
ffmpeg error: libpostproc 54. 0.100 / 54. 0.100
ffmpeg error: pipe:0: Invalid data found when processing input
ffmpeg error:
an error has occurred: ffmpeg transcoding error: Error: ffmpeg exited with code 1: pipe:0: Invalid data found when processing input
ffmpeg error: [jpeg_pipe @ 0000000000308fe0] Format jpeg_pipe detected only with low score of 6, misdetection possible!
ffmpeg error: Input #0, jpeg_pipe, from 'pipe:0':
ffmpeg error: Duration: N/A, bitrate: N/A
ffmpeg error: Stream #0:0: Video: mjpeg, yuvj420p(pc, bt470bg/unknown/unknown), 1920x1080 [SAR 1:1 DAR 16:9], 0.33 tbr, 0.33 tbn, 0.33 tbc
ffmpeg error: [mp3 @ 0000000002fa0720] Estimating duration from bitrate, this may be inaccurate
ffmpeg error: Input #1, mp3, from 'audio\avicii.mp3':
ffmpeg error: Metadata:
ffmpeg error: album : True
ffmpeg error: genre : House
ffmpeg error: copyright : ℗ 2013 Avicii Music AB, / PRMD under exclusive license to Universal Music AB
ffmpeg error: encoded_by : Oz
ffmpeg error: title : Wake Me Up
ffmpeg error: artist : Avicii
ffmpeg error: album_artist : Avicii
ffmpeg error: disc : 1/1
ffmpeg error: track : 1/12
ffmpeg error: TYER : 2013-09-13T07:00:00Z
ffmpeg error: Duration: 00:04:09.73, start: 0.000000, bitrate: 321 kb/s
ffmpeg error: Stream #1:0: Audio: mp3, 44100 Hz, stereo, s16p, 320 kb/s
ffmpeg error: Stream #1:1: Video: mjpeg, yuvj444p(pc, bt470bg/unknown/unknown), 600x600 [SAR 305:305 DAR 1:1], 90k tbr, 90k tbn, 90k tbc
ffmpeg error: Metadata:
ffmpeg error: comment : Cover (front)
ffmpeg error: No pixel format specified, yuvj420p for H.264 encoding chosen.
ffmpeg error: Use -pix_fmt yuv420p for compatibility with outdated media players.
ffmpeg error: [libx264 @ 000000000030e860] using SAR=1/1
ffmpeg error: [libx264 @ 000000000030e860] using cpu capabilities: MMX2 SSE2Fast SSSE3 SSE4.2 AVX
ffmpeg error: [libx264 @ 000000000030e860] profile High, level 4.0
ffmpeg error: [libx264 @ 000000000030e860] 264 - core 148 r2694 3b70645 - H.264/MPEG-4 AVC codec - Copyleft 2003-2016 - http://www.videolan.org/x264.html - options: cabac=1 ref=3 deblock=1:0:0 analyse=0x3:0x113 me=hex subme=7 psy=1 psy_rd=1.00:0.00 mixed_ref=1 me_range=16 chroma_me=1 trellis=1 8x8dct=1 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=-2 threads=12 lookahead_threads=2 sliced_threads=0 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=3 b_pyramid=2 b_adapt=1 b_bias=0 direct=1 weightb=1 open_gop=0 weightp=2 keyint=250 keyint_min=25 scenecut=40 intra_refresh=0 rc_lookahead=40 rc=crf mbtree=1 crf=23.0 qcomp=0.60 qpmin=0 qpmax=69 qpstep=4 ip_ratio=1.40 aq=1:1.00
ffmpeg error: [mp4 @ 0000000002ec6980] Using AVStream.codec to pass codec parameters to muxers is deprecated, use AVStream.codecpar instead.
ffmpeg error: Last message repeated 1 times
ffmpeg error: Output #0, mp4, to 'output\clip_2016-06-22_06-17-25.mp4':
ffmpeg error: Metadata:
ffmpeg error: encoder : Lavf57.38.100
ffmpeg error: Stream #0:0: Video: h264 (libx264) ([33][0][0][0] / 0x0021), yuvj420p(pc), 1920x1080 [SAR 1:1 DAR 16:9], q=-1--1, 25 fps, 12800 tbn, 25 tbc
ffmpeg error: Metadata:
ffmpeg error: encoder : Lavc57.46.100 libx264
ffmpeg error: Side data:
ffmpeg error: cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1
ffmpeg error: Stream #0:1: Audio: aac (LC) ([64][0][0][0] / 0x0040), 44100 Hz, stereo, fltp, 128 kb/s
ffmpeg error: Metadata:
ffmpeg error: encoder : Lavc57.46.100 aac
ffmpeg error: Stream mapping:
ffmpeg error: Stream #0:0 -> #0:0 (mjpeg (native) -> h264 (libx264))
ffmpeg error: Stream #1:0 -> #0:1 (mp3 (native) -> aac (native))
ffmpeg error: frame= 75 fps=0.0 q=28.0 size= 0kB time=00:00:00.64 bitrate= 0.6kbits/s dup=74 drop=0 speed=1.09x
processing: 17% done
ffmpeg error: frame= 150 fps= 97 q=28.0 size= 371kB time=00:00:03.64 bitrate= 835.6kbits/s dup=148 drop=0 speed=2.35x
processing: 33% done
processing: 33% done
ffmpeg error: frame= 150 fps= 73 q=28.0 size= 879kB time=00:00:07.36 bitrate= 977.8kbits/s dup=148 drop=0 speed= 3.6x
processing: 33% done
ffmpeg error: frame= 150 fps= 59 q=28.0 size= 952kB time=00:00:18.36 bitrate= 424.7kbits/s dup=148 drop=0 speed=7.21x
processing: 33% done
ffmpeg error: frame= 150 fps= 49 q=28.0 size= 1190kB time=00:00:32.99 bitrate= 295.3kbits/s dup=148 drop=0 speed=10.8x
ffmpeg error: frame= 150 fps= 42 q=28.0 size= 1409kB time=00:00:46.64 bitrate= 247.4kbits/s dup=148 drop=0 speed=13.1x
processing: 33% done
processing: 33% done
ffmpeg error: frame= 150 fps= 37 q=28.0 size= 1628kB time=00:01:00.30 bitrate= 221.1kbits/s dup=148 drop=0 speed=14.9x
processing: 33% done
ffmpeg error: frame= 150 fps= 33 q=28.0 size= 1878kB time=00:01:15.83 bitrate= 202.8kbits/s dup=148 drop=0 speed=16.7x
processing: 33% done
ffmpeg error: frame= 150 fps= 30 q=28.0 size= 2130kB time=00:01:31.64 bitrate= 190.4kbits/s dup=148 drop=0 speed=18.2x
processing: 33% done
ffmpeg error: frame= 150 fps= 27 q=28.0 size= 2375kB time=00:01:47.18 bitrate= 181.5kbits/s dup=148 drop=0 speed=19.3x
processing: 33% done
ffmpeg error: frame= 150 fps= 25 q=28.0 size= 2626kB time=00:02:03.15 bitrate= 174.7kbits/s dup=148 drop=0 speed=20.4x
processing: 33% done
ffmpeg error: frame= 150 fps= 23 q=28.0 size= 2832kB time=00:02:16.20 bitrate= 170.3kbits/s dup=148 drop=0 speed=20.8x
ffmpeg error: frame= 150 fps= 21 q=28.0 size= 3063kB time=00:02:30.34 bitrate= 166.9kbits/s dup=148 drop=0 speed=21.3x
processing: 33% done
processing: 33% done
ffmpeg error: frame= 150 fps= 20 q=28.0 size= 3298kB time=00:02:44.93 bitrate= 163.8kbits/s dup=148 drop=0 speed=21.8x
processing: 33% done
ffmpeg error: frame= 150 fps= 19 q=28.0 size= 3522kB time=00:02:58.93 bitrate= 161.3kbits/s dup=148 drop=0 speed=22.2x
processing: 33% done
ffmpeg error: frame= 150 fps= 18 q=28.0 size= 3792kB time=00:03:15.83 bitrate= 158.6kbits/s dup=148 drop=0 speed=22.9x
processing: 33% done
ffmpeg error: frame= 150 fps= 17 q=28.0 size= 4035kB time=00:03:31.11 bitrate= 156.6kbits/s dup=148 drop=0 speed=23.3x
processing: 33% done
ffmpeg error: frame= 150 fps= 16 q=28.0 size= 4294kB time=00:03:47.62 bitrate= 154.5kbits/s dup=148 drop=0 speed=23.8x
processing: 33% done
ffmpeg error: frame= 150 fps= 15 q=28.0 size= 4566kB time=00:04:04.87 bitrate= 152.7kbits/s dup=148 drop=0 speed=24.4x
processing: 33% done
ffmpeg error: frame= 150 fps= 14 q=-1.0 Lsize= 4851kB time=00:04:09.73 bitrate= 159.1kbits/s dup=148 drop=0 speed=23.6x
ffmpeg error: video:826kB audio:3978kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 0.967442%
ffmpeg error: [libx264 @ 000000000030e860] frame I:2 Avg QP:14.53 size:414604
ffmpeg error: [libx264 @ 000000000030e860] frame P:38 Avg QP:16.59 size: 222
ffmpeg error: [libx264 @ 000000000030e860] frame B:110 Avg QP:12.67 size: 69
ffmpeg error: [libx264 @ 000000000030e860] consecutive B-frames: 1.3% 2.7% 0.0% 96.0%
ffmpeg error: [libx264 @ 000000000030e860] mb I I16..4: 25.5% 49.4% 25.1%
ffmpeg error: [libx264 @ 000000000030e860] mb P I16..4: 0.0% 0.0% 0.0% P16..4: 0.7% 0.0% 0.0% 0.0% 0.0% skip:99.2%
ffmpeg error: [libx264 @ 000000000030e860] mb B I16..4: 0.0% 0.0% 0.0% B16..8: 0.0% 0.0% 0.0% direct: 0.0% skip:100.0% L0: 1.2% L1:98.8% BI: 0.0%
ffmpeg error: [libx264 @ 000000000030e860] 8x8 transform intra:49.4% inter:92.1%
ffmpeg error: [libx264 @ 000000000030e860] coded y,uvDC,uvAC intra: 74.2% 73.7% 69.0% inter: 0.0% 0.2% 0.0%
ffmpeg error: [libx264 @ 000000000030e860] i16 v,h,dc,p: 97% 0% 2% 1%
ffmpeg error: [libx264 @ 000000000030e860] i8 v,h,dc,ddl,ddr,vr,hd,vl,hu: 13% 19% 15% 7% 9% 7% 10% 7% 13%
ffmpeg error: [libx264 @ 000000000030e860] i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 12% 21% 8% 8% 11% 9% 12% 7% 13%
ffmpeg error: [libx264 @ 000000000030e860] i8c dc,h,v,p: 55% 19% 16% 10%
ffmpeg error: [libx264 @ 000000000030e860] Weighted P-Frames: Y:0.0% UV:0.0%
ffmpeg error: [libx264 @ 000000000030e860] ref P L0: 95.4% 0.6% 3.1% 0.9%
ffmpeg error: [libx264 @ 000000000030e860] ref B L1: 98.8% 1.2%
ffmpeg error: [libx264 @ 000000000030e860] kb/s:1127.01
ffmpeg error: [aac @ 0000000002ecc880] Qavg: 541.237
ffmpeg error:
done!
-
Encoding Exception during Transcode with audio files
6 May 2016, by Hakop Zakaryan
I am attempting to transcode using an FFmpeg wrapper library called JAVE, on Mac OS X 10.11.3 with Eclipse 4.50.
My Converter.java class looks something like this:
package matador;
import it.sauronsoftware.jave.AudioAttributes;
import it.sauronsoftware.jave.EncodingAttributes;
import it.sauronsoftware.jave.EncoderException;
import it.sauronsoftware.jave.InputFormatException;
import it.sauronsoftware.jave.Encoder;
import java.io.*;
public class Converter {
public static void main(String[] args) throws InputFormatException, EncoderException {
File source = new File("Classic.m4a");
File target = new File("target.mp3");
AudioAttributes audio = new AudioAttributes();
audio.setCodec("libmp3lame");
audio.setBitRate(new Integer(128000));
audio.setChannels(new Integer(2));
audio.setSamplingRate(new Integer(44100));
EncodingAttributes attrs = new EncodingAttributes();
attrs.setFormat("mp3");
attrs.setAudioAttributes(audio);
Encoder encoder = new Encoder(new MyFFMPEGExecutableLocator());
try {
encoder.encode(source, target, attrs, null);
} catch (IllegalArgumentException e) {
e.printStackTrace();
} catch (InputFormatException e) {
e.printStackTrace();
} catch (EncoderException e) {
e.printStackTrace();
}
}
}
The problem I am running into is that specific audio files (regardless of format) give me this EncoderException:
it.sauronsoftware.jave.EncoderException: Metadata:
at it.sauronsoftware.jave.Encoder.encode(Encoder.java:863)
at matador.Converter.main(Converter.java:32)
I have looked through Encoder.java, and the EncoderException at line 863 is thrown by this specific code:
} else if (!line.startsWith("Output #0")) {
throw new EncoderException(line);
I have been unable to figure out why this occurs: some audio files (WAV/AAC/etc.) do encode, yet the majority just give this exception.
Thank you for the help!
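The exception message suggests the offending line is literally "Metadata:". In the Encoder.java source below, parseMultimediaInfo() pushes back the first line it does not recognize, such as the Metadata: block that current ffmpeg builds print under Input #0, and encode() then throws because that pushed-back line does not start with "Output #0". As a rough, hypothetical illustration (it assumes ffmpeg is on the PATH and reuses the Classic.m4a and libmp3lame settings from Converter.java above), here is a standalone scan that simply skips header lines until "Output #0" appears instead of failing on them:

import java.io.BufferedReader;
import java.io.InputStreamReader;

public class TolerantHeaderScan {
    public static void main(String[] args) throws Exception {
        // Roughly the command line JAVE builds from the attributes used in Converter.java.
        Process p = new ProcessBuilder(
                "ffmpeg", "-i", "Classic.m4a",
                "-acodec", "libmp3lame", "-ab", "128000", "-ac", "2", "-ar", "44100",
                "-f", "mp3", "-y", "target.mp3")
                .redirectErrorStream(true) // ffmpeg reports on stderr
                .start();
        boolean outputSeen = false;
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            String line;
            while ((line = r.readLine()) != null) {
                // Tolerate anything (Metadata:, Side data:, ...) printed before the
                // "Output #0" header instead of treating it as an error.
                if (!outputSeen && line.trim().startsWith("Output #0")) {
                    outputSeen = true;
                }
                System.out.println(line);
            }
        }
        System.out.println("exit=" + p.waitFor() + " sawOutput=" + outputSeen);
    }
}

Applying the same idea inside Encoder.encode(), skipping unrecognized header lines rather than throwing, would avoid the exception shown above, at the risk of hiding genuine errors.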
Edit: As per request, and in case it helps further, here is the entirety of the Encoder.java code:
package it.sauronsoftware.jave;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class Encoder {
private static final Pattern FORMAT_PATTERN = Pattern
.compile("^\\s*([D ])([E ])\\s+([\\w,]+)\\s+.+$");
private static final Pattern ENCODER_DECODER_PATTERN = Pattern.compile(
"^\\s*([D ])([E ])([AVS]).{3}\\s+(.+)$", Pattern.CASE_INSENSITIVE);
private static final Pattern PROGRESS_INFO_PATTERN = Pattern.compile(
"\\s*(\\w+)\\s*=\\s*(\\S+)\\s*", Pattern.CASE_INSENSITIVE);
private static final Pattern SIZE_PATTERN = Pattern.compile(
"(\\d+)x(\\d+)", Pattern.CASE_INSENSITIVE);
private static final Pattern FRAME_RATE_PATTERN = Pattern.compile(
"([\\d.]+)\\s+(?:fps|tb\\(r\\))", Pattern.CASE_INSENSITIVE);
private static final Pattern BIT_RATE_PATTERN = Pattern.compile(
"(\\d+)\\s+kb/s", Pattern.CASE_INSENSITIVE);
private static final Pattern SAMPLING_RATE_PATTERN = Pattern.compile(
"(\\d+)\\s+Hz", Pattern.CASE_INSENSITIVE);
private static final Pattern CHANNELS_PATTERN = Pattern.compile(
"(mono|stereo)", Pattern.CASE_INSENSITIVE);
private static final Pattern SUCCESS_PATTERN = Pattern.compile(
"^\\s*video\\:\\S+\\s+audio\\:\\S+\\s+global headers\\:\\S+.*$",
Pattern.CASE_INSENSITIVE);
private FFMPEGLocator locator;
public Encoder() {
this.locator = new DefaultFFMPEGLocator();
}
public Encoder(FFMPEGLocator locator) {
this.locator = locator;
}
public String[] getAudioDecoders() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = ENCODER_DECODER_PATTERN.matcher(line);
if (matcher.matches()) {
String decoderFlag = matcher.group(1);
String audioVideoFlag = matcher.group(3);
if ("D".equals(decoderFlag)
&& "A".equals(audioVideoFlag)) {
String name = matcher.group(4);
res.add(name);
}
} else {
break;
}
} else if (line.trim().equals("Codecs:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public String[] getAudioEncoders() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = ENCODER_DECODER_PATTERN.matcher(line);
if (matcher.matches()) {
String encoderFlag = matcher.group(2);
String audioVideoFlag = matcher.group(3);
if ("E".equals(encoderFlag)
&& "A".equals(audioVideoFlag)) {
String name = matcher.group(4);
res.add(name);
}
} else {
break;
}
} else if (line.trim().equals("Codecs:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public String[] getVideoDecoders() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = ENCODER_DECODER_PATTERN.matcher(line);
if (matcher.matches()) {
String decoderFlag = matcher.group(1);
String audioVideoFlag = matcher.group(3);
if ("D".equals(decoderFlag)
&& "V".equals(audioVideoFlag)) {
String name = matcher.group(4);
res.add(name);
}
} else {
break;
}
} else if (line.trim().equals("Codecs:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public String[] getVideoEncoders() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = ENCODER_DECODER_PATTERN.matcher(line);
if (matcher.matches()) {
String encoderFlag = matcher.group(2);
String audioVideoFlag = matcher.group(3);
if ("E".equals(encoderFlag)
&& "V".equals(audioVideoFlag)) {
String name = matcher.group(4);
res.add(name);
}
} else {
break;
}
} else if (line.trim().equals("Codecs:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public String[] getSupportedEncodingFormats() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = FORMAT_PATTERN.matcher(line);
if (matcher.matches()) {
String encoderFlag = matcher.group(2);
if ("E".equals(encoderFlag)) {
String aux = matcher.group(3);
StringTokenizer st = new StringTokenizer(aux, ",");
while (st.hasMoreTokens()) {
String token = st.nextToken().trim();
if (!res.contains(token)) {
res.add(token);
}
}
}
} else {
break;
}
} else if (line.trim().equals("File formats:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public String[] getSupportedDecodingFormats() throws EncoderException {
ArrayList res = new ArrayList();
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-formats");
try {
ffmpeg.execute();
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getInputStream()));
String line;
boolean evaluate = false;
while ((line = reader.readLine()) != null) {
if (line.trim().length() == 0) {
continue;
}
if (evaluate) {
Matcher matcher = FORMAT_PATTERN.matcher(line);
if (matcher.matches()) {
String decoderFlag = matcher.group(1);
if ("D".equals(decoderFlag)) {
String aux = matcher.group(3);
StringTokenizer st = new StringTokenizer(aux, ",");
while (st.hasMoreTokens()) {
String token = st.nextToken().trim();
if (!res.contains(token)) {
res.add(token);
}
}
}
} else {
break;
}
} else if (line.trim().equals("File formats:")) {
evaluate = true;
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
int size = res.size();
String[] ret = new String[size];
for (int i = 0; i < size; i++) {
ret[i] = (String) res.get(i);
}
return ret;
}
public MultimediaInfo getInfo(File source) throws InputFormatException,
EncoderException {
FFMPEGExecutor ffmpeg = locator.createExecutor();
ffmpeg.addArgument("-i");
ffmpeg.addArgument(source.getAbsolutePath());
try {
ffmpeg.execute();
} catch (IOException e) {
throw new EncoderException(e);
}
try {
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getErrorStream()));
return parseMultimediaInfo(source, reader);
} finally {
ffmpeg.destroy();
}
}
private MultimediaInfo parseMultimediaInfo(File source,
RBufferedReader reader) throws InputFormatException,
EncoderException {
Pattern p1 = Pattern.compile("^\\s*Input #0, (\\w+).+$\\s*",
Pattern.CASE_INSENSITIVE);
Pattern p2 = Pattern.compile(
"^\\s*Duration: (\\d\\d):(\\d\\d):(\\d\\d)\\.(\\d).*$",
Pattern.CASE_INSENSITIVE);
Pattern p3 = Pattern.compile(
"^\\s*Stream #\\S+: ((?:Audio)|(?:Video)|(?:Data)): (.*)\\s*$",
Pattern.CASE_INSENSITIVE);
MultimediaInfo info = null;
try {
int step = 0;
while (true) {
String line = reader.readLine();
if (line == null) {
break;
}
if (step == 0) {
String token = source.getAbsolutePath() + ": ";
if (line.startsWith(token)) {
String message = line.substring(token.length());
throw new InputFormatException(message);
}
Matcher m = p1.matcher(line);
if (m.matches()) {
String format = m.group(1);
info = new MultimediaInfo();
info.setFormat(format);
step++;
}
} else if (step == 1) {
Matcher m = p2.matcher(line);
if (m.matches()) {
long hours = Integer.parseInt(m.group(1));
long minutes = Integer.parseInt(m.group(2));
long seconds = Integer.parseInt(m.group(3));
long dec = Integer.parseInt(m.group(4));
long duration = (dec * 100L) + (seconds * 1000L)
+ (minutes * 60L * 1000L)
+ (hours * 60L * 60L * 1000L);
info.setDuration(duration);
step++;
} else {
step = 3;
}
} else if (step == 2) {
Matcher m = p3.matcher(line);
if (m.matches()) {
String type = m.group(1);
String specs = m.group(2);
if ("Video".equalsIgnoreCase(type)) {
VideoInfo video = new VideoInfo();
StringTokenizer st = new StringTokenizer(specs, ",");
for (int i = 0; st.hasMoreTokens(); i++) {
String token = st.nextToken().trim();
if (i == 0) {
video.setDecoder(token);
} else {
boolean parsed = false;
// Video size.
Matcher m2 = SIZE_PATTERN.matcher(token);
if (!parsed && m2.find()) {
int width = Integer.parseInt(m2
.group(1));
int height = Integer.parseInt(m2
.group(2));
video.setSize(new VideoSize(width,
height));
parsed = true;
}
// Frame rate.
m2 = FRAME_RATE_PATTERN.matcher(token);
if (!parsed && m2.find()) {
try {
float frameRate = Float
.parseFloat(m2.group(1));
video.setFrameRate(frameRate);
} catch (NumberFormatException e) {
;
}
parsed = true;
}
// Bit rate.
m2 = BIT_RATE_PATTERN.matcher(token);
if (!parsed && m2.find()) {
int bitRate = Integer.parseInt(m2
.group(1));
video.setBitRate(bitRate);
parsed = true;
}
}
}
info.setVideo(video);
} else if ("Audio".equalsIgnoreCase(type)) {
AudioInfo audio = new AudioInfo();
StringTokenizer st = new StringTokenizer(specs, ",");
for (int i = 0; st.hasMoreTokens(); i++) {
String token = st.nextToken().trim();
if (i == 0) {
audio.setDecoder(token);
} else {
boolean parsed = false;
// Sampling rate.
Matcher m2 = SAMPLING_RATE_PATTERN
.matcher(token);
if (!parsed && m2.find()) {
int samplingRate = Integer.parseInt(m2
.group(1));
audio.setSamplingRate(samplingRate);
parsed = true;
}
// Channels.
m2 = CHANNELS_PATTERN.matcher(token);
if (!parsed && m2.find()) {
String ms = m2.group(1);
if ("mono".equalsIgnoreCase(ms)) {
audio.setChannels(1);
} else if ("stereo"
.equalsIgnoreCase(ms)) {
audio.setChannels(2);
}
parsed = true;
}
// Bit rate.
m2 = BIT_RATE_PATTERN.matcher(token);
if (!parsed && m2.find()) {
int bitRate = Integer.parseInt(m2
.group(1));
audio.setBitRate(bitRate);
parsed = true;
}
}
}
info.setAudio(audio);
}
} else {
step = 3;
}
}
if (step == 3) {
reader.reinsertLine(line);
break;
}
}
} catch (IOException e) {
throw new EncoderException(e);
}
if (info == null) {
throw new InputFormatException();
}
return info;
}
private Hashtable parseProgressInfoLine(String line) {
Hashtable table = null;
Matcher m = PROGRESS_INFO_PATTERN.matcher(line);
while (m.find()) {
if (table == null) {
table = new Hashtable();
}
String key = m.group(1);
String value = m.group(2);
table.put(key, value);
}
return table;
}
public void encode(File source, File target, EncodingAttributes attributes)
throws IllegalArgumentException, InputFormatException,
EncoderException {
encode(source, target, attributes, null);
}
public void encode(File source, File target, EncodingAttributes attributes,
EncoderProgressListener listener) throws IllegalArgumentException,
InputFormatException, EncoderException {
String formatAttribute = attributes.getFormat();
Float offsetAttribute = attributes.getOffset();
Float durationAttribute = attributes.getDuration();
AudioAttributes audioAttributes = attributes.getAudioAttributes();
VideoAttributes videoAttributes = attributes.getVideoAttributes();
if (audioAttributes == null && videoAttributes == null) {
throw new IllegalArgumentException(
"Both audio and video attributes are null");
}
target = target.getAbsoluteFile();
target.getParentFile().mkdirs();
FFMPEGExecutor ffmpeg = locator.createExecutor();
if (offsetAttribute != null) {
ffmpeg.addArgument("-ss");
ffmpeg.addArgument(String.valueOf(offsetAttribute.floatValue()));
}
ffmpeg.addArgument("-i");
ffmpeg.addArgument(source.getAbsolutePath());
if (durationAttribute != null) {
ffmpeg.addArgument("-t");
ffmpeg.addArgument(String.valueOf(durationAttribute.floatValue()));
}
if (videoAttributes == null) {
ffmpeg.addArgument("-vn");
} else {
String codec = videoAttributes.getCodec();
if (codec != null) {
ffmpeg.addArgument("-vcodec");
ffmpeg.addArgument(codec);
}
String tag = videoAttributes.getTag();
if (tag != null) {
ffmpeg.addArgument("-vtag");
ffmpeg.addArgument(tag);
}
Integer bitRate = videoAttributes.getBitRate();
if (bitRate != null) {
ffmpeg.addArgument("-b");
ffmpeg.addArgument(String.valueOf(bitRate.intValue()));
}
Integer frameRate = videoAttributes.getFrameRate();
if (frameRate != null) {
ffmpeg.addArgument("-r");
ffmpeg.addArgument(String.valueOf(frameRate.intValue()));
}
VideoSize size = videoAttributes.getSize();
if (size != null) {
ffmpeg.addArgument("-s");
ffmpeg.addArgument(String.valueOf(size.getWidth()) + "x"
+ String.valueOf(size.getHeight()));
}
}
if (audioAttributes == null) {
ffmpeg.addArgument("-an");
} else {
String codec = audioAttributes.getCodec();
if (codec != null) {
ffmpeg.addArgument("-acodec");
ffmpeg.addArgument(codec);
}
Integer bitRate = audioAttributes.getBitRate();
if (bitRate != null) {
ffmpeg.addArgument("-ab");
ffmpeg.addArgument(String.valueOf(bitRate.intValue()));
}
Integer channels = audioAttributes.getChannels();
if (channels != null) {
ffmpeg.addArgument("-ac");
ffmpeg.addArgument(String.valueOf(channels.intValue()));
}
Integer samplingRate = audioAttributes.getSamplingRate();
if (samplingRate != null) {
ffmpeg.addArgument("-ar");
ffmpeg.addArgument(String.valueOf(samplingRate.intValue()));
}
Integer volume = audioAttributes.getVolume();
if (volume != null) {
ffmpeg.addArgument("-vol");
ffmpeg.addArgument(String.valueOf(volume.intValue()));
}
}
ffmpeg.addArgument("-f");
ffmpeg.addArgument(formatAttribute);
ffmpeg.addArgument("-y");
ffmpeg.addArgument(target.getAbsolutePath());
try {
ffmpeg.execute();
} catch (IOException e) {
throw new EncoderException(e);
}
try {
String lastWarning = null;
long duration;
long progress = 0;
RBufferedReader reader = null;
reader = new RBufferedReader(new InputStreamReader(ffmpeg
.getErrorStream()));
MultimediaInfo info = parseMultimediaInfo(source, reader);
if (durationAttribute != null) {
duration = (long) Math
.round((durationAttribute.floatValue() * 1000L));
} else {
duration = info.getDuration();
if (offsetAttribute != null) {
duration -= (long) Math
.round((offsetAttribute.floatValue() * 1000L));
}
}
if (listener != null) {
listener.sourceInfo(info);
}
int step = 0;
String line;
while ((line = reader.readLine()) != null) {
if (step == 0) {
if (line.startsWith("WARNING: ")) {
if (listener != null) {
listener.message(line);
}
} else if (!line.startsWith("Output #0")) {
throw new EncoderException(line);
} else {
step++;
}
} else if (step == 1) {
if (!line.startsWith(" ")) {
step++;
}
}
if (step == 2) {
if (!line.startsWith("Stream mapping:")) {
throw new EncoderException(line);
} else {
step++;
}
} else if (step == 3) {
if (!line.startsWith(" ")) {
step++;
}
}
if (step == 4) {
line = line.trim();
if (line.length() > 0) {
Hashtable table = parseProgressInfoLine(line);
if (table == null) {
if (listener != null) {
listener.message(line);
}
lastWarning = line;
} else {
if (listener != null) {
String time = (String) table.get("time");
if (time != null) {
int dot = time.indexOf('.');
if (dot > 0 && dot == time.length() - 2
&& duration > 0) {
String p1 = time.substring(0, dot);
String p2 = time.substring(dot + 1);
try {
long i1 = Long.parseLong(p1);
long i2 = Long.parseLong(p2);
progress = (i1 * 1000L)
+ (i2 * 100L);
int perm = (int) Math
.round((double) (progress * 1000L)
/ (double) duration);
if (perm > 1000) {
perm = 1000;
}
listener.progress(perm);
} catch (NumberFormatException e) {
;
}
}
}
}
lastWarning = null;
}
}
}
}
if (lastWarning != null) {
if (!SUCCESS_PATTERN.matcher(lastWarning).matches()) {
throw new EncoderException(lastWarning);
}
}
} catch (IOException e) {
throw new EncoderException(e);
} finally {
ffmpeg.destroy();
}
}
}
-
Android recording video with overlay view
6 March 2016, by t0m
I am trying to build an Android app that can capture video with overlay views. (I also need the onPreviewFrame method.)
Via SurfaceView and JavaCV with FFmpeg:
OpenCVCameraActivity.java:
import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.Toast;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.List;
@SuppressWarnings("ALL")
public class OpenCVCameraActivity extends Activity {
private static final String TAG = OpenCVCameraActivity.class.getSimpleName();
private long startTime = 0;
private boolean isPreviewOn = false;
private int sampleAudioRateInHz = 44100;
private Camera.Size previewSize; //preview and Camera and Recorder width and height
private int recorderFrameRate = 25;
// audio data getting thread
private AudioRecord audioRecord;
private AudioRecordRunnable audioRecordRunnable;
private Thread audioThread;
private volatile boolean runAudioThread = true;
// video data getting thread
private Camera mCamera;
private CameraView mPreview;
private FFmpegFrameRecorder recorder;
private boolean recording = false;
private Frame yuvImage = null;
//storage
private Storage storage;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if(Static.DEBUG) Log.i(TAG, "onCreate()");
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler);
setContentView(R.layout.activity_opencv);
prepareCamera();
}
private Thread.UncaughtExceptionHandler uncaughtExceptionHandler =
new Thread.UncaughtExceptionHandler() {
public void uncaughtException(Thread thread, Throwable ex) {
if(Static.DEBUG) Log.e(TAG, "Uncaught exception", ex);
}
};
@Override
protected void onRestart() {
super.onRestart();
if (Static.DEBUG) Log.i(TAG, "onRestart()");
}
@Override
protected void onStart() {
super.onStart();
if (Static.DEBUG) Log.i(TAG, "onStart()");
}
@Override
protected void onResume() {
super.onResume();
if (Static.DEBUG) Log.i(TAG, "onResume()");
storage = new Storage(this);
if(storage.mExternalStorageAvailable == true && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorageReadOnly), Toast.LENGTH_LONG);
else if (storage.mExternalStorageAvailable == false && storage.mExternalStorageWriteable == false)
Static.showToast(this, getString(R.string.errExternalStorage), Toast.LENGTH_LONG);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
if (Static.DEBUG) Log.i(TAG, "onCreateOptionsMenu()");
return super.onCreateOptionsMenu(menu);
}
@Override
protected void onPause() {
super.onPause();
if (Static.DEBUG) Log.i(TAG, "onPause()");
}
@Override
protected void onStop() {
super.onStop();
if (Static.DEBUG) Log.i(TAG, "onStop()");
}
@Override
protected void onDestroy() {
super.onDestroy();
if (Static.DEBUG) Log.i(TAG, "onDestroy()");
recording = false;
if (mPreview != null) {
mPreview.stopPreview();
}
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
/** Prepare camera object.
* */
private void prepareCamera() {
//1. Open camera object
try {
mCamera = getCameraInstance(this);
} catch (Exception e) {
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
finish();
return;
}
setOptimalCameraParams();
//2. Connect Preview + 3. Start Preview + 8e Stop preview in the surfaceChanged method
mPreview = new CameraView(this, mCamera);
FrameLayout preview = (FrameLayout) findViewById(R.id.cameraPreview);
preview.addView(mPreview); //surfaceView to FrameLayout
if(Static.DEBUG) Log.i(TAG, "camera preview start: OK");
}
/**
* A safe way to get an instance of the Camera object.
*/
@SuppressWarnings("deprecation")
public static Camera getCameraInstance(Context ctx) throws Exception {
Camera c = Camera.open();
if (c == null)
throw new Exception(ctx.getString(R.string.errCameraNotAvailable));
if(Static.DEBUG) Log.i(TAG, "camera open");
return c; // attempt to get a Camera instance, otherwise null
}
/** Sets optimal camera parameters, depending on hardware capabilities. */
@SuppressWarnings("deprecation")
private void setOptimalCameraParams(){
// Camera parameters
Camera.Parameters params = mCamera.getParameters();
List<String> focusModes = params.getSupportedFocusModes();
if (Static.DEBUG) Log.i(TAG, "focusModes():" + focusModes.toString());
if (Static.DEBUG) Log.i(TAG, "Camera parameters:\n" + params.flatten());
params.setRecordingHint(true); //MediaRecorder.start() to start faster
//Automatically autofocus if it's possible
if (params.getSupportedFocusModes().contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) { //at least focus auto
params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
// set Camera parameters
mCamera.setParameters(params);
}
//---------------------------------------
// initialize ffmpeg_recorder
//---------------------------------------
private void initRecorder() throws Exception {
if(Static.DEBUG) Log.i(TAG,"init recorder");
File output = null;
try {
output = storage.getOutputMediaFile(storage.MEDIA_TYPE_VIDEO);
if(output == null)
throw new Exception();
} catch (Exception e) {
e.printStackTrace();
throw new Exception(getString(R.string.errSetOutputFile));
}
if (yuvImage == null) {
yuvImage = new Frame(previewSize.width, previewSize.height, Frame.DEPTH_UBYTE, 2);
if(Static.DEBUG) Log.i(TAG, "create yuvImage");
}
if(Static.DEBUG) Log.i(TAG, "ffmpeg_url: " + output.getPath());
recorder = new FFmpegFrameRecorder(output.getPath(), previewSize.width, previewSize.height, 1);
//recorder.setVideoCodec(avcodec.AV_CODEC_ID_H264);
//recorder.setVideoOption("preset", "veryfast"); // or ultrafast or fast, etc.
//recorder.setVideoQuality(0); // maximum quality, replace recorder.setVideoBitrate(16384);
//recorder.setPixelFormat(avutil.AV_PIX_FMT_YUV420P);
recorder.setFormat("mp4");
recorder.setSampleRate(sampleAudioRateInHz);
recorder.setFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG, "recorder initialize success");
audioRecordRunnable = new AudioRecordRunnable();
audioThread = new Thread(audioRecordRunnable);
runAudioThread = true;
}
public void startRecording() {
try {
initRecorder();
} catch (Exception e){
e.printStackTrace();
Static.showToast(this, e.getMessage(), Toast.LENGTH_LONG);
}
try {
recorder.start();
startTime = System.currentTimeMillis();
recording = true;
audioThread.start();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
}
public void stopRecording() {
runAudioThread = false;
try {
audioThread.join();
} catch (InterruptedException e) {
// reset interrupt to be nice
Thread.currentThread().interrupt();
return;
}
audioRecordRunnable = null;
audioThread = null;
if (recorder != null && recording) {
recording = false;
if(Static.DEBUG) Log.i(TAG,"Finishing recording, calling stop and release on recorder");
try {
recorder.stop();
recorder.release();
} catch (FFmpegFrameRecorder.Exception e) {
e.printStackTrace();
}
recorder = null;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (recording) {
stopRecording();
}
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
public void onClickBtnStartRecord(View v) {
if (!recording) {
startRecording();
if(Static.DEBUG) Log.i(TAG, "Start Button Pushed");
} else {
// This will trigger the audio recording loop to stop and then set isRecorderStart = false;
stopRecording();
if(Static.DEBUG) Log.i(TAG, "Stop Button Pushed");
}
}
//---------------------------------------------
// audio thread, gets and encodes audio data
//---------------------------------------------
class AudioRecordRunnable implements Runnable {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
// Audio
int bufferSize;
ShortBuffer audioData;
int bufferReadResult;
bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleAudioRateInHz,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = ShortBuffer.allocate(bufferSize);
if(Static.DEBUG) Log.i(TAG, "audioRecord.startRecording()");
audioRecord.startRecording();
/* ffmpeg_audio encoding loop */
while (runAudioThread) {
//if(Static.DEBUG) Log.i(TAG,"recording? " + recording);
bufferReadResult = audioRecord.read(audioData.array(), 0, audioData.capacity());
audioData.limit(bufferReadResult);
if (bufferReadResult > 0) {
//if(Static.DEBUG) Log.i(TAG,"bufferReadResult: " + bufferReadResult);
// If "recording" isn't true when start this thread, it never get's set according to this if statement...!!!
// Why? Good question...
if (recording) {
try {
recorder.recordSamples(audioData);
//if(Static.DEBUG) Log.i(TAG,"recording " + 1024*i + " to " + 1024*i+1024);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
if(Static.DEBUG) Log.i(TAG,"AudioThread Finished, release audioRecord");
/* encoding finish, release recorder */
if (audioRecord != null) {
audioRecord.stop();
audioRecord.release();
audioRecord = null;
if(Static.DEBUG) Log.i(TAG,"audioRecord released");
}
}
}
/**TODO*/
private Camera.Size getBestPreviewSize(int width, int height, Camera.Parameters parameters) {
Camera.Size result=null;
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
if(Static.DEBUG) Log.i(TAG, size.width + "/" + size.height);
if (size.width<=width && size.height<=height) {
if (result==null) {
result=size;
} else {
int resultArea=result.width*result.height;
int newArea=size.width*size.height;
if (newArea>resultArea) {
result=size;
}
}
}
}
return(result);
}
//---------------------------------------------
// camera thread, gets and encodes video data
//---------------------------------------------
private class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
private final String TAG = CameraView.class.getSimpleName();
private SurfaceHolder mHolder;
private Camera mCamera;
public CameraView(Context context, Camera camera) {
super(context);
if(Static.DEBUG) Log.i(TAG, "camera view");
mCamera = camera;
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(CameraView.this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewCallback(CameraView.this);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
stopPreview();
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
if(Static.DEBUG) Log.i(TAG, "surfaceChanged() => w=" + w + ", h=" + h);
// If your preview can change or rotate, take care of those events here.
// Make sure to stop the preview before resizing or reformatting it.
if (mHolder.getSurface() == null){
// preview surface does not exist
return;
}
// stop preview before making changes
try {
stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
try {
Camera.Parameters params = mCamera.getParameters();
previewSize = getBestPreviewSize(w, h, params);
if(Static.DEBUG) Log.i(TAG, "getBestPreviewSize() => w=" + previewSize.width + ", h=" + previewSize.height);
if (previewSize != null)
params.setPreviewSize(previewSize.width, previewSize.height);
params.setPreviewFrameRate(recorderFrameRate);
if(Static.DEBUG) Log.i(TAG,"Preview Framerate: " + params.getPreviewFrameRate());
mCamera.setParameters(params);
mCamera.setPreviewDisplay(holder);
mCamera.setPreviewCallback(CameraView.this);
startPreview();
} catch (Exception e){
if(Static.DEBUG) Log.i(TAG, "Could not set preview display in surfaceChanged");
e.printStackTrace();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
try {
mHolder.addCallback(null);
mCamera.setPreviewCallback(null);
} catch (RuntimeException e) {
// The camera has probably just been released, ignore.
}
}
public void startPreview() {
if (!isPreviewOn && mCamera != null) {
isPreviewOn = true;
mCamera.startPreview();
}
}
public void stopPreview() {
if (isPreviewOn && mCamera != null) {
isPreviewOn = false;
mCamera.stopPreview();
}
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
startTime = System.currentTimeMillis();
return;
}
// get video data
if (yuvImage != null && recording) {
((ByteBuffer)yuvImage.image[0].position(0)).put(data);
try {
long t = 1000 * (System.currentTimeMillis() - startTime);
if(Static.DEBUG) Log.i(TAG,"Writing Frame on timestamp: "+t);
if (t > recorder.getTimestamp()) {
recorder.setTimestamp(t);
}
recorder.record(yuvImage);
} catch (FFmpegFrameRecorder.Exception e) {
if(Static.DEBUG) Log.i(TAG,e.getMessage());
e.printStackTrace();
}
}
}
}
}
activity_opencv.xml:
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout>
    <FrameLayout>
    </FrameLayout>
    <ImageButton
        android:id="@+id/btnStartRecord"
        android:layout_width="70dp"
        android:layout_height="70dp"
        android:scaleType="fitXY"
        android:src="@drawable/record_icon"
        android:background="@null"
        android:text="@string/btnStartRecord"
        android:onClick="onClickBtnStartRecord"
        android:clickable="true"
        android:layout_centerVertical="true"
        android:layout_alignParentRight="true"
        android:layout_alignParentEnd="true"/>
    <TextView></TextView>
</RelativeLayout>
Overlay views are working, but the recorded video does not contain the overlays.
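The code above records the raw camera frames, so anything drawn by overlay views on top of the SurfaceView never reaches the recorder. One possible direction, sketched here as a hypothetical helper (names are illustrative; performance and pixel-format details are glossed over): decode each preview frame into a Bitmap, draw the overlay view onto it with a Canvas, and record the composited Bitmap through JavaCV's AndroidFrameConverter.

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.view.View;

import org.bytedeco.javacv.AndroidFrameConverter;
import org.bytedeco.javacv.FFmpegFrameRecorder;
import org.bytedeco.javacv.Frame;

import java.io.ByteArrayOutputStream;

public class OverlayCompositor {
    private final AndroidFrameConverter converter = new AndroidFrameConverter();

    /** Composites the overlay view over an NV21 preview frame and records the result. */
    public void recordWithOverlay(byte[] nv21, int width, int height,
                                  View overlayView, FFmpegFrameRecorder recorder)
            throws FFmpegFrameRecorder.Exception {
        // 1. Decode the NV21 preview frame into a mutable Bitmap (simple but slow).
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, width, height), 90, jpeg);
        byte[] bytes = jpeg.toByteArray();
        Bitmap frameBitmap = BitmapFactory.decodeByteArray(bytes, 0, bytes.length)
                .copy(Bitmap.Config.ARGB_8888, true);

        // 2. Draw the overlay view on top of the camera frame, scaled to the frame size.
        Canvas canvas = new Canvas(frameBitmap);
        canvas.save();
        canvas.scale((float) width / overlayView.getWidth(),
                (float) height / overlayView.getHeight());
        overlayView.draw(canvas);
        canvas.restore();

        // 3. Convert the composited Bitmap to a JavaCV Frame and record it.
        // (Depending on the recorder setup, the RGBA pixel format may need to be set explicitly.)
        Frame frame = converter.convert(frameBitmap);
        recorder.record(frame);
    }
}

Calling something like this from onPreviewFrame() instead of recorder.record(yuvImage) would put the overlays into the recorded file, at the cost of a per-frame YUV to JPEG to Bitmap round trip.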