
Media (3)
-
MediaSPIP Simple : futur thème graphique par défaut ?
26 September 2013, by
Updated: October 2013
Language: French
Type: Video
-
GetID3 - Bloc informations de fichiers
9 April 2013, by
Updated: May 2013
Language: French
Type: Image
-
GetID3 - Boutons supplémentaires
9 April 2013, by
Updated: April 2013
Language: French
Type: Image
Other articles (20)
-
Selection of projects using MediaSPIP
2 May 2011, by
The examples below are representative of specific uses of MediaSPIP in specific projects.
MediaSPIP farm @ Infini
The non-profit organization Infini develops hospitality activities, an internet access point, training, innovative projects in the field of information and communication technologies, and website hosting. It plays a unique and prominent role in the Brest (France) area and at the national level, among the half-dozen associations of this kind. Its members (...) -
L’espace de configuration de MediaSPIP
29 November 2010, by
The MediaSPIP configuration space is reserved for administrators. An "administer" menu link is generally displayed at the top of the page [1].
It lets you configure your site in detail.
The navigation of this configuration space is divided into three parts: the general configuration of the site, which in particular lets you modify the main information about the site (...) -
Installation en mode ferme
4 February 2011, by
Farm mode makes it possible to host several MediaSPIP sites while installing their functional core only once.
This is the method we use on this very platform.
Using farm mode requires some familiarity with how SPIP works, unlike the standalone version, which does not really require specific knowledge since SPIP's usual private area is no longer used.
First of all, you must have installed the same files as the installation (...)
On other sites (5292)
-
ffmpeg command not working with white space of directory path in Android
13 April 2017, by Sakib Syed
I am working with the FFMPEG library, with which I want to rotate a video. It works fine as long as the file path contains no white space. But in my case the video directory path does contain white space (you can see the full path in String commandStr in the onPreExecute() method of the AsyncTask), and then it does not work at all. I have seen similar questions, but I still have no idea how to resolve it properly. Below is my code of MainActivity.class:
public class MainActivity extends AppCompatActivity implements View.OnClickListener{
String workFolder = null;
String demoVideoFolder = null;
String vkLogPath = null;
private boolean commandValidationFailedFlag = false;
private Button btnRun;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
GeneralUtils.checkForPermissionsMAndAbove(MainActivity.this, true);
setIds();
setListner();
demoVideoFolder = Environment.getExternalStorageDirectory().getAbsolutePath() + "/videokit/";
Log.i(Prefs.TAG, getString(R.string.app_name) + " version: " + GeneralUtils.getVersionName(getApplicationContext()) );
workFolder = getApplicationContext().getFilesDir().getAbsolutePath() + "/";
vkLogPath = workFolder + "vk.log";
GeneralUtils.copyLicenseFromAssetsToSDIfNeeded(this, workFolder);
GeneralUtils.copyDemoVideoFromAssetsToSDIfNeeded(this, demoVideoFolder);
int rc = GeneralUtils.isLicenseValid(getApplicationContext(), workFolder);
Log.i(Prefs.TAG, "License check RC: " + rc);
}
private void setListner() {
btnRun.setOnClickListener(this);
}
private void setIds() {
try {
btnRun = (Button)findViewById(R.id.btnRun);
} catch (Exception e) {
e.printStackTrace();
}
}
@Override
public void onClick(View view) {
switch (view.getId()){
case R.id.btnRun:
Log.i(Prefs.TAG, "run clicked.");
if (GeneralUtils.checkIfFileExistAndNotEmpty(workFolder)) {
new TranscdingBackground(MainActivity.this).execute();
}
else {
Toast.makeText(getApplicationContext(), workFolder + " not found", Toast.LENGTH_LONG).show();
}
break;
}
}
public class TranscdingBackground extends AsyncTask<String, Integer, Integer>
{
ProgressDialog progressDialog;
Activity _act;
String commandStr;
public TranscdingBackground (Activity act) {
_act = act;
}
@Override
protected void onPreExecute() {
// commandStr = "ffmpeg -y -i /storage/emulated/0/WhatsApp/Media/in.mp4 -vf rotate=270*(PI/180) /sdcard/videokit/out.mp4";
commandStr = "ffmpeg -y -i /storage/emulated/0/WhatsApp/Media/WhatsApp Video/in.mp4 -vf rotate=270*(PI/180) /sdcard/videokit/out.mp4";
progressDialog = new ProgressDialog(_act);
progressDialog.setMessage("FFmpeg4Android Transcoding in progress...");
progressDialog.show();
}
protected Integer doInBackground(String... paths) {
Log.i(Prefs.TAG, "doInBackground started...");
// delete previous log
boolean isDeleted = GeneralUtils.deleteFileUtil(workFolder + "/vk.log");
Log.i(Prefs.TAG, "vk deleted: " + isDeleted);
PowerManager powerManager = (PowerManager)_act.getSystemService(Activity.POWER_SERVICE);
PowerManager.WakeLock wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, "VK_LOCK");
Log.d(Prefs.TAG, "Acquire wake lock");
wakeLock.acquire();
LoadJNI vk = new LoadJNI();
try {
vk.run(GeneralUtils.utilConvertToComplex(commandStr), workFolder, getApplicationContext());
// copying vk.log (internal native log) to the videokit folder
GeneralUtils.copyFileToFolder(vkLogPath, demoVideoFolder);
} catch (CommandValidationException e) {
Log.e(Prefs.TAG, "vk run exeption.", e);
commandValidationFailedFlag = true;
} catch (Throwable e) {
Log.e(Prefs.TAG, "vk run exeption.", e);
}
finally {
if (wakeLock.isHeld())
wakeLock.release();
else{
Log.i(Prefs.TAG, "Wake lock is already released, doing nothing");
}
}
Log.i(Prefs.TAG, "doInBackground finished");
return Integer.valueOf(0);
}
protected void onProgressUpdate(Integer... progress) {
}
@Override
protected void onCancelled() {
Log.i(Prefs.TAG, "onCancelled");
//progressDialog.dismiss();
super.onCancelled();
}
@Override
protected void onPostExecute(Integer result) {
Log.i(Prefs.TAG, "onPostExecute");
progressDialog.dismiss();
super.onPostExecute(result);
// finished Toast
String rc = null;
if (commandValidationFailedFlag) {
rc = "Command Vaidation Failed";
}
else {
rc = GeneralUtils.getReturnCodeFromLog(vkLogPath);
}
final String status = rc;
MainActivity.this.runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(MainActivity.this, status, Toast.LENGTH_LONG).show();
if (status.equals("Transcoding Status: Failed")) {
Toast.makeText(MainActivity.this, "Check: " + vkLogPath + " for more information.", Toast.LENGTH_LONG).show();
}
}
});
}
}
}
Here, in the onPreExecute() method, I have given the video file path.
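One likely cause, assuming GeneralUtils.utilConvertToComplex simply splits the command string on spaces (an assumption about that helper, not something stated in the question), is that the path is broken into several arguments at "WhatsApp Video". A minimal sketch of a workaround is to build the argument array yourself so the path stays a single element; the same LoadJNI.run call already takes a String[] here:
// Hypothetical replacement for commandStr + utilConvertToComplex:
// keep the path containing spaces as ONE array element.
String inPath = "/storage/emulated/0/WhatsApp/Media/WhatsApp Video/in.mp4";
String outPath = "/sdcard/videokit/out.mp4";
String[] command = {
        "ffmpeg", "-y",
        "-i", inPath,                 // spaces preserved, nothing to split
        "-vf", "rotate=270*(PI/180)",
        outPath
};
LoadJNI vk = new LoadJNI();
vk.run(command, workFolder, getApplicationContext());
-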
After scaling video, it converts the transparent background to white
8 March 2017, by Sohaib Raza
I scaled a video using ffmpeg. After scaling, the output video has a white background, while the input video has a transparent background.
The command below is used to scale the video:
ffmpeg -i inputVideo.mp4 -vf scale=100:100 outputVideo.mp4
You can see the images below, taken from my video, to better understand my problem.
The first image, from the input video, has no white background.
The second image, from the output video, has a white background.
Please give me your valuable answer regarding this question. Any answer will be much appreciated.
D: onProgress: configuration: --target-os=linux --cross-prefix=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/bin/arm-linux-androideabi- --arch=arm --cpu=cortex-a8 --enable-runtime-cpudetect --sysroot=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/sysroot --enable-pic --enable-libx264 --enable-libass --enable-libfreetype --enable-libfribidi --enable-libmp3lame --enable-fontconfig --enable-pthreads --disable-debug --disable-ffserver --enable-version3 --enable-hardcoded-tables --disable-ffplay --disable-ffprobe --enable-gpl --enable-yasm --disable-doc --disable-shared --enable-static --pkg-config=/home/vagrant/SourceCode/ffmpeg-android/ffmpeg-pkg-config --prefix=/home/vagrant/SourceCode/ffmpeg-android/build/armeabi-v7a --extra-cflags='-I/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/include -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fno-strict-overflow -fstack-protector-all' --extra-ldflags='-L/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/lib -Wl,-z,relro -Wl,-z,now -pie' --extra-libs='-lpng -lexpat -lm' --extra-cxxflags=
D: onProgress: libavutil 55. 17.103 / 55. 17.103
D: onProgress: libavcodec 57. 24.102 / 57. 24.102
D: onProgress: libavformat 57. 25.100 / 57. 25.100
D: onProgress: libavdevice 57. 0.101 / 57. 0.101
D: onProgress: libavfilter 6. 31.100 / 6. 31.100
D: onProgress: libswscale 4. 0.100 / 4. 0.100
D: onProgress: libswresample 2. 0.101 / 2. 0.101
D: onProgress: libpostproc 54. 0.100 / 54. 0.100
D: onProgress: Input #0, gif, from '/storage/emulated/0/Pictures/eye.gif':
D: onProgress: Duration: N/A, bitrate: N/A
D: onProgress: Stream #0:0: Video: gif, bgra, 480x190, 5 fps, 10 tbr, 100 tbn, 100 tbc
D: onProgress: Output #0, gif, to '/storage/emulated/0/GIF/REV20170308_101145.gif':
D: onProgress: Metadata:
D: onProgress: encoder : Lavf57.25.100
D: onProgress: Stream #0:0: Video: gif, bgr8, 125x56, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc
D: onProgress: Metadata:
D: onProgress: encoder : Lavc57.24.102 gif
D: onProgress: Stream mapping:
D: onProgress: Stream #0:0 -> #0:0 (gif (native) -> gif (native))
D: onProgress: Press [q] to stop, [?] for help
D: onProgress: frame= 11 fps=0.0 q=-0.0 Lsize= 17kB time=00:00:02.90 bitrate= 48.8kbits/s speed=14.8x
D: onProgress: video:16kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 5.287261%
D: Started command : ffmpeg [Ljava.lang.String;@422f62d0
D: onSuccess: ffmpeg version n3.0.1 Copyright (c) 2000-2016 the FFmpeg developers
built with gcc 4.8 (GCC)
configuration: --target-os=linux --cross-prefix=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/bin/arm-linux-androideabi- --arch=arm --cpu=cortex-a8 --enable-runtime-cpudetect --sysroot=/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/sysroot --enable-pic --enable-libx264 --enable-libass --enable-libfreetype --enable-libfribidi --enable-libmp3lame --enable-fontconfig --enable-pthreads --disable-debug --disable-ffserver --enable-version3 --enable-hardcoded-tables --disable-ffplay --disable-ffprobe --enable-gpl --enable-yasm --disable-doc --disable-shared --enable-static --pkg-config=/home/vagrant/SourceCode/ffmpeg-android/ffmpeg-pkg-config --prefix=/home/vagrant/SourceCode/ffmpeg-android/build/armeabi-v7a --extra-cflags='-I/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/include -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fno-strict-overflow -fstack-protector-all' --extra-ldflags='-L/home/vagrant/SourceCode/ffmpeg-android/toolchain-android/lib -Wl,-z,relro -Wl,-z,now -pie' --extra-libs='-lpng -lexpat -lm' --extra-cxxflags=
libavutil 55. 17.103 / 55. 17.103
libavcodec 57. 24.102 / 57. 24.102
libavformat 57. 25.100 / 57. 25.100
libavdevice 57. 0.101 / 57. 0.101
libavfilter 6. 31.100 / 6. 31.100
libswscale 4. 0.100 / 4. 0.100
libswresample 2. 0.101 / 2. 0.101
libpostproc 54. 0.100 / 54. 0.100
Input #0, gif, from '/storage/emulated/0/Pictures/eye.gif':
Duration: N/A, bitrate: N/A
Stream #0:0: Video: gif, bgra, 480x190, 5 fps, 10 tbr, 100 tbn, 100 tbc
Output #0, gif, to '/storage/emulated/0/GIF/REV20170308_101145.gif':
Metadata:
encoder : Lavf57.25.100
Stream #0:0: Video: gif, bgr8, 125x56, q=2-31, 200 kb/s, 10 fps, 100 tbn, 10 tbc
Metadata:
encoder : Lavc57.24.102 gif
Stream mapping:
Stream #0:0 -> #0:0 (gif (native) -> gif (native))
Press [q] to stop, [?] for help
frame= 11 fps=0.0 q=-0.0 Lsize= 17kB time=00:00:02.90 bitrate= 48.8kbits/s speed=14.8x
video:16kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 5.287261%
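Two details in this log are worth noting. First, it shows a GIF being re-encoded to GIF (bgra in, bgr8 out), not the .mp4 files from the command above, and the generic GIF palette (bgr8) has no transparent entry, which is consistent with the background turning white. Second, an H.264 .mp4 in yuv420p cannot carry an alpha channel at all, so transparency can never survive into that container. For a GIF-to-GIF scale, a minimal sketch of a command that keeps transparency (file names are placeholders; palettegen reserves a transparent palette entry by default, and palettegen/paletteuse exist since FFmpeg 2.6, so the n3.0.1 build in this log should have them) is:
ffmpeg -i input.gif -filter_complex "[0:v]scale=100:100:flags=lanczos,split[a][b];[a]palettegen[p];[b][p]paletteuse" output.gif
-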
libavformat : calling avformat_open_input 2 times results in decoding white frames
25 April 2017, by explodus
- prebuilt ffmpeg libs: format/util/scale
- version 57.56.101
- don’t use any deprecated function
- use the current API style
- av_read_frame -> avcodec_send_packet -> avcodec_receive_frame -> sws_scale
Everything is fine on the first run, but when I want to load/open another file I only get white frames.
void video::app::flush_cached_frames() {
if (nullptr == avcontext)
return;
if (nullptr == avpicture)
return;
// send an empty packet which instructs the codec to start flushing
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = NULL;
pkt.size = 0;
avcodec_send_packet(avcontext, &pkt);
// drain the codec
while (true) {
int r = avcodec_receive_frame(avcontext, avpicture);
if (r != 0)
break;
}
}
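As a side note, and only as an assumption about what the reset between files needs: if the intent is simply to return the decoder to a clean state before reusing or destroying it, libavcodec has a dedicated call that is simpler than draining with an empty packet:
// Sketch: reset decoder state between files.
if (avcontext)
    avcodec_flush_buffers(avcontext); // discards packets/frames buffered inside the decoder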
void video::app::close_avi() {
flush_cached_frames();
if (avformat && avstream)
seek_to_frame(0);
avstream = nullptr;
if (avfile)
fclose(avfile);
avfile = nullptr;
if (avcontext)
avcodec_close(avcontext);
avcontext = nullptr;
if (avformat)
avformat_free_context(avformat);
avformat = nullptr;
if (sws_ctx)
sws_freeContext(sws_ctx);
sws_ctx = nullptr;
if (avparser)
av_parser_close(avparser);
avparser = nullptr;
if (avinbuf)
av_free(avinbuf);
avinbuf = nullptr;
}
I think I close everything properly. Does anyone have an idea?
Edit 1: init/load
unsigned video::app::load(const std::string& name_) {
_file = name_;
close_avi();
av_register_all();
avcodec_register_all();
av_init_packet(&avpkt);
AVCodecID codec_id = AV_CODEC_ID_H264;
int64_t duration = 0;
double fps = .0;
int ret = 0;
{
av_log_set_level(1);
avfile = fopen(name_.c_str(), "rb");
avformat = avformat_alloc_context();
ret = avformat_open_input(&avformat, name_.c_str(), nullptr, nullptr);
ret = avformat_find_stream_info(avformat, nullptr);
duration = avformat->duration;
avstream = nullptr;
if (avformat->nb_streams == 1) {
avstream = avformat->streams[0];
} else {
avstream = avformat->streams[av_find_default_stream_index(avformat)];
}
if (avstream) {
fps = (double(avstream->avg_frame_rate.num) / double(avstream->avg_frame_rate.den));
codec_id = avstream->codecpar->codec_id;
duration = avstream->duration;
_vid.v_width = avstream->codecpar->width;
_vid.v_height = avstream->codecpar->height;
_vid.lastframe = duration / fps;
_vid.lastframe = avstream->nb_frames;
}
avcodec = avcodec_find_decoder(avstream->codecpar->codec_id);
avparser = av_parser_init(avcodec->id);
avcontext = avcodec_alloc_context3(avcodec);
avcontext->flags |= AVFMT_FLAG_NONBLOCK;
avcontext->flags |= AVFMT_FLAG_FLUSH_PACKETS;
avcontext->flags |= AVFMT_FLAG_DISCARD_CORRUPT;
avcontext->flags |= AVFMT_FLAG_NOBUFFER;
ret = avcodec_parameters_to_context(avcontext, avstream->codecpar);
ret = avcodec_open2(avcontext, avcodec, nullptr);
// Determine required buffer size and allocate buffer
auto numBytes = av_image_get_buffer_size(
AV_PIX_FMT_BGRA
, avcontext->width
, avcontext->height
, 1);
if (avinbuf)
av_free(avinbuf);
avinbuf = nullptr;
avinbuf = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
ret = av_image_fill_arrays(
avrgb->data
, avrgb->linesize
, avinbuf
, AV_PIX_FMT_BGRA
, avcontext->width
, avcontext->height
, 1);
sws_ctx = sws_getContext(
avcontext->width
, avcontext->height
, avcontext->pix_fmt
, avcontext->width
, avcontext->height
, AV_PIX_FMT_BGRA
, SWS_BILINEAR
, nullptr
, nullptr
, nullptr
);
}
int err = (sws_ctx && avcontext && avformat) ? 0 : 1;
// ...
}
Getting the frame:
uint8_t * video::app::get_frame(uint32_t frame) {
if (!avcontext)
return nullptr;
if (!avformat)
return nullptr;
if (!avpicture)
return nullptr;
if (!avfile)
return nullptr;
try {
int ret = 0;
if (avpicture->data)
av_frame_unref(avpicture);
while (true) {
if ((ret = av_read_frame(avformat, &avpkt)) < 0)
break;
if (avpkt.stream_index == avstream->index) {
ret = avcodec_send_packet(avcontext, &avpkt);
if (ret < 0)
break;
while (ret >= 0) {
ret = avcodec_receive_frame(avcontext, avpicture);
if (ret == AVERROR_EOF) {
return nullptr;
} else if (ret == -11) {
avpkt.data = nullptr;
avpkt.size = 0;
break;
} else if (ret < 0) {
return nullptr;
}
if (ret == AVERROR(EAGAIN)) {
avpkt.data = nullptr;
avpkt.size = 0;
break;
}
if (ret >= 0) {
int linesize[AV_NUM_DATA_POINTERS] = {
avpicture->linesize[0]
, avpicture->linesize[1]
, avpicture->linesize[2]
, avpicture->linesize[3]
, avpicture->linesize[4]
, avpicture->linesize[5]
, avpicture->linesize[6]
, avpicture->linesize[7]
};
uint8_t * data[AV_NUM_DATA_POINTERS] = {
avpicture->data[0]
, avpicture->data[1]
, avpicture->data[2]
, avpicture->data[3]
, avpicture->data[4]
, avpicture->data[5]
, avpicture->data[6]
, avpicture->data[7]
};
{
// flip the frame, never ever touch this thing again!
// If the planes in the image are unequal size(e.g.YUV420) you need to adapt the height.
auto h = avcontext->height;
for (int i = 0; i < 4; i++) {
if (i)
data[i] += linesize[i] * ((h >> 1) - 1);
else
data[i] += linesize[i] * (h - 1);
linesize[i] = -linesize[i];
}
}
ret = sws_scale(
sws_ctx
, (uint8_t const * const *)data
, linesize
, 0
, avcontext->height
, avrgb->data
, avrgb->linesize);
av_packet_unref(&avpkt);
currPts = avpkt.dts;
currPts *= av_q2d(avstream->time_base);
usleep(1000000 * (currPts - prevPts));
prevPts = currPts;
return avrgb->data[0];
}
}
}
av_packet_unref(&avpkt);
}
} catch (...) {
}
return nullptr;
}
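One thing that stands out in close_avi(), offered as an observation rather than a verified fix for the white frames: a context opened with avformat_open_input should be shut down with avformat_close_input (which also frees it); avformat_free_context alone does not close the demuxer or its I/O. Likewise, a decoder context from avcodec_alloc_context3 is released with avcodec_free_context, and the separate fopen/fclose of avfile is unnecessary because avformat_open_input opens the file by name itself. A minimal teardown sketch under those assumptions:
void video::app::close_avi() {
    if (avcontext) {
        avcodec_flush_buffers(avcontext);  // drop any buffered frames
        avcodec_free_context(&avcontext);  // closes the codec and nulls the pointer
    }
    if (avformat)
        avformat_close_input(&avformat);   // closes demuxer + I/O, frees and nulls the pointer
    avstream = nullptr;                    // owned by the format context, not freed separately
    if (sws_ctx) { sws_freeContext(sws_ctx); sws_ctx = nullptr; }
    if (avparser) { av_parser_close(avparser); avparser = nullptr; }
    if (avinbuf)  { av_free(avinbuf); avinbuf = nullptr; }
}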