
Other articles (112)
-
Customizing by adding your logo, banner or background image
5 September 2013. Some themes support three customization elements: adding a logo; adding a banner; adding a background image.
-
Writing a news item
21 June 2013. Present the changes to your MediaSPIP, or news about your projects hosted on your MediaSPIP, through the news section.
In MediaSPIP's default theme, spipeo, news items are displayed at the bottom of the main page, below the editorials.
You can customize the form used to create a news item.
News item creation form: for a document of type news item, the fields offered by default are: publication date (customize the publication date) (...)
-
Publishing on MediaSPIP
13 June 2013. Can I post content from an iPad tablet?
Yes, if your installed MediaSPIP is at version 0.2 or higher. If necessary, contact the administrator of your MediaSPIP to find out.
On other sites (6617)
-
Unable to transfer continuous FFmpeg buffer to client browser using node.js
10 December 2016, by chintitomasud. I have tried to transcode a video file on demand with FFmpeg, transferring the chunks (buffers) to the client browser as MP4, but I failed to get the MP4 content to show in the HTML5 video player. Without FFmpeg, all the code runs properly; I replaced createReadStream with FFmpeg, and in doing so I have run into some problems. FFmpeg is new to me and I'm somewhat confused by the spawn method. When I request a URL path, the following text is shown on the command line:
Spawning new process /samiul113039/1080.mp4:GET
piping ffmpeg output to client, pid 10016
HTTP connection disrupted, killing ffmpeg: 10016
Spawning new process /samiul113039/1080.mp4:GET
piping ffmpeg output to client, pid 4796
HTTP connection disrupted, killing ffmpeg: 4796
ffmpeg didn't quit on q, sending signals
ffmpeg has exited: 10016, code null
ffmpeg didn't quit on q, sending signals
ffmpeg has exited: 4796, code null
var fs=require('fs');
var url=require("url");
var urlvalue="http://csestudents.uiu.ac.bd/samiul113039/1080.mp4";
var parseurl=url.parse(urlvalue);
var HDHomeRunIP = parseurl.hostname;
var HDHomeRunPort = parseurl.port;
var childKillTimeoutMs = 1000;
var parseArgs = require('minimist')(process.argv.slice(2));
// define startsWith for string
if (typeof String.prototype.startsWith != 'function') {
// see below for better implementation!
String.prototype.startsWith = function (str){
return this.indexOf(str) == 0;
};
}
// Called when the response object fires the 'close' handler, kills ffmpeg
function responseCloseHandler(command) {
if (command.exited != true) {
console.log('HTTP connection disrupted, killing ffmpeg: ' + command.pid);
// Send a 'q' which signals ffmpeg to quit.
// Then wait half a second, send a nice signal, wait another half second
// and send SIGKILL
command.stdin.write('q\n');
command.stdin.destroy();
// install timeout and wait
setTimeout(function() {
if (command.exited != true) {
console.log('ffmpeg didn\'t quit on q, sending signals');
// still connected, do safe sig kills
command.kill();
try {
command.kill('SIGQUIT');
} catch (err) {}
try {
command.kill('SIGINT');
} catch (err) {}
// wait some more!
setTimeout(function() {
if (command.exited != true) {
console.log('ffmpeg didn\'t quit on signals, sending SIGKILL');
// at this point, just give up and whack it
try {
command.kill('SIGKILL');
} catch (err) {}
}
}, childKillTimeoutMs);
}
}, childKillTimeoutMs);
}
}
// Performs a proxy. Copies data from proxy_request into response
function doProxy(request,response,http,options) {
var proxy_request = http.request(options, function (proxy_response) {
proxy_response.on('data', function(chunk) {
response.write(chunk, 'binary');
});
proxy_response.on('end', function() {
response.end();
});
response.writeHead(proxy_response.statusCode, proxy_response.headers);
});
request.on('data', function(chunk) {
proxy_request.write(chunk, 'binary');
});
// error handler
proxy_request.on('error', function(e) {
console.log('problem with request: ' + e.message);
response.writeHeader(500);
response.end();
});
proxy_request.end();
}
var child_process = require('child_process');
var auth = require('./auth');
// Performs the transcoding after the URL is validated
function doTranscode(request,response) {
//res.setHeader("Accept-Ranges", "bytes");
response.setHeader("Accept-Ranges", "bytes");
response.setHeader("Content-Type", "video/mp4");
response.setHeader("Connection","close");
response.setHeader("Cache-Control","no-cache");
response.setHeader("Pragma","no-cache");
// always write the header
response.writeHeader(200);
// if get, spawn command stream it
if (request.method == 'GET') {
console.log('Spawning new process ' + request.url + ":" + request.method);
var command = child_process.spawn('ffmpeg',
['-i','http://csestudents.uiu.ac.bd/samiul113039/1080.mp4','-f','mpegts','-'],
{ stdio: ['pipe','pipe','ignore'] });
command.exited = false;
// handler for when ffmpeg dies unexpectedly
command.on('exit',function(code,signal) {
console.log('ffmpeg has exited: ' + command.pid + ", code " + code);
// set flag saying we've quit
command.exited = true;
response.end();
});
command.on('error',function(error) {
console.log('ffmpeg error handler - unable to kill: ' + command.pid);
// oh well, might as well give up
command.exited = true;
try {
command.stdin.close();
} catch (err) {}
try {
command.stdout.close();
} catch (err) {}
try {
command.stderr.close();
} catch (err) {}
response.end();
});
// handler for when client closes the URL connection - stop ffmpeg
response.on('end',function() {
responseCloseHandler(command);
});
// handler for when client closes the URL connection - stop ffmpeg
response.on('close',function() {
responseCloseHandler(command);
});
// now stream
console.log('piping ffmpeg output to client, pid ' + command.pid);
command.stdout.pipe(response);
command.stdin.on('error',function(err) {
console.log("Weird error in stdin pipe ", err);
response.end();
});
command.stdout.on('error',function(err) {
console.log("Weird error in stdout pipe ",err);
response.end();
});
}
else {
// not GET, so close response
response.end();
}
}
// Load the http module to create an http server.
var http = require('http');
// Configure our HTTP server to respond with Hello World to all requests.
var server = http.createServer(function (request, response) {
//console.log("New connection from " + request.socket.remoteAddress + ":" + request.url);
if (auth.validate(request,response)) {
// first send a HEAD request to our HD Home Run with the same url to see if the address is valid.
// This prevents an ffmpeg instance to spawn when clients request invalid things - like robots.txt/etc
var options = {method: 'HEAD', hostname: HDHomeRunIP, port: HDHomeRunPort, path: request.url};
var req = http.request(options, function(res) {
// if they do a get, and it returns good status
if (request.method == "GET" &&
res.statusCode == 200 &&
res.headers["content-type"] != null &&
res.headers["content-type"].startsWith("video")) {
// transcode is possible, start it now!
doTranscode(request,response);
}
else {
// no video or error, cannot transcode, just forward the response from the HD Home run to the client
if (request.method == "HEAD") {
response.writeHead(res.statusCode,res.headers);
response.end();
}
else {
// do a 301 redirect and have the device response directly
// just proxy it, that way browser doesn't redirect to HDHomeRun IP but keeps the node.js server IP
options = {method: request.method, hostname: HDHomeRunIP, /* port: HDHomeRunPort, */path: request.url};
doProxy(request,response,http,options);
}
}
});
req.on('error', function(e) {
console.log('problem with request: ' + e.message);
response.writeHeader(500);
response.end();
});
// finish the client request, rest of processing done in the async callbacks
req.end();
}
});
// turn on no delay for tcp
server.on('connection', function (socket) {
socket.setNoDelay(true);
});
server.listen(7000);
Actually, the code was written by someone else; I have just modified it. The spawn options include stdio: ['pipe','pipe','ignore']; what do 'pipe', 'pipe' and 'ignore' mean here?
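A minimal sketch of what that option controls, based on Node's child_process API: the stdio array configures the child's three standard streams in order (index 0 is stdin, 1 is stdout, 2 is stderr); 'pipe' creates a pipe shared with the parent process, while 'ignore' discards that stream entirely.
var child_process = require('child_process');
// stdio maps to [stdin, stdout, stderr]: pipe stdin and stdout, discard stderr.
var child = child_process.spawn('ffmpeg',
    ['-i', 'http://csestudents.uiu.ac.bd/samiul113039/1080.mp4', '-f', 'mpegts', '-'],
    { stdio: ['pipe', 'pipe', 'ignore'] });
// child.stdin is writable (the code above writes 'q' to it to ask ffmpeg to quit),
// child.stdout is readable (the MPEG-TS bytes that get piped to the HTTP response),
// child.stderr is null, because 'ignore' silently drops ffmpeg's log output.
child.stdout.on('data', function (chunk) {
    console.log('received ' + chunk.length + ' bytes of MPEG-TS');
});
child.on('exit', function (code) {
    console.log('ffmpeg exited with code ' + code);
});
Dropping stderr avoids having to drain ffmpeg's verbose logging, at the cost of losing its error messages when something goes wrong.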
-
php-ffmpeg too slow - generating thumbnails?
25 April 2013, by Wiggler Jtag. This is my first time trying to create a thumbnail preview from a video file; I uploaded a 700 MB .avi film with 209747 frames. I am now trying to create one thumbnail, but it takes 4.7 seconds because I set the frame to 10000; if I set it to just 1000, it takes only 0.4 seconds to generate the thumbnail.
How could I generate around 5-10 thumbnails from different frames on the fly in less than a second? Is it even possible? Does it make a difference whether I use exec(ffmpeg) or php-ffmpeg?
I am using ffmpeg 0.6-svn, Debian 6.0.7 and PHP 5.4.14 on a machine with two Xeon L5420 CPUs, and it is still slow... Any ideas? What about using ffmpeg with a time offset into the video instead of a frame number? (A sketch of that idea follows the snippet below.)
$movie = 'ai.avi';
$thumbnail = 'thumbnail.jpg';
$mov = new ffmpeg_movie($movie);
$frame = 10000;
$frame = $mov->getFrame($frame);
if($frame) {
$gd_image = $frame->toGDImage();
if($gd_image) {
imagejpeg($gd_image, $thumbnail, 100);
imagedestroy($gd_image);
}
}
echo '<img src="'.$thumbnail.'" style="max-width: 300px; max-height: 300px" /><br />';
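One way to test the time-based idea from the question is to call the ffmpeg binary with -ss placed before -i (a fast, keyframe-based seek) and extract a single frame. A minimal sketch, written in Node.js to match the other excerpt on this page rather than in PHP; the file names come from the snippet above, the timestamp is a placeholder, and the same -ss/-vframes flags apply when invoking ffmpeg through PHP's exec():
var child_process = require('child_process');
// Seek by time before opening the input (fast), then grab a single frame as a JPEG.
// 'ai.avi' and 'thumbnail.jpg' are taken from the snippet above; '00:02:00' is a placeholder.
child_process.execFile('ffmpeg',
    ['-y', '-ss', '00:02:00', '-i', 'ai.avi', '-vframes', '1', 'thumbnail.jpg'],
    function (error, stdout, stderr) {
        if (error) {
            console.error('ffmpeg failed: ' + error.message);
            return;
        }
        console.log('thumbnail.jpg written');
    });
Seeking by time lets ffmpeg jump close to the requested position instead of decoding its way through 10000 frames, which is why the frame-number approach gets slower the deeper into the file the requested frame is.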
-
FFMpeg encoded video will only play in FFPlay
8 November 2013, by mohM. I've been debugging my program for a couple of weeks now, with the output video showing only a blank screen (I was testing with VLC, WMP and WMP Classic). I happened to try FFplay and, lo and behold, the video plays perfectly. I've read that this is usually caused by an incorrect pixel format, and that switching to PIX_FMT_YUV420P will make it work universally... but I'm already using that pixel format in the encoding process. Is there anything else that could be causing this?
AVCodec* codec;
AVCodecContext* c = NULL;
uint8_t* outbuf;
int i, out_size, outbuf_size;
avcodec_register_all();
printf("Video encoding\n");
// Find the H.264 video encoder
codec = avcodec_find_encoder(CODEC_ID_H264);
if (!codec) {
fprintf(stderr, "Codec not found\n");
exit(1);
}
else printf("H264 codec found\n");
c = avcodec_alloc_context3(codec);
c->bit_rate = 400000;
c->width = 1920; // resolution must be a multiple of two (1280x720),(1900x1080),(720x480)
c->height = 1200;
c->time_base.num = 1; // framerate numerator
c->time_base.den = 25; // framerate denominator
c->gop_size = 10; // emit one intra frame every ten frames
c->max_b_frames = 1; // maximum number of b-frames between non b-frames
//c->keyint_min = 1; // minimum GOP size
//c->i_quant_factor = (float)0.71; // qscale factor between P and I frames
//c->b_frame_strategy = 20;
//c->qcompress = (float)0.6;
//c->qmin = 20; // minimum quantizer
//c->qmax = 51; // maximum quantizer
//c->max_qdiff = 4; // maximum quantizer difference between frames
//c->refs = 4; // number of reference frames
//c->trellis = 1; // trellis RD Quantization
c->pix_fmt = PIX_FMT_YUV420P;
c->codec_id = CODEC_ID_H264;
//c->codec_type = AVMEDIA_TYPE_VIDEO;
// Open the encoder
if (avcodec_open2(c, codec,NULL) < 0) {
fprintf(stderr, "Could not open codec\n");
exit(1);
}
else printf("H264 codec opened\n");
outbuf_size = 100000 + c->width*c->height*(32>>3);//*(32>>3); // alloc image and output buffer
outbuf = static_cast<uint8_t*>(malloc(outbuf_size));
printf("Setting buffer size to: %d\n",outbuf_size);
FILE* f = fopen("example.mpg","wb");
if(!f) printf("x - Cannot open video file for writing\n");
else printf("Opened video file for writing\n");
// encode 5 seconds of video
for(i=0;i<5*25;i++) { // 5 seconds at 25 fps, per the comment above
int nbytes = avpicture_get_size(PIX_FMT_YUV420P, c->width, c->height);
uint8_t* outbuffer = (uint8_t*)av_malloc(nbytes*sizeof(uint8_t));
AVFrame* inpic = avcodec_alloc_frame();
AVFrame* outpic = avcodec_alloc_frame();
outpic->pts = (int64_t)((float)i * (1000.0/((float)(c->time_base.den))) * 90);
avpicture_fill((AVPicture*)inpic, (uint8_t*)pPixels, PIX_FMT_RGB32, c->width, c->height); // Fill picture with image
avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, c->width, c->height);
av_image_alloc(outpic->data, outpic->linesize, c->width, c->height, c->pix_fmt, 1);
inpic->data[0] += inpic->linesize[0]*(screenHeight-1); // Flipping frame
inpic->linesize[0] = -inpic->linesize[0]; // Flipping frame
struct SwsContext* fooContext = sws_getContext(screenWidth, screenHeight, PIX_FMT_RGB32, c->width, c->height, PIX_FMT_YUV420P, SWS_FAST_BILINEAR, NULL, NULL, NULL);
sws_scale(fooContext, inpic->data, inpic->linesize, 0, c->height, outpic->data, outpic->linesize);
// encode the image
out_size = avcodec_encode_video(c, outbuf, outbuf_size, outpic);
printf("Encoding frame %3d (size=%5d)\n", i, out_size);
fwrite(outbuf, 1, out_size, f);
delete [] pPixels;
av_free(outbuffer);
av_free(inpic);
av_free(outpic);
}
// get the delayed frames
for(; out_size; i++) {
fflush(stdout);
out_size = avcodec_encode_video(c, outbuf, outbuf_size, NULL);
printf("Writing frame %3d (size=%5d)\n", i, out_size);
fwrite(outbuf, 1, out_size, f);
}
// add sequence end code to have a real mpeg file
outbuf[0] = 0x00;
outbuf[1] = 0x00;
outbuf[2] = 0x01;
outbuf[3] = 0xb7;
fwrite(outbuf, 1, 4, f);
fclose(f);
avcodec_close(c);
free(outbuf);
av_free(c);
printf("Closed codec and Freed\n");