Other articles (64)

  • APPENDIX: The plugins used specifically for the farm

    5 March 2010

    The central/master site of the farm needs several additional plugins, compared with the channel sites, in order to work properly: the Gestion de la mutualisation plugin; the inscription3 plugin, to manage registrations and requests to create a mutualisation instance as soon as users sign up; the verifier plugin, which provides a field-validation API (used by inscription3); the champs extras v2 plugin, required by inscription3 (...)

  • Emballe médias: what is it for?

    4 February 2011

    This plugin is meant to manage sites that publish documents of all types online.
    It creates "media", meaning that a "media" is an article in the SPIP sense, created automatically when a document is uploaded, whether audio, video, image or text; only a single document can be linked to a so-called "media" article;

  • Managing creation and editing rights for objects

    8 February 2011

    By default, many features are restricted to administrators, but each one can be configured independently to change the minimum status required to use it, in particular: writing content on the site, adjustable in the form-template management; adding notes to articles; adding captions and annotations to images;

On other sites (10173)

  • Fail to continuously transfer a video file into a buffer

    9 December 2016, by chintitomasud

    Here I want to transcode a video file on demand using ffmpeg, but I failed. Without ffmpeg all the code runs properly, but with ffmpeg I run into a problem. It shows this message:

    Spawning new process /samiul113039/1080.mp4:GET
    piping ffmpeg output to client, pid 10016
    HTTP connection disrupted, killing ffmpeg : 10016
    Spawning new process /samiul113039/1080.mp4:GET
    piping ffmpeg output to client, pid 4796
    HTTP connection disrupted, killing ffmpeg : 4796
    ffmpeg didn’t quit on q, sending signals
    ffmpeg has exited : 10016, code null
    ffmpeg didn’t quit on q, sending signals
    ffmpeg has exited : 4796, code null
    
    var fs=require('fs');

    var url=require("url");
    var urlvalue="http://csestudents.uiu.ac.bd/samiul113039/1080.mp4";


    var parseurl=url.parse(urlvalue);

    var HDHomeRunIP = parseurl.hostname;
    var HDHomeRunPort = parseurl.port;
    var childKillTimeoutMs = 1000;

    var parseArgs = require('minimist')(process.argv.slice(2));

    // define startsWith for string
    if (typeof String.prototype.startsWith != 'function') {
     // see below for better implementation!
     String.prototype.startsWith = function (str){
       return this.indexOf(str) == 0;
     };
    }
    // Called when the response object fires the 'close' handler, kills ffmpeg
    function responseCloseHandler(command) {
     if (command.exited != true) {
       console.log('HTTP connection disrupted, killing ffmpeg: ' + command.pid);
       // Send a 'q' which signals ffmpeg to quit.
       // Then wait half a second, send a nice signal, wait another half second
       // and send SIGKILL
       command.stdin.write('q\n');
       command.stdin.destroy();
       // install timeout and wait
       setTimeout(function() {
         if (command.exited != true) {
           console.log('ffmpeg didn\'t quit on q, sending signals');
           // still connected, do safe sig kills
           command.kill();
           try {
             command.kill('SIGQUIT');
           } catch (err) {}
           try {
             command.kill('SIGINT');
           } catch (err) {}
           // wait some more!
           setTimeout(function() {
             if (command.exited != true) {
               console.log('ffmpeg didn\'t quit on signals, sending SIGKILL');
               // at this point, just give up and whack it
               try {
                 command.kill('SIGKILL');
               } catch (err) {}
             }
           }, childKillTimeoutMs);
         }    
       }, childKillTimeoutMs);
     }
    }

    // Performs a proxy. Copies data from proxy_request into response
    function doProxy(request,response,http,options) {
     var proxy_request = http.request(options, function (proxy_response) {
       proxy_response.on('data', function(chunk) {
         response.write(chunk, 'binary');
       });
       proxy_response.on('end', function() {
         response.end();
       });
       response.writeHead(proxy_response.statusCode, proxy_response.headers);
     });
     request.on('data', function(chunk) {
       proxy_request.write(chunk, 'binary');
     });
     // error handler
     proxy_request.on('error', function(e) {
       console.log('problem with request: ' + e.message);
       response.writeHead(500);
       response.end();
     });

     proxy_request.end();
    }

    var child_process = require('child_process');
    var auth = require('./auth');
    // Performs the transcoding after the URL is validated
    function doTranscode(request,response) {
     //res.setHeader("Accept-Ranges", "bytes");
     response.setHeader("Accept-Ranges", "bytes");
     response.setHeader("Content-Type", "video/mp4");        
     response.setHeader("Connection","close");
     response.setHeader("Cache-Control","no-cache");
     response.setHeader("Pragma","no-cache");

     // always write the header
      response.writeHead(200);

     // if get, spawn command stream it
     if (request.method == 'GET') {
       console.log('Spawning new process ' + request.url + ":" + request.method);

       var command = child_process.spawn('ffmpeg',
                                         ['-i','http://csestudents.uiu.ac.bd/samiul113039/1080.mp4','-f','mpegts','-'],
                                         { stdio: ['pipe','pipe','ignore'] });

       command.exited = false;
       // handler for when ffmpeg dies unexpectedly
       command.on('exit',function(code,signal) {
         console.log('ffmpeg has exited: ' + command.pid + ", code " + code);
         // set flag saying we've quit
         command.exited = true;
         response.end();
       });
       command.on('error',function(error) {
         console.log('ffmpeg error handler - unable to kill: ' + command.pid);
         // oh well, might as well give up
         command.exited = true;
         try {
           command.stdin.close();
         } catch (err) {}
         try {
           command.stdout.close();
         } catch (err) {}
         try {
           command.stderr.close();
         } catch (err) {}
         response.end();
       });
       // handler for when client closes the URL connection - stop ffmpeg
       response.on('end',function() {
        responseCloseHandler(command);
       });
       // handler for when client closes the URL connection - stop ffmpeg
       response.on('close',function() {
         responseCloseHandler(command);
       });

       // now stream
       console.log('piping ffmpeg output to client, pid ' + command.pid);
       command.stdout.pipe(response);
       command.stdin.on('error',function(err) {
         console.log("Weird error in stdin pipe ", err);
         response.end();
       });
       command.stdout.on('error',function(err) {
         console.log("Weird error in stdout pipe ",err);
         response.end();
       });
     }
     else {
       // not GET, so close response
       response.end();
     }
    }

    // Load the http module to create an http server.
    var http = require('http');

    // Configure our HTTP server to respond with Hello World to all requests.
    var server = http.createServer(function (request, response) {
     //console.log("New connection from " + request.socket.remoteAddress + ":" + request.url);

     if (auth.validate(request,response)) {
       // first send a HEAD request to our HD Home Run with the same url to see if the address is valid.
       // This prevents an ffmpeg instance to spawn when clients request invalid things - like robots.txt/etc
       var options = {method: 'HEAD', hostname: HDHomeRunIP, port: HDHomeRunPort, path: request.url};
       var req = http.request(options, function(res) {
         // if they do a get, and it returns good status
         if (request.method == "GET" &&
             res.statusCode == 200 &&
             res.headers["content-type"] != null &&
             res.headers["content-type"].startsWith("video")) {
           // transcode is possible, start it now!
           doTranscode(request,response);
         }
         else {
           // no video or error, cannot transcode, just forward the response from the HD Home run to the client
           if (request.method == "HEAD") {
             response.writeHead(res.statusCode,res.headers);
             response.end();
           }
           else {
             // (alternative: do a 301 redirect and have the device respond directly)

             // just proxy it, that way browser doesn't redirect to HDHomeRun IP but keeps the node.js server IP
             options = {method: request.method, hostname: HDHomeRunIP, /* port: HDHomeRunPort, */path: request.url};
             doProxy(request,response,http,options);
           }
         }
       });
       req.on('error', function(e) {
         console.log('problem with request: ' + e.message);
         response.writeHead(500);
         response.end();
       });
       // finish the client request, rest of processing done in the async callbacks
       req.end();
     }
    });

    // turn on no delay for tcp
    server.on('connection', function (socket) {
     socket.setNoDelay(true);
    });
    server.listen(7000);
  • ffmpeg H264 Encode Frame at a time for network streaming

    4 February 2017, by Richard Harrow

    I’m working on a remote desktop application, and I would like to send encoded H264 packets over TCP, using ffmpeg for the encoding. However, I couldn’t find useful information for the particular case of encoding just one frame (already in YUV444) and getting the packet.

    I have several issues; the first was that:

    avcodec_encode_video2

    was not blocking. I found that most of the time you get the "delayed" frames at the end; however, since this is real-time streaming, the solution was:

    av_opt_set(mCodecContext->priv_data, "tune", "zerolatency", 0);

    Now I get the frame, but there are several issues: it takes a while and, even worse, the resulting video is gray with garbage pixels. My configuration for the codec context:

    m_pCodecCtx->bit_rate=8000000;
    m_pCodecCtx->codec_id=AV_CODEC_ID_H264;
    m_pCodecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
    m_pCodecCtx->width=1920;
    m_pCodecCtx->height=1080;
    m_pCodecCtx->pix_fmt=AV_PIX_FMT_YUV444P;
    m_pCodecCtx->time_base.num = 1;
    m_pCodecCtx->time_base.den = 25;
    m_pCodecCtx->gop_size = 1;
    m_pCodecCtx->keyint_min = 1;
    m_pCodecCtx->i_quant_factor = float(0.71);
    m_pCodecCtx->b_frame_strategy = 20;
    m_pCodecCtx->qcompress = (float)0.6;
    m_pCodecCtx->qmax = 51;
    m_pCodecCtx->qmin = 20;
    m_pCodecCtx->max_qdiff = 4;
    m_pCodecCtx->refs = 4;
    m_pCodecCtx->max_b_frames = 1;
    m_pCodecCtx->thread_count = 1;

    I would like to know how this could be done: how do I set the "I frames", and what would be optimal for a "one frame at a time" encoding? Also, I’m not concerned about quality right now; it just needs to be fast enough (under 16 ms). (A sketch of one common way to request I frames is appended at the end of this excerpt.)

    For the encoding part:

    nres = avcodec_encode_video2(m_pCodecCtx,&packet,m_pFrame,&framefinished);

    if(nres<0){
       qDebug() << "error encoding: " << nres << endl;
    }

    if(framefinished){
       m_pFrame->pts++;
        ofstream vidout("video.h264",ios::app);
        if(vidout.good()){
            vidout.write((const char*)&packet.data[0],packet.size);
        }
        vidout.close();

        av_packet_unref(&packet);

    }

    I’m not using a container, just a raw file; ffplay plays raw files if the packets are right, and that’s my main issue. I’m planning to send the packets over TCP and decode them on the client. Any help would be greatly appreciated.
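
    For reference, with the libavcodec API used above, a keyframe can usually be requested per frame by tagging the AVFrame before it is handed to the encoder (and with gop_size and keyint_min both set to 1, as in the configuration shown, every frame should already come out intra-coded). The following is only a minimal sketch under that assumption, reusing the question’s m_pCodecCtx and m_pFrame names; it is not a verified fix:

     /* Sketch: request an intra (I) frame for the next encode call.
        Assumes m_pCodecCtx is an opened H.264 encoder context and
        m_pFrame already holds the YUV444 data to encode. */
     m_pFrame->pict_type = AV_PICTURE_TYPE_I;  /* ask the encoder for an intra frame */
     m_pFrame->key_frame = 1;

     AVPacket packet;
     av_init_packet(&packet);
     packet.data = NULL;   /* let the encoder allocate the output buffer */
     packet.size = 0;

     int framefinished = 0;
     int nres = avcodec_encode_video2(m_pCodecCtx, &packet, m_pFrame, &framefinished);
     if (nres == 0 && framefinished) {
         /* packet.flags & AV_PKT_FLAG_KEY should be set on the emitted packet;
            this is where the data would be written to the TCP socket. */
         av_packet_unref(&packet);
     }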

  • Can't replicate FFMpeg Prores quality setting

    3 February 2017, by Ben L

    I’m using ffmpeg as a library and trying to create a global-quality slider, but I’m getting very inconsistent results. AVCodecContext::global_quality seems like a good place to start. Not all lossy codecs reference this member, but it does appear to work for ProRes.

    c:\>ffmpeg.exe -i test.mov -c:v prores_ks -q:v 28 out.mov     # output 10mb file

    c:\>ffmpeg.exe -i test.mov -c:v prores_ks -q:v 2 out.mov     # output 28mb file

    Great. Now let’s do it in code, based on Muxing.c. I’m distilling the code down a lot, just to give a hint about which APIs I’m calling.

    AVStream* AddStream(AVFormatContext* formatContext, int quality)
    {
       AVCodec* codec = avcodec_find_encoder_by_name("prores_ks");

       AVStream* newStream = avformat_new_stream(formatContext, codec);

       newStream->id = formatContext->nb_streams - 1;

       AVCodecContext* c = avcodec_alloc_context3(codec);

       c->codec_id = AV_CODEC_ID_PRORES;
       c->codec_type = AVMEDIA_TYPE_VIDEO;
       c->width = 1920;
       c->height = 1080;

       newStream->time_base = av_inv_q(frameRate);
       c->time_base = av_inv_q(frameRate);

       c->pix_fmt = AV_PIX_FMT_YUVA444P10;

       c->global_quality = quality;

       return newStream;
    }
    ......
    //excerpt from WriteFrame()

    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data= pVideoBuffer;
    pkt.size= iVideoBufferSize;
    int gotpkt = 0;
    int ret = avcodec_encode_video2(pCodecContext, &pkt, pPicture, &gotpkt);
    if (ret == 0)
    {
       av_packet_rescale_ts(&pkt, pCodecContext->time_base, pVideoStream->time_base);

       if (gotpkt) {
           ret = av_interleaved_write_frame(pFormatContext, &pkt);
       }
    }

    I can’t get the quality setting to affect the size of the output. Any ideas?

    This is the excerpt from proresenc_kostya.c

    ctx->force_quant = avctx->global_quality / FF_QP2LAMBDA;
    if (!ctx->force_quant) {
       if (!ctx->bits_per_mb) {
           for (i = 0; i < NUM_MB_LIMITS - 1; i++)
               if (prores_mb_limits[i] >= ctx->mb_width * ctx->mb_height *
                                          ctx->pictures_per_frame)
                   break;
           ctx->bits_per_mb   = ctx->profile_info->br_tab[i];
       } else if (ctx->bits_per_mb < 128) {
           av_log(avctx, AV_LOG_ERROR, "too few bits per MB, please set at least 128\n");
           return AVERROR_INVALIDDATA;
       }

       min_quant = ctx->profile_info->min_quant;
       max_quant = ctx->profile_info->max_quant;
       for (i = min_quant; i < MAX_STORED_Q; i++) {
           for (j = 0; j < 64; j++)
               ctx->quants[i][j] = ctx->quant_mat[j] * i;
       }

       ctx->slice_q = av_malloc(ctx->slices_per_picture * sizeof(*ctx->slice_q));
       if (!ctx->slice_q) {
           encode_close(avctx);
           return AVERROR(ENOMEM);
       }

       ctx->tdata = av_mallocz(avctx->thread_count * sizeof(*ctx->tdata));
       if (!ctx->tdata) {
           encode_close(avctx);
           return AVERROR(ENOMEM);
       }

       for (j = 0; j < avctx->thread_count; j++) {
           ctx->tdata[j].nodes = av_malloc((ctx->slices_width + 1)
                                           * TRELLIS_WIDTH
                                           * sizeof(*ctx->tdata->nodes));
           if (!ctx->tdata[j].nodes) {
               encode_close(avctx);
               return AVERROR(ENOMEM);
           }
           for (i = min_quant; i < max_quant + 2; i++) {
               ctx->tdata[j].nodes[i].prev_node = -1;
               ctx->tdata[j].nodes[i].bits      = 0;
               ctx->tdata[j].nodes[i].score     = 0;
           }
       }
    }
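
    Reading that excerpt, force_quant only becomes non-zero once global_quality reaches FF_QP2LAMBDA, which matches how the ffmpeg command-line tool maps -q:v onto the codec context: it scales the user-facing value by FF_QP2LAMBDA and sets the QSCALE flag. A minimal sketch of that mapping for the AddStream() code above, offered as an assumption to test rather than a verified fix:

     /* Sketch: mirror what the ffmpeg CLI does for -q:v.
        'quality' is the same 2..28 value passed on the command line above. */
     c->flags |= AV_CODEC_FLAG_QSCALE;             /* make the encoder honour global_quality */
     c->global_quality = FF_QP2LAMBDA * quality;   /* prores_ks divides by FF_QP2LAMBDA again */

    Without that scaling, any value smaller than FF_QP2LAMBDA ends up as force_quant == 0 after the integer division in the excerpt, so the encoder falls back to the bits_per_mb table, which would explain why the slider appears to have no effect.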

    Edit:

    Output from ffmpeg.exe:

    profile 4, 1020 slices, interlacing: no, 6576 bits per MB
    frame size upper bound: 11429274

    Output from the ffmpeg av_log in my app:

    profile 4, 1020 slices, interlacing: no, 1425 bits per MB
    frame size upper bound: 6170103