Recherche avancée

Médias (1)

Mot : - Tags -/ipad

Autres articles (57)

  • Participer à sa traduction

    10 avril 2011

    Vous pouvez nous aider à améliorer les locutions utilisées dans le logiciel ou à traduire celui-ci dans n’importe quelle nouvelle langue permettant sa diffusion à de nouvelles communautés linguistiques.
    Pour ce faire, on utilise l’interface de traduction de SPIP où l’ensemble des modules de langue de MediaSPIP sont à disposition. Il vous suffit de vous inscrire sur la liste de discussion des traducteurs pour demander plus d’informations.
    Actuellement MediaSPIP n’est disponible qu’en français et (...)

  • Les autorisations surchargées par les plugins

    27 avril 2010, par

    Mediaspip core
    autoriser_auteur_modifier() afin que les visiteurs soient capables de modifier leurs informations sur la page d’auteurs

  • MediaSPIP v0.2

    21 juin 2013, par

    MediaSPIP 0.2 is the first MediaSPIP stable release.
    Its official release date is June 21, 2013 and is announced here.
    The zip file provided here only contains the sources of MediaSPIP in its standalone version.
    To get a working installation, you must manually install all-software dependencies on the server.
    If you want to use this archive for an installation in "farm mode", you will also need to proceed to other manual (...)

Sur d’autres sites (11380)

  • How to transcode flash movie to mp4, then upload to s3

    13 octobre 2017, par user1790300

    I am using the fluent-ffmpeg library with node.js to transcode videos originally in a flash movie format to the mp4 format with multiple resolutions, 1080p, etc.. Once the transcoding is complete, I would like to move the transcoded video to an s3 bucket.

    I pull the original .flv file from a source s3 bucket and pass the stream to the ffmpeg constructor function. The issue is after the transcoding completes, how do I then get the stream of the mp4 data to send to s3.

    Here is the code I have so far :

           var params = {
               Bucket: process.env.SOURCE_BUCKET,
               Key: fileName
           };
           s3.getObject(params, function(err, data) {
               if (err) console.log(err, err.stack); // an error occurred

               var format = ffmpeg(data)
               .size('854x480')
               .videoCodec('libx264')
               .format('flv')
               .toFormat('mp4');
               .on('end', function () {
                   //Ideally, I would like to do the uploading here

                   var params = {
                      Body: //{This is my confusion, how do I get the stream to add here?},
                      Bucket: process.env.TRANSCODED_BUCKET,
                      Key: fileName
                   };
                   s3.putObject(params, function (err, data) {

                  });
               })
               .on('error', function (err) {
                   console.log('an error happened: ' + err.message);
               });

           });

    For the code above, where can I get the transcoded stream to add to the "Body" property of the params object ?

  • How can I save the h264 encoded video as a sequence of AVPacket-s to the file using libav c++ ?

    8 juillet 2023, par danvik13

    I'm trying to save video, which is generated and encoded on the fly using libav (ffmpeg) functionality. When I receive the next non-empty AVPacket with an encoded frame, I call the writer.saveNextPacket() method.
Of course, before saving, I properly init both the codec and the file writer, and close them at the very end.
Here is some code :

    


    #include "utils/FileWriter.h"

#define DEBUG 1

void tp::utils::FileWriter::open(const char* path, tp::encoders::IEncoder* encoder) {
    int hr;

    debI("guessing");
    format = av_guess_format(nullptr, path, nullptr);
    if (!format){
        debI("av_guess_format");
    }

    debI("allocating output");
    hr = avformat_alloc_output_context2(&formatContext, format, nullptr, nullptr);
    if (hr < 0){
        debI("avformat_alloc_output_context2", hr);
    }

    debI("allocating stream");
    stream = avformat_new_stream(formatContext, nullptr);
    if (!stream){
        debI("avformat_new_stream");
    }
    debI("setting parameters");
    AVCodecParameters* parameters = avcodec_parameters_alloc();
    hr = avcodec_parameters_from_context(parameters, encoder->getCodecContext());
    if (hr < 0){
        debI("avcodec_parameters_from_context", hr);
    }

    stream->codecpar = parameters;

    debI("opening");
    hr = avio_open(&formatContext->pb, path, AVIO_FLAG_WRITE);
    if (hr < 0){
        debI("avio_open", hr);
    }

    debI("writing headers");
    hr = avformat_write_header(formatContext, nullptr);
    if (hr < 0){
        debI("avformat_write_header", hr);
    }
    debI("writer is set");
}

#define DEBUG 0

// Mux one encoded packet into the output container. Interleaved writing
// buffers and orders packets by dts across streams.
// NOTE(review): the packet's pts/dts must already be expressed in
// stream->time_base (av_packet_rescale_ts) and packet->stream_index must
// match the output stream — confirm the caller guarantees both; this is a
// classic cause of "header only, no frames" mp4 files.
void tp::utils::FileWriter::saveNextPacket(AVPacket* packet) {
    int hr;

    hr = av_interleaved_write_frame(formatContext, packet);
    if (hr < 0){
        // BUG FIX: the log previously named "av_write_frame", which is a
        // different API — misleading when grepping for the failure.
        logF("av_interleaved_write_frame", hr);
    }
}

void tp::utils::FileWriter::close() {
    int hr;

    hr = av_write_trailer(formatContext);
    if (hr < 0){
        logF("av_write_trailer", hr);
    }

    hr = avio_close(formatContext->pb);
    if (hr < 0){
        logF("avio_close", hr);
    }

    avformat_free_context(formatContext);
}



    


    Here is the setup part of the encoder :

    


        // Locate the H.264 software encoder and build its context.
    codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec){
        // NOTE(review): `hr` has not been assigned at this point —
        // avcodec_find_encoder() returns a pointer, not a status code, so the
        // logged value is garbage/stale.
        logF("avcodec_find_encoder", hr);
    }
    codecContext = avcodec_alloc_context3(codec);
    if (!codecContext){
        logF("avcodec_alloc_context3");
    }
    // Frame geometry, target bitrate and timing come from the caller-supplied
    // `parameters` (declared outside this fragment).
    codecContext->width = parameters.width;
    codecContext->height = parameters.height;
    codecContext->bit_rate = parameters.bitRate;
    // One tick per frame at `fps` frames per second.
    codecContext->time_base = (AVRational){1, parameters.fps};
    // NOTE(review): gop_size = 0 together with max_b_frames = 0 presumably
    // requests an intra-only, B-frame-free stream — confirm against the
    // libx264 wrapper's interpretation of gop_size 0.
    codecContext->gop_size = 0;
    codecContext->max_b_frames = 0;
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    // Map the app-level preset enum onto x264's private "preset" option.
    if (parameters.preset == EncoderParameters::Preset::Fast){
        av_opt_set(codecContext->priv_data, "preset", "ultrafast", 0);
    }else if (parameters.preset == EncoderParameters::Preset::Slow){
        av_opt_set(codecContext->priv_data, "preset", "slow", 0);
    }

    hr = avcodec_open2(codecContext, codec, nullptr);
    if (hr < 0){
        logF("avcodec_open2", hr);
    }


    


    The problem : I open the "output.mp4" file and assume that the class properly saves the header of the file. However, the file weighs 262 bytes (there is no useful data such as frames) and it is corrupted, so it cannot be played in any video player
ps : I've enabled libav logging, which seems to be ok, despite some messages :

    


      

    1. [file @ 0000017a74c6c540] Setting default whitelist 'file,crypto,data'
I have some concerns about the appropriateness of the whitelist, as no crypto is expected
    2. 


    3. [AVIOContext @ 0000017a6a249100] Statistics : 266 bytes written, 2 seeks, 3 writeouts
The actual size of the file is 262. Does it mean that there are some info leaking during saving and even the header is already broken ?
    4. 


    


    I've tried to adjust various codec settings. Even turning off any speed presets. Actually, the encoder was tested in the pair with various libav decoders and image rendering and I'm pretty sure about encoder part.
Moreover, I've written the FileWriter code three times as there are some dissimilar guides. Only the current code does not throw any warnings or errors.
Give me a hint, which part of code may be wrong or provide me with some samples of using latest ffmpeg and answer my questions, please.

    


  • video processing : extract frames and encrypt them then insert them back to the video in java using xuggler

    24 juillet 2015, par Anas M. Jubara

    I’m working on a video encryption application .. the main idea is to input the video file to the application and the application should output it in an encrypted form... am using xuggler library to manipulate the video and get to the frames and AES for encryption.
    my code works fine for accessing the frames and encrypting them, what i need help with is how to write the encrypted frame back to the video file to replace the original one without corrupting the video file for the video players.
    Here is my code

    package xuggler;

    import com.xuggle.mediatool.IMediaReader;
    import com.xuggle.mediatool.IMediaWriter;
    import com.xuggle.mediatool.MediaListenerAdapter;
    import com.xuggle.mediatool.ToolFactory;
    import com.xuggle.mediatool.event.IVideoPictureEvent;
    import com.xuggle.xuggler.Global;
    import com.xuggle.xuggler.ICodec;

    import java.awt.Graphics2D;
    import java.awt.Point;
    import java.awt.Transparency;
    import java.awt.image.BufferedImage;
    import java.awt.image.ColorModel;
    import java.awt.image.ComponentColorModel;
    import java.awt.image.DataBuffer;
    import java.awt.image.DataBufferByte;
    import java.awt.image.Raster;
    import java.awt.image.WritableRaster;

    import java.io.File;

    import java.security.InvalidKeyException;
    import java.security.NoSuchAlgorithmException;
    import java.security.SecureRandom;

    import java.util.concurrent.TimeUnit;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    import javax.crypto.BadPaddingException;
    import javax.crypto.Cipher;
    import javax.crypto.IllegalBlockSizeException;
    import javax.crypto.KeyGenerator;
    import javax.crypto.NoSuchPaddingException;
    import javax.crypto.SecretKey;

    import javax.imageio.ImageIO;


    public class DecodeAndCaptureFrames extends MediaListenerAdapter
    {

    // The number of seconds between frames.
         public static final double SECONDS_BETWEEN_FRAMES = 5;

     //The number of micro-seconds between frames.
     public static final long MICRO_SECONDS_BETWEEN_FRAMES =(long)      (Global.DEFAULT_PTS_PER_SECOND * SECONDS_BETWEEN_FRAMES);

     // Time of last frame write
     private static long mLastPtsWrite = Global.NO_PTS;

    private static final double FRAME_RATE = 50;

    private static final int SECONDS_TO_RUN_FOR = 20;

    private static final String outputFilename = "D:\\K.mp4";

    public static IMediaWriter writer = ToolFactory.makeWriter(outputFilename);
    //receive BufferedImage and returns its byte data
       public static byte[] get_byte_data(BufferedImage image) {
       WritableRaster raster = image.getRaster();
       DataBufferByte buffer = (DataBufferByte) raster.getDataBuffer();
       return buffer.getData();
    }


    //create new_img with the attributes of image
    public static BufferedImage user_space(BufferedImage image) {
       //create new_img with the attributes of image
       BufferedImage new_img = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
       Graphics2D graphics = new_img.createGraphics();
       graphics.drawRenderedImage(image, null);
       graphics.dispose(); //release all allocated memory for this image
       return new_img;
    }

    public static BufferedImage toImage(byte[] imagebytes, int width, int height) {
       DataBuffer buffer = new DataBufferByte(imagebytes, imagebytes.length);
       WritableRaster raster = Raster.createInterleavedRaster(buffer, width, height,
          3 * width, 3, new int[]{2, 1, 0}, (Point) null);

       ColorModel cm = new ComponentColorModel(ColorModel.getRGBdefault().getColorSpace(),
               false, true, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
       return new BufferedImage(cm, raster, true, null);
    }

    public static byte[] encrypt(byte[] orgnlbytes, String key) throws NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException {
       byte[] encbytes = null;
       try {
           Cipher cipher = Cipher.getInstance("AES");
           KeyGenerator keyGen = KeyGenerator.getInstance("AES");
           SecureRandom random = SecureRandom.getInstance("SHA1PRNG");
           // cryptograph. secure random
           random.setSeed(key.getBytes());

           keyGen.init(128, random);
           // for example
           SecretKey secretKey = keyGen.generateKey();
           try {
               cipher.init(Cipher.ENCRYPT_MODE, secretKey);
           } catch (InvalidKeyException ex) {
               Logger.getLogger(DecodeAndCaptureFrames.class.getName()).log(Level.SEVERE, null, ex);
           }
           encbytes = cipher.doFinal(orgnlbytes);
       }
       catch (NoSuchAlgorithmException ex) {
           Logger.getLogger(DecodeAndCaptureFrames.class.getName()).log(Level.SEVERE, null, ex);
       }        catch (NoSuchPaddingException ex)
       {
           System.out.print("can not encrypt buffer");
       }

       return encbytes;
    }


     /**
      * The video stream index, used to ensure we display frames from one
      * and only one video stream from the media container.
      */

     private int mVideoStreamIndex = -1;

     /**
      * Takes a media container (file) as the first argument, opens it and
      *  writes some of it's video frames to PNG image files in the
      *  temporary directory.
      *
      * @param args must contain one string which represents a filename
      */

     public static void main(String[] args)
     {
       // create a new mr. decode and capture frames


       DecodeAndCaptureFrames decodeAndCaptureFrames;
       decodeAndCaptureFrames = new DecodeAndCaptureFrames("D:\\K.mp4");
     }

     /** Construct a DecodeAndCaptureFrames which reads and captures
      * frames from a video file.
      *
      * @param filename the name of the media file to read
      */


     //makes reader to the file and read the data of it
     public DecodeAndCaptureFrames(String filename)
     {
       // create a media reader for processing video

      IMediaReader reader = ToolFactory.makeReader(filename);

    // stipulate that we want BufferedImages created in BGR 24bit color space
    reader.setBufferedImageTypeToGenerate(BufferedImage.TYPE_3BYTE_BGR);


    // note that DecodeAndCaptureFrames is derived from
    // MediaReader.ListenerAdapter and thus may be added as a listener
    // to the MediaReader. DecodeAndCaptureFrames implements
    // onVideoPicture().

    reader.addListener(this);

    // read out the contents of the media file, note that nothing else
    // happens here.  action happens in the onVideoPicture() method
    // which is called when complete video pictures are extracted from
    // the media source

    while (reader.readPacket() == null)
     do {} while(false);
     }

    /**
      * Called after a video frame has been decoded from a media stream.
      * Optionally a BufferedImage version of the frame may be passed
      * if the calling {@link IMediaReader} instance was configured to
      * create BufferedImages.
      *
      * This method blocks, so return quickly.
      */

     public void onVideoPicture(IVideoPictureEvent event)
     {
       try
       {
         // if the stream index does not match the selected stream index,
         // then have a closer look

     if (event.getStreamIndex() != mVideoStreamIndex)
     {
       // if the selected video stream id is not yet set, go ahead an
       // select this lucky video stream

       if (-1 == mVideoStreamIndex)
         mVideoStreamIndex = event.getStreamIndex();

       // otherwise return, no need to show frames from this video stream

       else
         return;
     }

     // if uninitialized, backdate mLastPtsWrite so we get the very
     // first frame

     if (mLastPtsWrite == Global.NO_PTS)
       mLastPtsWrite = event.getTimeStamp() - MICRO_SECONDS_BETWEEN_FRAMES;

     // if it's time to write the next frame

     if (event.getTimeStamp() - mLastPtsWrite >= MICRO_SECONDS_BETWEEN_FRAMES)
     {
       // Make a temporary file name

      // File file = File.createTempFile("frame", ".jpeg");

       // write out PNG

    //        ImageIO.write(event.getImage(), "png", file);

       BufferedImage orgnlimage = event.getImage();
           orgnlimage = user_space(orgnlimage);
           byte[] orgnlimagebytes = get_byte_data(orgnlimage);
           byte[] encryptedbytes = encrypt(orgnlimagebytes, "abc");
           BufferedImage encryptedimage = toImage(encryptedbytes, orgnlimage.getWidth(), orgnlimage.getHeight());


           ImageIO.write(encryptedimage, "png", File.createTempFile("frame", ".png"));
    //         indicate file written

       double seconds = ((double)event.getTimeStamp())
         / Global.DEFAULT_PTS_PER_SECOND;
    //        System.out.printf("at elapsed time of %6.3f seconds wrote: %s\n",seconds, file);

       // update last write time

       mLastPtsWrite += MICRO_SECONDS_BETWEEN_FRAMES;
     }
    }
    catch (Exception e)
    {
     e.printStackTrace();
    }
     }

    }