Advanced search


Other articles (111)

  • Authorizations overridden by plugins

    27 April 2010, by

    Mediaspip core
    autoriser_auteur_modifier() so that visitors can edit their own information on the authors page

  • Automatic installation script for MediaSPIP

    25 April 2011, by

    To work around installation difficulties, caused mainly by server-side software dependencies, an all-in-one bash installation script was created to make this step easier on a server running a compatible Linux distribution.
    You need SSH access to your server and a "root" account in order to use it, so that the dependencies can be installed. Contact your hosting provider if you do not have these.
    The documentation on using the installation script (...)

  • HTML5 audio and video support

    13 April 2011, by

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
    For older browsers, the Flowplayer Flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

On other sites (8309)

  • Rotating a video during encoding with ffmpeg and libav API results in half of video corrupted

    11 May 2020, by Daniel Kobe

    I'm using the C API for ffmpeg/libav to rotate a vertically filmed iPhone video during the encoding step. There are other questions about doing something similar, but they all use the CLI tool to do it.

    So far I have been able to figure out how to use AVFilter to rotate the video, based on this example: https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/filtering_video.c

    The problem is that half of the output file is corrupt.
    [Screenshot: corrupt video output]

    Here is the code for my encoding logic. It's written in Go, using cgo to interface with the C API.

// Encode encodes an AVFrame and returns it
func Encode(enc Encoder, frame *C.AVFrame) (*EncodedFrame, error) {
    ctx := enc.Context()

    if ctx.buffersrcctx == nil {
        // initialize filter
        outputs := C.avfilter_inout_alloc()
        inputs  := C.avfilter_inout_alloc()
        m_pFilterGraph := C.avfilter_graph_alloc()
        buffersrc := C.avfilter_get_by_name(C.CString("buffer"))
        argsStr := fmt.Sprintf("video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d", ctx.avctx.width, ctx.avctx.height, ctx.avctx.pix_fmt, ctx.avctx.time_base.num, ctx.avctx.time_base.den, ctx.avctx.sample_aspect_ratio.num, ctx.avctx.sample_aspect_ratio.den)
        Log.Info.Println("yakotest")
        Log.Info.Println(argsStr)
        args := C.CString(argsStr)
        ret := C.avfilter_graph_create_filter(&ctx.buffersrcctx, buffersrc, C.CString("my_buffersrc"), args, nil, m_pFilterGraph)
        if ret < 0 {
            Log.Info.Printf("\n problem creating filter %v\n", AVError(ret).Error())
        }

        buffersink := C.avfilter_get_by_name(C.CString("buffersink"))
        ret = C.avfilter_graph_create_filter(&ctx.buffersinkctx, buffersink, C.CString("my_buffersink"), nil, nil, m_pFilterGraph)
        if ret < 0 {
            Log.Info.Printf("\n problem creating filter %v\n", AVError(ret).Error())
        }

        /*
         * Set the endpoints for the filter graph. The filter_graph will
         * be linked to the graph described by filters_descr.
         */

        /*
         * The buffer source output must be connected to the input pad of
         * the first filter described by filters_descr; since the first
         * filter input label is not specified, it is set to "in" by
         * default.
         */
        outputs.name       = C.av_strdup(C.CString("in"))
        outputs.filter_ctx = ctx.buffersrcctx
        outputs.pad_idx    = 0
        outputs.next       = nil

        /*
         * The buffer sink input must be connected to the output pad of
         * the last filter described by filters_descr; since the last
         * filter output label is not specified, it is set to "out" by
         * default.
         */
        inputs.name       = C.av_strdup(C.CString("out"))
        inputs.filter_ctx = ctx.buffersinkctx
        inputs.pad_idx    = 0
        inputs.next       = nil

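        // The filter description below rotates the frame 90° clockwise (transpose=clock)
        // and rescales it to a height of 1080, with the width chosen automatically and
        // rounded to an even value (scale=-2:1080).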
        ret = C.avfilter_graph_parse_ptr(m_pFilterGraph, C.CString("transpose=clock,scale=-2:1080"),
            &inputs, &outputs, nil)
        if ret < 0 {
            Log.Info.Printf("\n problem with avfilter_graph_parse %v\n", AVError(ret).Error())
        }

        ret = C.avfilter_graph_config(m_pFilterGraph, nil)
        if ret < 0 {
            Log.Info.Printf("\n problem with graph config %v\n", AVError(ret).Error())
        }
    }

    filteredFrame :=  C.av_frame_alloc()

    /* push the decoded frame into the filtergraph */
    ret := C.av_buffersrc_add_frame_flags(ctx.buffersrcctx, frame, C.AV_BUFFERSRC_FLAG_KEEP_REF)
    if ret < 0 {
        Log.Error.Printf("\nError while feeding the filter graph, err = %v\n", AVError(ret).Error())
        return nil, errors.New(ErrorFFmpegCodecFailure)
    }

    /* pull filtered frames from the filtergraph */
    for {
        ret = C.av_buffersink_get_frame(ctx.buffersinkctx, filteredFrame)
        if ret == C.AVERROR_EAGAIN || ret == C.AVERROR_EOF {
            break
        }
        if ret < 0 {
            Log.Error.Printf("\nCouldn't find a frame, err = %v\n", AVError(ret).Error())
            return nil, errors.New(ErrorFFmpegCodecFailure)
        }

        filteredFrame.pts = frame.pts
        frame = filteredFrame
        defer C.av_frame_free(&filteredFrame)
    }

    if frame != nil {
        frame.pict_type = 0 // reset pict type for the encoder
        if C.avcodec_send_frame(ctx.avctx, frame) != 0 {
            Log.Error.Printf("%+v\n", StackErrorf("codec error, could not send frame"))
            return nil, errors.New(ErrorFFmpegCodecFailure)
        }
    }

    for {
        ret := C.avcodec_receive_packet(ctx.avctx, ctx.pkt)
        if ret == C.AVERROR_EAGAIN {
            break
        }
        if ret == C.AVERROR_EOF {
            return nil, fmt.Errorf("EOF")
        }
        if ret < 0 {
            Log.Error.Printf("%+v\n", StackErrorf("codec error, receiving packet"))
            return nil, errors.New(ErrorFFmpegCodecFailure)
        }

        data := C.GoBytes(unsafe.Pointer(ctx.pkt.data), ctx.pkt.size)
        return &EncodedFrame{data, int64(ctx.pkt.pts), int64(ctx.pkt.dts),
            (ctx.pkt.flags & C.AV_PKT_FLAG_KEY) != 0}, nil
    }

    return nil, nil
}

    It seems like I need to do something with the scaling here, but I'm struggling to find helpful information online.
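    One thing worth checking (not from the original post, just a sketch reusing the ctx fields from the code above): after avfilter_graph_config succeeds, the buffersink knows the geometry the transpose/scale chain will produce, and if the encoder was opened with the pre-filter dimensions, feeding it the filtered frames is a common way to end up with exactly this kind of corrupted output.

// Hypothetical check, placed right after the avfilter_graph_config call above.
outW := int(C.av_buffersink_get_w(ctx.buffersinkctx))
outH := int(C.av_buffersink_get_h(ctx.buffersinkctx))
if outW != int(ctx.avctx.width) || outH != int(ctx.avctx.height) {
    // The filter chain changed the geometry; the encoder context must be
    // configured with the filtered dimensions before frames are sent to it.
    Log.Info.Printf("filter outputs %dx%d but encoder expects %dx%d\n",
        outW, outH, int(ctx.avctx.width), int(ctx.avctx.height))
}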

  • mpeg12dec: Extract CC user data into frame side data

    26 November 2013, by John Stebbins
    mpeg12dec: Extract CC user data into frame side data
    

    Signed-off-by: Anton Khirnov <anton@khirnov.net>

    • [DH] doc/APIchanges
    • [DH] libavcodec/mpeg12dec.c
    • [DH] libavutil/frame.h
    • [DH] libavutil/version.h
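    For anyone wanting to consume this from the decoding side, the closed-caption bytes extracted by this change travel as frame side data; a minimal Go/cgo sketch (Go to match the other examples on this page; the pkg-config name is an assumption, and AV_FRAME_DATA_A53_CC is the A53/CC side-data type declared in libavutil/frame.h) could look like:

package main

/*
#cgo pkg-config: libavutil
#include <libavutil/frame.h>
*/
import "C"
import "fmt"

// printCC reports whether a decoded frame carries A53 closed-caption user data.
func printCC(frame *C.AVFrame) {
    sd := C.av_frame_get_side_data(frame, C.AV_FRAME_DATA_A53_CC)
    if sd == nil {
        return // no closed captions attached to this frame
    }
    fmt.Printf("frame carries %d bytes of CC user data\n", int(sd.size))
}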
  • Accord.Video.FFMPEG.VideoFileWriter writes different data from the input data

    14 November 2017, by Rasool Ahmed

    I'm working on a project that encrypts video frames using the RC4 algorithm and saves these frames in a playable video file.

    I used a package named Accord.Video.FFMPEG. This package has classes (VideoFileReader and VideoFileWriter) that read and write video frames.

    The first step is reading the video:

    VideoHandler v = new VideoHandler();
    OpenFileDialog newimag = new OpenFileDialog();
    if (newimag.ShowDialog() == DialogResult.OK)
    {
        textfile = newimag.FileName;
        picbox.ImageLocation = textfile;
        status1.Text = "loaded";
        //MessageBox.Show("your video file has been loaded successfully"); // an idea for showing a message
    }
    bytedata = v.ReadAllFrameBytes(textfile);

    The second step is encrypting the video frames:

    byte[] bn = new byte[bytedata.Length]; // new array to hold the encrypted data

    bn = Encrypt(bytedata, ba);

    The last step is saving the encrypted frames:

    v.WriteFramesData(newfilepath, bn);

    My encryption algorithm encrypts and decrypts with the same algorithm and key.
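    (That symmetry does hold for RC4 itself; as a side note, here is a minimal Go sketch, with a made-up key and data, showing that applying the same keystream twice returns the original bytes. Go is used to match the other sketches on this page; the poster's own code is C#.)

    package main

    import (
        "bytes"
        "crypto/rc4"
        "fmt"
    )

    // rc4Apply runs RC4 over data with a fresh cipher, so the keystream
    // always starts from the beginning.
    func rc4Apply(key, data []byte) []byte {
        c, err := rc4.NewCipher(key)
        if err != nil {
            panic(err)
        }
        out := make([]byte, len(data))
        c.XORKeyStream(out, data)
        return out
    }

    func main() {
        key := []byte("example-key")    // hypothetical key, illustration only
        plain := []byte{10, 20, 30, 40} // stand-in for frame bytes

        encrypted := rc4Apply(key, plain)
        decrypted := rc4Apply(key, encrypted) // same key, same keystream

        fmt.Println(bytes.Equal(plain, decrypted)) // true: RC4 is its own inverse
    }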

    These steps work on text and image files, but when I use them on video I can't restore the encrypted video. After some testing, I found that VideoFileWriter doesn't write the same frames it was given. Whyyyyyyy?

    Here is the VideoFileHandler I made:

    using System;
    using System.Collections.Generic;
    using System.Drawing;
    using System.Linq;
    using System.Text;
    using System.Drawing.Imaging;
    using System.IO;
    using Accord.Video.FFMPEG;
    namespace imgtobyt_1_in_c
    {
    class VideoHandler
    {
       public List<byte[]> data;
       public byte[] imagedata;
       public int Height, Width;
       public byte[] ReadAllFrameBytes(string FileName)
       {
           // create instance of video reader
           VideoFileReader reader = new VideoFileReader();
           // open video file
           reader.Open(FileName);
           Height = reader.Height;
           Width = reader.Width;

           data = new List<byte[]>();
           // read video frames
           for (int i = 0; i < 100; i++) //change 100 to reader.FrameCount
           {
               Bitmap videoFrame = reader.ReadVideoFrame();
               byte[] framebytes = GetBytesFromFrame(videoFrame);
               data.Add(framebytes);
               // dispose the frame when it is no longer required
               videoFrame.Dispose();
           }
           reader.Close();
           imagedata = new byte[data.Count * data[0].Length];
           int c = 0;
           for (int i = 0; i < data.Count; i++)
           {
               byte[] d = data[i];
               for (int x = 0; x < d.Length; x++)
               {
                   imagedata[c] = d[x];
                   c++;
               }
           }
           return imagedata;
       }

       public byte[] GetBytesFromFrame(Bitmap Frame)
       {
           LockBitmap lockBitmap = new LockBitmap(Frame);
           lockBitmap.LockBits();

           byte[] framebytes = new byte[Frame.Width * Frame.Height * 3];
           int z = 0;
           for (int x = 0; x < lockBitmap.Height; x++)
               for (int y = 0; y < lockBitmap.Width; y++)
               {
                   Color Pixel = lockBitmap.GetPixel(y, x);
                   framebytes[z] = Pixel.R;
                   z++;
                   framebytes[z] = Pixel.G;
                   z++;
                   framebytes[z] = Pixel.B;
                   z++;
               }

           lockBitmap.UnlockBits();
           return framebytes;
           //using (var stream = new MemoryStream())
           //{
           //    Frame.Save(stream, System.Drawing.Imaging.ImageFormat.Png);
           //    return stream.ToArray();
           //}
       }

       public Bitmap GetFrameFromBytes(byte[] Framebytes, ref int offset, int Width, int Height)
       {
           Bitmap Frame = new Bitmap(Width, Height, PixelFormat.Format24bppRgb);
           LockBitmap lockBitmap = new LockBitmap(Frame);
           lockBitmap.LockBits();
           for (int x = 0; x < Height; x++)
               for (int y = 0; y < Width; y++)
               {
                   Color Pixel = Color.FromArgb(Framebytes[offset], Framebytes[offset + 1], Framebytes[offset + 2]); offset += 3;
                   lockBitmap.SetPixel(y, x, Pixel);
               }

           lockBitmap.UnlockBits();

           return Frame;
           //Bitmap bmp;
           //using (var ms = new MemoryStream(Framebytes))
           //{
           //    bmp = new Bitmap(ms);
           //}
           //return bmp;
       }

       public void WriteFramesData(string FileName, byte[] data)
       {
           // create instance of video writer
           VideoFileWriter writer = new VideoFileWriter();
           // create new video file
           writer.Open(FileName, Width, Height);

           int offset = 0;
           // write video frames
           for (int i = 0; i < 100; i++)
           {
               // create a bitmap to save into the video file
               Bitmap Frame = GetFrameFromBytes(data, ref offset, Width, Height);
               writer.WriteVideoFrame(Frame);
           }
           writer.Close();
       }
    }
    }

    Please, I need to make this work.