Recherche avancée

Médias (1)

Mot : - Tags -/stallman

Autres articles (39)

  • MediaSPIP Core : La Configuration

    9 novembre 2010, par

    MediaSPIP Core fournit par défaut trois pages différentes de configuration (ces pages utilisent le plugin de configuration CFG pour fonctionner) : une page spécifique à la configuration générale du squelette ; une page spécifique à la configuration de la page d’accueil du site ; une page spécifique à la configuration des secteurs ;
    Il fournit également une page supplémentaire qui n’apparait que lorsque certains plugins sont activés permettant de contrôler l’affichage et les fonctionnalités spécifiques (...)

  • Les autorisations surchargées par les plugins

    27 avril 2010, par

    Mediaspip core
    autoriser_auteur_modifier() afin que les visiteurs soient capables de modifier leurs informations sur la page d’auteurs

  • HTML5 audio and video support

    13 avril 2011, par

    MediaSPIP uses HTML5 video and audio tags to play multimedia files, taking advantage of the latest W3C innovations supported by modern browsers.
    The MediaSPIP player used has been created specifically for MediaSPIP and can be easily adapted to fit in with a specific theme.
    For older browsers the Flowplayer flash fallback is used.
    MediaSPIP allows for media playback on major mobile platforms with the above (...)

Sur d’autres sites (5640)

  • Concat mpeg-ts files to mp4 missing keyframe data

    21 juillet 2022, par Jona

    I have written custom code to concat multiple mpeg-ts files into an mp4 video file. I've used as reference the remuxing code sample.

    


    I'm having issues where the final output is unable to fast-forward or rewind as the video loses its information and plays the same frame till the end. But if I play from the beginning it plays fine.

    


    I compared using ffprobe and a hex tool my custom code remuxer results to that of using the following terminal command :

    


    ffmpeg -i "concat:input1.ts|input2.ts|input3.ts" -c copy output.mp4


    


    To my surprise, the videos look almost identical but I'm noticing that I'm missing stss values on the MP4 header. This is where the key and intra frames are stored according to the MP4 Specs. I'm wondering if I'm missing something on my code. Please find below how I'm currently doing things.

    



int openFormatContext(const std::string &output, AVStream *pSourceStream) {
    int ret = avformat_alloc_output_context2(&mpFormatCtx,
                                         nullptr,
                                         nullptr,
                                         output.c_str());
    if (!mpFormatCtx) {
        LOGE("Unable to output codec: %s", av_err2str(ret));
        return ret;
    }
    
    mpFormatCtx->interrupt_callback.callback = ffmpeg_interrupt_cb;
    mpFormatCtx->interrupt_callback.opaque = this;
    
    /*
     * since all input files are supposed to be identical (framerate, dimension, color format, ...)
     * we can safely set output codec values from first input file
     */
    mpVideoStream = avformat_new_stream(mpFormatCtx, nullptr);
    
    ret = avcodec_parameters_copy(mpVideoStream->codecpar, pSourceStream->codecpar);
    if (0 > ret) {
        LOGE("Failed to copy codec parameters");
        return ret;
    }
    
    mpVideoStream->codecpar->codec_tag = 0;
    
    av_dump_format(mpFormatCtx, 0, output.c_str(), 1);
    
    ret = avio_open(&mpFormatCtx->pb, output.c_str(), AVIO_FLAG_WRITE);
    if (0 > ret) {
        LOGE("Error occurred when opening output file: %s", av_err2str(ret));
        return ret;
    }
    
    ret = avformat_write_header(mpFormatCtx, nullptr);
    if (0 > ret) {
        LOGE("Error occurred when opening output file: %s", av_err2str(ret));
        return ret;
    }
    
    return 0;
}


    


  • C++ FFMPEG remuxing RTSP stream to mp4 video

    12 août 2017, par hung

    I try to use ffmpeg remuxing.c example to save RTSP stream from my IP camera to a mp4 video. But I just receive an one frame video. It returns an error message

    [mp4 @ 0x18de4e0] Application provided invalid, non monotonically increasing dts to muxer in stream 0: 37206 >= 7202

    I check the property of the output video and see that its information (dimension, codec, framerate, bitrate) is fine. I think the problem is because of pts and dts, but I don’t know how to correct it. Please help me.
    Here’s my code :

    // Question code: FFmpeg's remuxing.c example adapted to an RTSP input.
    // NOTE(review): this listing is HTML-escaped by the page extraction —
    // "&amp;" stands for "&", "&lt;" for "<", and the #include lines were
    // mangled by stray closing tags. The escapes are transcription damage,
    // not real source; preserved verbatim below.
    extern "C" {
    #include <libavcodec></libavcodec>avcodec.h>
    #include <libavformat></libavformat>avio.h>
    #include <libavformat></libavformat>avformat.h>
    #include <libavutil></libavutil>timestamp.h>
    }

    // Remuxes argv[1] (file or RTSP URL) into argv[2]; container chosen from
    // the output extension. Returns 0 on success, 1 on error.
    int main(int argc, char **argv)
    {
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename, *out_filename;
    int ret, i;
    if (argc &lt; 3) {
       printf("usage: %s input output\n"
              "API example program to remux a media file with libavformat and libavcodec.\n"
              "The output format is guessed according to the file extension.\n"
              "\n", argv[0]);
       return 1;
    }
    in_filename  = argv[1];
    out_filename = argv[2];

    // NOTE(review): av_register_all, AVStream->codec, avcodec_copy_context,
    // CODEC_FLAG_GLOBAL_HEADER and av_free_packet are legacy APIs —
    // presumably an older FFmpeg (pre-4.x); verify against the version in use.
    av_register_all();
    avformat_network_init();

    if ((ret = avformat_open_input(&amp;ifmt_ctx, in_filename, 0, 0)) &lt; 0) {
       fprintf(stderr, "Could not open input file '%s'", in_filename);
       goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) &lt; 0) {
       fprintf(stderr, "Failed to retrieve input stream information");
       goto end;
    }
    av_dump_format(ifmt_ctx, 0, in_filename, 0);


    avformat_alloc_output_context2(&amp;ofmt_ctx, NULL, NULL, out_filename);
    if (!ofmt_ctx) {
       fprintf(stderr, "Could not create output context\n");
       ret = AVERROR_UNKNOWN;
       goto end;
    }
    ofmt = ofmt_ctx->oformat;
    // Mirror every input stream onto the output, copying codec settings.
    for (i = 0; i &lt; ifmt_ctx->nb_streams; i++) {
       AVStream *in_stream = ifmt_ctx->streams[i];
       AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
       if (!out_stream) {
           fprintf(stderr, "Failed allocating output stream\n");
           ret = AVERROR_UNKNOWN;
           goto end;
       }
       ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
       if (ret &lt; 0) {
           fprintf(stderr, "Failed to copy context from input to output stream codec context\n");
           goto end;
       }
       out_stream->codec->codec_tag = 0;
       if (ofmt_ctx->oformat->flags &amp; AVFMT_GLOBALHEADER)
           out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
    }
    av_dump_format(ofmt_ctx, 0, out_filename, 1);

    if (!(ofmt->flags &amp; AVFMT_NOFILE)) {
       ret = avio_open(&amp;ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
       if (ret &lt; 0) {
           fprintf(stderr, "Could not open output file '%s'", out_filename);
           goto end;
       }
    }
    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret &lt; 0) {
       fprintf(stderr, "Error occurred when opening output file\n");
       goto end;
    }
    while (1) {
       AVStream *in_stream, *out_stream;
       ret = av_read_frame(ifmt_ctx, &amp;pkt);
       if (ret &lt; 0)
           break;
       in_stream  = ifmt_ctx->streams[pkt.stream_index];
       out_stream = ofmt_ctx->streams[pkt.stream_index];

       /* copy packet */
       //pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
       //pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
       //pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
       // NOTE(review): likely cause of the "non monotonically increasing dts"
       // error quoted above — these lines add a constant one-tick offset
       // instead of rescaling each packet's pts/dts from in_stream->time_base
       // to out_stream->time_base (the commented-out av_rescale_q_rnd lines
       // above are the standard remuxing.c approach, which the poster says
       // works for files). Also note "pkt.duration +=" doubles the duration
       // where the commented-out line assigns it. Confirm against the FFmpeg
       // remuxing example.
       pkt.dts += av_rescale_q(1, in_stream->codec->time_base, out_stream->codec->time_base);
       pkt.pts += av_rescale_q(1, in_stream->codec->time_base, out_stream->codec->time_base);
       pkt.duration += av_rescale_q(pkt.duration, in_stream->codec->time_base, out_stream->codec->time_base);

       pkt.pos = -1;

       ret = av_interleaved_write_frame(ofmt_ctx, &amp;pkt);
       if (ret &lt; 0) {
           fprintf(stderr, "Error muxing packet\n");
           break;
       }
       av_free_packet(&amp;pkt);
    }
    av_write_trailer(ofmt_ctx);
    end:
    avformat_close_input(&amp;ifmt_ctx);
    /* close output */
    if (ofmt_ctx &amp;&amp; !(ofmt->flags &amp; AVFMT_NOFILE))
       avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret &lt; 0 &amp;&amp; ret != AVERROR_EOF) {
       fprintf(stderr, "Error occurred\n");
       return 1;
    }
    return 0;
    }

    If I check with a video file instead of RTSP link, the code work fine.

    How I run it :

    ./ffmpeg_stream rtsp://admin:centic.vn@10.49.34.234/Streaming/Channels/1?tcp video.mp4
  • how to send audio or video by packet though udp

    20 janvier 2019, par Wei Wen

    how to send part of video and audio from mp4 as packet though udp from server
    Client will play the part of the packet it receives.

    import java.awt.Dimension ; import java.awt.image.BufferedImage ; import
    java.io.ByteArrayOutputStream ; import java.io.IOException ; import
    java.io.ObjectOutputStream ; import java.math.BigInteger ; import
    java.net.DatagramPacket ; import java.net.DatagramSocket ; import
    java.net.ServerSocket ; import java.net.Socket ; import
    java.nio.ByteBuffer ; import java.nio.ShortBuffer ; import
    java.util.ArrayList ; import java.util.Arrays ; import
    javax.imageio.ImageIO ; import javax.sound.sampled.AudioFileFormat ;
    import javax.sound.sampled.AudioFormat ; import javax.swing.JTextArea ;

    import org.bytedeco.javacv.FFmpegFrameGrabber ; import
    org.bytedeco.javacv.Frame ; import
    org.bytedeco.javacv.Java2DFrameConverter ;

    import Enum.EType.ClientState ; import View.SingleDisplayWindow ;

    import java.security.InvalidKeyException ; import
    java.security.NoSuchAlgorithmException ; import java.util.Timer ; import
    java.util.TimerTask ; import java.util.concurrent.CountDownLatch ;
    import java.util.concurrent.ExecutionException ;

    import javax.crypto.BadPaddingException ; import
    javax.crypto.IllegalBlockSizeException ; import
    javax.crypto.NoSuchPaddingException ; import
    org.bytedeco.javacv.FrameGrabber ;

    public class SCon private final static int PORT = 8888 ;

    private final JTextArea TEXT_AREA ; private volatile
    SingleDisplayWindow DISPLAY ; /////

    private final String BD_USER_NAME, DB_PASSWORD ; private Database
    database ;

    private boolean isRunning ;

    private RSA serverRSA, clientRSA ;

    private int keyIndex, typeID = 0 ; private String mediatype = "" ;
    private ArrayList sHandlers ;

    private FileStreamingThread fileStreamingThread ; private
    VideoStreamingThread videoStreamingThread ; private BroadcastThread
    broadcastThread ; private ConnectThread connectThread ;

    private volatile static byte[] currentVideoFrame = new byte[0],
    currentAudioFrame = new byte[0] ; // current image music

    public void run() startServer() ;

     isRunning = true;       fileStreamingThread = new

    FileStreamingThread(videoFile) ; videoStreamingThread = new
    VideoStreamingThread(videoFile) ;
    //CountDownLatch latch = new CountDownLatch(1) ; fileStreamingThread.start() ; videoStreamingThread.start() ;
    //latch.countDown() ;

             broadcastThread = new BroadcastThread();        broadcastThread.start();

     connectThread = new ConnectThread();        connectThread.start();  }

    public void stop() isRunning = false ;

     try {           new Socket("localhost", PORT);

     } catch (IOException e) {           e.printStackTrace();        }

     while (fileStreamingThread.isAlive()) {

     }

     while (broadcastThread.isAlive()) {

     }

     while (connectThread.isAlive()) {

     }

     for (SHandler sHandler : sHandlers) {           sHandler.connectionClose();
     }       sHandlers.clear();      DISPLAY.dispose();
     TEXT_AREA.append("\nServer stop\n");    }


     private class VideoStreamingThread extends Thread {         private

    FFmpegFrameGrabber grabber ; // Used to extract frames from video file.
    private Java2DFrameConverter converter ; // Used to convert frames to
    image private int curIndex ; // Current key index

     public VideoStreamingThread(String video_file) {            videoFile =

    videoFile ; grabber = new FFmpegFrameGrabber(videoFile) ;
    converter = new Java2DFrameConverter() ; try
    grabber.restart() ;

         } catch (FrameGrabber.Exception e) {
             e.printStackTrace();            }           curIndex = keyIndex;        }

     public void run() {             try {

             while (isRunning) {
                 curIndex = keyIndex;
                 Frame frame = null;
                 System.out.println("v1");
                 if ((frame = grabber.grab()) != null) { // Grab next frame from video file
                     if (frame.image != null) { // image frame

                         BufferedImage bi = converter.convert(frame); // convert frame to image

                         // Convert BufferedImage to byte[]
                         ByteArrayOutputStream baos = new ByteArrayOutputStream();
                         ImageIO.write(bi, "jpg", baos);
                         // Encrypt data and store as the current image of byte[] type
                         currentVideoFrame = ciphers[curIndex].doFinal(baos.toByteArray());                                                                          
                         //////////////////
                         DISPLAY.setSize(new Dimension(bi.getWidth(), bi.getHeight()));
                         DISPLAY.updateImage(bi); // Display image
                     //  Thread.sleep((long) ( 999 / grabber.getFrameRate()));

                         ///////////////
                         typeID = 1;
                         mediatype = grabber.getFormat();

                     }
                 } else {
                     grabber.restart();
                 } // Restart when reached end of video
             }
             grabber.close();

         } catch (IOException e) {
             e.printStackTrace();

         } catch (IllegalBlockSizeException e) {
             e.printStackTrace();

         } catch (BadPaddingException e) {
             e.printStackTrace();

         }           //catch (InterruptedException e) {e.printStackTrace(); }        }

     public synchronized int getCurKeyIndex() {          return curIndex;        }

     public synchronized void getVideoFile(String video_file) {
         videoFile = video_file;             grabber = new

    FFmpegFrameGrabber(video_file) ; converter = new
    Java2DFrameConverter() ;

         try {
             grabber.release();
             grabber.restart();

         } catch (FrameGrabber.Exception e) {
             e.printStackTrace();            }       }   }       private class FileStreamingThread extends Thread {      private FFmpegFrameGrabber

    grabber ; // Used to extract frames from video file. private int
    curIndex ; // Current key index

     public FileStreamingThread(String video_file) {             videoFile =

    videoFile ; grabber = new FFmpegFrameGrabber(videoFile) ; try
    grabber.restart() ;

         } catch (FrameGrabber.Exception e) {
             e.printStackTrace();            }           curIndex = keyIndex;        }

     public void run() {             try {

             while (isRunning) {
                 curIndex = keyIndex;
                 Frame frame = null;

                 System.out.println("a2");
                 if ((frame = grabber.grabSamples()) != null) { // Grab next frame from video file
                     if (frame.samples != null) { // audio frame
                         // Encrypt audio

                         ShortBuffer channelSamplesShortBuffer = (ShortBuffer) frame.samples[0];
                         channelSamplesShortBuffer.rewind();

                         ByteBuffer outBuffer = ByteBuffer.allocate(channelSamplesShortBuffer.capacity() * 2);

                         for (int i = 0; i &lt; channelSamplesShortBuffer.capacity(); i++) {
                             short val = channelSamplesShortBuffer.get(i);
                             outBuffer.putShort(val);
                         }

                         AudioFileFormat audiofileFormat = new AudioFileFormat(null, null, typeID);
                         AudioFormat audioFormat = new AudioFormat(44100, 16, 2, true, true);
                         //System.out.println(grabber.getSampleFormat());
                         // Encrypt data and store as the current audio of byte[] type
                         currentAudioFrame = ciphers[curIndex].doFinal(outBuffer.array());

                         DISPLAY.updateAudio(outBuffer.array(), grabber.getFormat()); // Display image audio
                     //  Thread.sleep((long) (1000 / grabber.getSampleRate()));
                     //  Thread.sleep((long) (1000 / grabber.getAudioBitrate()));
                     //  System.out.println(grabber.getFormat());
                          //                         System.out.println("audioInputStream.getFormat() = " +

    grabber.getFormat()) ; // System.out.println("Sample.length
    = " + grabber.length) ; // System.out.println("FrameLength :" + grabber.getFrameLength()) ; //
    System.out.println("Frame Size :" + grabber.getFrameSize()) ; //
    System.out.println("SampleSizeInBits :" +
    grabber.getSampleSizeInBits()) ; //
    System.out.println("Frame Rate : " + grabber.getFrameRate()) ; //
    System.out.println("Sample Rate :" + grabber.getSampleRate()) ; //
    System.out.println("Encoding :" + grabber.getEncoding()) ; //
    System.out.println("Channels : " + grabber.getChannels()) ;
    // AudioFormat audioFormat = new AudioFormat(grabber.getSampleRate(), grabber.getAudioBitrate(),
    grabber.getAudioChannels(), true, true) ;
    // DISPLAY.updateAudio(outBuffer.array(), audioFormat) ; //
    Display image audio
    outBuffer.clear() ;

                         typeID = 2;
                         mediatype = grabber.getFormat();

                     }      
                 } else {
                     grabber.restart();
                 } // Restart when reached end of video
             }
             grabber.close();

         } catch (IOException e) {
             e.printStackTrace();

         } catch (IllegalBlockSizeException e) {
             e.printStackTrace();

         } catch (BadPaddingException e) {
             e.printStackTrace();

         }       }

     public synchronized int getCurKeyIndex() {          return curIndex;        }

     public synchronized void getVideoFile(String video_file) {
         videoFile = video_file;             grabber = new

    FFmpegFrameGrabber(video_file) ;

         try {
             grabber.release();
             grabber.restart();

         } catch (FrameGrabber.Exception e) {
             e.printStackTrace();            }       }   }

    public void setVideoFile(String videoFile) this.videoFile =
    videoFile ;

    public void setThreadFile(String video_file)
    fileStreamingThread.getVideoFile(video_file) ;
    videoStreamingThread.getVideoFile(video_file) ;

    private class BroadcastThread extends Thread public void run()
    while (isRunning)
    Thread.yield() ;

             for (int i = 0; i &lt; sHandlers.size(); i++) {
                 if (sHandlers.get(i).getClientState() == ClientState.R) {
                     sHandlers.get(i).setClientState(ClientState.W);
                     BroadcastWorker workerThread = new BroadcastWorker(sHandlers.get(i));
                     workerThread.start();
                 }
             }           }       }   }

    private class BroadcastWorker extends Thread SHandler sHandler =
    null ;

     public BroadcastWorker(SHandler sHandler) {             this.sHandler =

    sHandler ;

     public void run() {             try {
             DatagramSocket out = new DatagramSocket(); // used to send UDP packets

             while (sHandler.getClientState() == ClientState.W) {
                 Thread.yield();

                 StreamFile s = new StreamFile(typeID, currentVideoFrame, currentAudioFrame, mediatype);
                 ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
                 ObjectOutputStream os = new ObjectOutputStream(outputStream);
                 os.writeObject(s);
                 byte[] data = outputStream.toByteArray();
                     // Create and send UDP packet
                 DatagramPacket videoPacket = new DatagramPacket(data, data.length,
                         sHandler.getClientSocket().getInetAddress(),
                         Integer.parseInt(sHandler.getClientPort()));
                     out.send(videoPacket);

             }           } catch (IOException e) {
             e.printStackTrace();            }       }   }

    private class ConnectThread extends Thread public void run()
    TEXT_AREA.append("\nWaiting for clients’ connection.....\n") ;

         try {
             ServerSocket serverSocket = new ServerSocket(PORT);
             Socket clientSocket = null;

             while (isRunning) {
                 clientSocket = serverSocket.accept();

                 if (isRunning) {
                     SHandler sHandler = new SHandler(clientSocket, serverRSA, clientRSA, sessionKeys[keyIndex],
                             TEXT_AREA);
                     sHandler.start();
                     sHandlers.add(sHandler);
                 }
             }
             serverSocket.close();
             if (clientSocket != null) {
                 clientSocket.close();
             }

         } catch (IOException e) {
             e.printStackTrace();            }       }   } }

    my audio and image not sync.