Media (0)

No media matching your criteria is available on this site.

Other articles (65)

  • Publishing on MédiaSpip

    13 June 2013

    Can I post content from an iPad tablet?
    Yes, provided your MédiaSpip installation is at version 0.2 or later. If needed, contact your MédiaSpip administrator to find out.

  • Adding notes and captions to images

    7 February 2011, by

    To add notes and captions to images, the first step is to install the "Légendes" plugin.
    Once the plugin is activated, you can configure it in the configuration area to change the rights for creating, editing, and deleting notes. By default, only site administrators can add notes to images.
    Changes when adding a media item
    When adding a media item of type "image", a new button appears above the preview (...)

  • Submit bugs and patches

    13 April 2011

    Unfortunately, software is never perfect.
    If you think you have found a bug, report it using our ticket system. Please help us fix it by providing the following information: the browser you are using, including the exact version; as precise an explanation of the problem as possible; if possible, the steps taken that lead to the problem; and a link to the site / page in question.
    If you think you have solved the bug, fill in a ticket and attach a corrective patch to it.
    You may also (...)

On other sites (7015)

  • Can not add tmcd stream using libavcodec to replicate behavior of ffmpeg -timecode option

    2 August, by Sailor Jerry

    I'm trying to replicate the ffmpeg -timecode command-line option in my C/C++ code. For some reason the tmcd stream is not written to the output file, even though av_dump_format shows it at run time.

    


    Here is my minimal test

    


      #include <iostream>
      extern "C" {
      #include <libavcodec/avcodec.h>
      #include <libavformat/avformat.h>
      #include <libavutil/avutil.h>
      #include <libswscale/swscale.h>
      #include <libavutil/opt.h>
      #include <libavutil/imgutils.h>
      #include <libavutil/samplefmt.h>
      }
      bool checkProResAvailability() {
        const AVCodec* codec = avcodec_find_encoder_by_name("prores_ks");
        if (!codec) {
          std::cerr << "ProRes codec not available. Please install FFmpeg with ProRes support." << std::endl;
          return false;
        }
        return true;
      }

      int main(){
        av_log_set_level(AV_LOG_INFO);

        const char* outputFileName = "test_tmcd.mov";
        AVFormatContext* formatContext = nullptr;
        AVCodecContext* videoCodecContext = nullptr;

        if (!checkProResAvailability()) {
          return -1;
        }

        std::cout << "Creating test file with tmcd stream: " << outputFileName << std::endl;

        // Allocate the output format context
        if (avformat_alloc_output_context2(&formatContext, nullptr, "mov", outputFileName) < 0) {
          std::cerr << "Failed to allocate output context!" << std::endl;
          return -1;
        }

        if (avio_open(&formatContext->pb, outputFileName, AVIO_FLAG_WRITE) < 0) {
          std::cerr << "Failed to open output file!" << std::endl;
          avformat_free_context(formatContext);
          return -1;
        }

        // Find ProRes encoder
        const AVCodec* videoCodec = avcodec_find_encoder_by_name("prores_ks");
        if (!videoCodec) {
          std::cerr << "Failed to find the ProRes encoder!" << std::endl;
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        // Video stream setup
        AVStream* videoStream = avformat_new_stream(formatContext, nullptr);
        if (!videoStream) {
          std::cerr << "Failed to create video stream!" << std::endl;
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        videoCodecContext = avcodec_alloc_context3(videoCodec);
        if (!videoCodecContext) {
          std::cerr << "Failed to allocate video codec context!" << std::endl;
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        videoCodecContext->width = 1920;
        videoCodecContext->height = 1080;
        videoCodecContext->pix_fmt = AV_PIX_FMT_YUV422P10;
        videoCodecContext->time_base = (AVRational){1, 30}; // Set FPS: 30
        videoCodecContext->bit_rate = 2000000;

        if (avcodec_open2(videoCodecContext, videoCodec, nullptr) < 0) {
          std::cerr << "Failed to open ProRes codec!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        if (avcodec_parameters_from_context(videoStream->codecpar, videoCodecContext) < 0) {
          std::cerr << "Failed to copy codec parameters to video stream!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        videoStream->time_base = videoCodecContext->time_base;

        // Timecode stream setup
        AVStream* timecodeStream = avformat_new_stream(formatContext, nullptr);
        if (!timecodeStream) {
          std::cerr << "Failed to create timecode stream!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        timecodeStream->codecpar->codec_type = AVMEDIA_TYPE_DATA;
        timecodeStream->codecpar->codec_id = AV_CODEC_ID_TIMED_ID3;
        timecodeStream->codecpar->codec_tag = MKTAG('t', 'm', 'c', 'd'); // Timecode tag
        timecodeStream->time_base = (AVRational){1, 30}; // FPS: 30

        if (av_dict_set(&timecodeStream->metadata, "timecode", "00:00:30:00", 0) < 0) {
          std::cerr << "Failed to set timecode metadata!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        // Write container header
        if (avformat_write_header(formatContext, nullptr) < 0) {
          std::cerr << "Failed to write file header!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        // Encode a dummy video frame
        AVFrame* frame = av_frame_alloc();
        if (!frame) {
          std::cerr << "Failed to allocate video frame!" << std::endl;
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        frame->format = videoCodecContext->pix_fmt;
        frame->width = videoCodecContext->width;
        frame->height = videoCodecContext->height;

        if (av_image_alloc(frame->data, frame->linesize, frame->width, frame->height, videoCodecContext->pix_fmt, 32) < 0) {
          std::cerr << "Failed to allocate frame buffer!" << std::endl;
          av_frame_free(&frame);
          avcodec_free_context(&videoCodecContext);
          avio_close(formatContext->pb);
          avformat_free_context(formatContext);
          return -1;
        }

        // Fill frame with black
        memset(frame->data[0], 0, frame->linesize[0] * frame->height); // Y plane
        memset(frame->data[1], 128, frame->linesize[1] * frame->height / 2); // U plane
        memset(frame->data[2], 128, frame->linesize[2] * frame->height / 2); // V plane

        // Encode the frame
        AVPacket packet;
        av_init_packet(&packet);
        packet.data = nullptr;
        packet.size = 0;

        if (avcodec_send_frame(videoCodecContext, frame) == 0) {
          if (avcodec_receive_packet(videoCodecContext, &packet) == 0) {
            packet.stream_index = videoStream->index;
            av_interleaved_write_frame(formatContext, &packet);
            av_packet_unref(&packet);
          }
        }

        av_frame_free(&frame);

        // Write a dummy packet for the timecode stream
        AVPacket tmcdPacket;
        av_init_packet(&tmcdPacket);
        tmcdPacket.stream_index = timecodeStream->index;
        tmcdPacket.flags |= AV_PKT_FLAG_KEY;
        tmcdPacket.data = nullptr; // Empty packet for timecode
        tmcdPacket.size = 0;
        tmcdPacket.pts = 0; // Set necessary PTS
        tmcdPacket.dts = 0;
        av_interleaved_write_frame(formatContext, &tmcdPacket);

        // Write trailer
        if (av_write_trailer(formatContext) < 0) {
          std::cerr << "Failed to write file trailer!" << std::endl;
        }

        av_dump_format(formatContext, 0, "test.mov", 1);

        // Cleanup
        avcodec_free_context(&videoCodecContext);
        avio_close(formatContext->pb);
        avformat_free_context(formatContext);

        std::cout << "Test file with timecode created successfully: " << outputFileName << std::endl;

        return 0;
      }


    The code output is:


      Creating test file with tmcd stream: test_tmcd.mov
      [prores_ks @ 0x11ce05790] Autoselected HQ profile to keep best quality. It can be overridden through -profile option.
      [mov @ 0x11ce04f20] Timestamps are unset in a packet for stream 0. This is deprecated and will stop working in the future. Fix your code to set the timestamps properly
      [mov @ 0x11ce04f20] Encoder did not produce proper pts, making some up.
      Output #0, mov, to 'test.mov':
        Metadata:
          encoder         : Lavf61.7.100
        Stream #0:0: Video: prores (HQ) (apch / 0x68637061), yuv422p10le, 1920x1080, q=2-31, 2000 kb/s, 15360 tbn
        Stream #0:1: Data: timed_id3 (tmcd / 0x64636D74)
            Metadata:
              timecode        : 00:00:30:00
      Test file with timecode created successfully: test_tmcd.mov


    The ffprobe output is:


      $ ffprobe  test_tmcd.mov
      ffprobe version 7.1.1 Copyright (c) 2007-2025 the FFmpeg developers
        built with Apple clang version 16.0.0 (clang-1600.0.26.6)
        configuration: --prefix=/opt/homebrew/Cellar/ffmpeg/7.1.1_3 --enable-shared --enable-pthreads --enable-version3 --cc=clang --host-cflags= --host-ldflags='-Wl,-ld_classic' --enable-ffplay --enable-gnutls --enable-gpl --enable-libaom --enable-libaribb24 --enable-libbluray --enable-libdav1d --enable-libharfbuzz --enable-libjxl --enable-libmp3lame --enable-libopus --enable-librav1e --enable-librist --enable-librubberband --enable-libsnappy --enable-libsrt --enable-libssh --enable-libsvtav1 --enable-libtesseract --enable-libtheora --enable-libvidstab --enable-libvmaf --enable-libvorbis --enable-libvpx --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libxvid --enable-lzma --enable-libfontconfig --enable-libfreetype --enable-frei0r --enable-libass --enable-libopencore-amrnb --enable-libopencore-amrwb --enable-libopenjpeg --enable-libspeex --enable-libsoxr --enable-libzmq --enable-libzimg --disable-libjack --disable-indev=jack --enable-videotoolbox --enable-audiotoolbox --enable-neon
        libavutil      59. 39.100 / 59. 39.100
        libavcodec     61. 19.101 / 61. 19.101
        libavformat    61.  7.100 / 61.  7.100
        libavdevice    61.  3.100 / 61.  3.100
        libavfilter    10.  4.100 / 10.  4.100
        libswscale      8.  3.100 /  8.  3.100
        libswresample   5.  3.100 /  5.  3.100
        libpostproc    58.  3.100 / 58.  3.100
      Input #0, mov,mp4,m4a,3gp,3g2,mj2, from 'test_tmcd.mov':
        Metadata:
          major_brand     : qt
          minor_version   : 512
          compatible_brands: qt
          encoder         : Lavf61.7.100
        Duration: N/A, start: 0.000000, bitrate: N/A
        Stream #0:0[0x1]: Video: prores (HQ) (apch / 0x68637061), yuv422p10le, 1920x1080, 15360 tbn (default)
            Metadata:
              handler_name    : VideoHandler
              vendor_id       : FFMP
      $


    I have spent hours with all the AI models with no help; appealing to human intelligence now.
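
    One direction worth sketching for readers who land here: FFmpeg's mov muxer can generate the tmcd track itself when it finds a "timecode" metadata entry on a video stream (or on the format context) at header-writing time, which is essentially the path the -timecode CLI option takes. The fragment below is a minimal, untested sketch of that approach using the same variables as the code above; it assumes the muxer derives the timecode rate from the video stream's frame rate, and it replaces the hand-built data stream rather than supplementing it.

      // Sketch only (assumes the setup from the question's code):
      // let the mov muxer create the tmcd track from stream metadata
      // instead of adding a data stream tagged 'tmcd' by hand.
      videoStream->time_base      = (AVRational){1, 30};
      videoStream->avg_frame_rate = (AVRational){30, 1};   // a frame rate is needed to derive the timecode rate

      // The "timecode" tag can also be set on formatContext->metadata.
      av_dict_set(&videoStream->metadata, "timecode", "00:00:30:00", 0);

      if (avformat_write_header(formatContext, nullptr) < 0) {
          std::cerr << "Failed to write file header!" << std::endl;
          return -1;
      }
      // ...encode and write the video packets as before; no manual tmcd packet is written.

    Whether this yields a tmcd stream that ffprobe reports depends on the FFmpeg version; it is offered as a direction to test, not a confirmed fix.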


  • QSharedMemory in Real-Time process

    21 November 2016, by Seungsoo Kim

    I'm trying to use the QSharedMemory class to share video data between two processes.

    I tried the following approach, but it has a problem with simultaneous access by the two processes.

    The two processes crash when they access the same memory key ("SharedMemory") one after the other.

    I lock the memory while it is in use, but that does not work well either.

    How can I avoid this crash?

    1. Writing to SharedMemory - this function is called by a callback.

      QBuffer buffer;
      buffer.open(QBuffer::ReadWrite);
      QDataStream out(&buffer);

      QByteArray outArray = QByteArray::fromRawData(reinterpret_cast<const char*>(data), strlen(reinterpret_cast<const char*>(data)));
      out << width << height << step << cameraId << strlen(reinterpret_cast<const char*>(data));
      out.writeRawData(outArray.data(), outArray.size());

      int size = buffer.size();

      sharedMemory.setKey("SharedMemory");

      if (!sharedMemory.isAttached()) {
         printf("Cannot attach to shared memory to update!\n");
      }
      if (!sharedMemory.create(size))
      {
         printf("failed to allocate memory\n");
      }
      sharedMemory.lock();
      char *to = (char*)sharedMemory.data();
      const char *from = buffer.data().data();
      memcpy(to, from, qMin(sharedMemory.size(), size));
      sharedMemory.unlock();
    2. Reading the data from SharedMemory - this function is called by a QThread every 100 ms.

      QSharedMemory sharedMemory("SharedMemory");
      sharedMemory.lock();
      if (!sharedMemory.attach()) {
         printf("failed to attach to memory\n");
         return;
      }

      QBuffer buffer;
      QDataStream in(&buffer);

      sharedMemory.create(1920 * 1080);
      buffer.setData((char*)sharedMemory.constData(), sharedMemory.size());
      buffer.open(QBuffer::ReadOnly);
      sharedMemory.unlock();
      sharedMemory.detach();

      int r_width = 0;    
      int r_height = 0;
      int r_cameraId = 0;
      int r_step = 0;
      int r_strlen = 0;
      in >> r_width >> r_height >> r_step >> r_cameraId >> r_strlen;

      char* receive = new char[r_strlen];
      in.readRawData(receive, r_strlen);
      //unsigned char* r_receive = new unsigned char[r_strlen];
      //r_receive = (unsigned char*)receive;

      QPixmap backBuffer = QPixmap::fromImage(QImage((unsigned char*)receive, r_width, r_height, r_step, QImage::Format::Format_RGB888));
      ui.label->setPixmap(backBuffer.scaled(ui.label->size(), Qt::KeepAspectRatio));
      ui.label->show();

    Please share your ideas! Thank you!
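
    A rough sketch of one way to restructure this, offered with the caveat that it is untested and only illustrates the locking discipline: the writer creates the segment once with a fixed maximum size (kMaxFrameBytes below is an assumed bound, not something from the question), the reader attaches once, and both sides keep their QSharedMemory object alive and only lock()/unlock() around each copy instead of re-creating, detaching, and re-attaching every frame.

      // Sketch: long-lived shared-memory objects on both sides, guarded by lock()/unlock().
      #include <QSharedMemory>
      #include <QByteArray>
      #include <cstring>

      static const int kMaxFrameBytes = 1920 * 1080 * 3;   // assumed upper bound for one serialized frame

      // Writer process: create (or attach to) the segment once, then copy under the lock.
      bool writeFrame(QSharedMemory &shm, const QByteArray &serializedFrame)
      {
          if (!shm.isAttached()) {
              shm.setKey("SharedMemory");
              if (!shm.create(kMaxFrameBytes) && !shm.attach())   // attach if another run already created it
                  return false;
          }
          if (serializedFrame.size() > shm.size())
              return false;
          shm.lock();
          std::memcpy(shm.data(), serializedFrame.constData(), serializedFrame.size());
          shm.unlock();
          return true;
      }

      // Reader process: attach once, copy the raw bytes out under the lock,
      // and do the QDataStream deserialization on the local copy afterwards.
      bool readFrame(QSharedMemory &shm, QByteArray &out)
      {
          if (!shm.isAttached()) {
              shm.setKey("SharedMemory");
              if (!shm.attach())
                  return false;
          }
          shm.lock();
          out = QByteArray(static_cast<const char*>(shm.constData()), shm.size());
          shm.unlock();
          return true;
      }

    Note that lock() only serializes access; it does not tell the reader that a new frame has arrived, so in practice a sequence counter stored at the start of the segment, or a QSystemSemaphore, is usually added on top.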

  • How to extract frames at 30 fps using FFMPEG APIs on Android?

    8 September 2016, by Amber Beriwal

    We are working on a project that uses the FFmpeg library for video frame extraction on the Android platform.

    On Windows, we have observed:

    • Using CLI, ffmpeg is capable of extracting frames at 30 fps using command ffmpeg -i input.flv -vf fps=1 out%d.png.
    • Using Xuggler, we are able to extract frames at 30 fps.
    • Using FFMPEG APIs directly in code, we are getting frames at 30 fps.

    But when we use the FFmpeg APIs directly on Android (see Hardware Details), we are getting the following results:

    • 720p video (1280 x 720) - 16 fps (approx. 60 ms/frame)
    • 1080p video (1920 x 1080) - 7 fps (approx. 140 ms/frame)

    We haven’t tested Xuggler/CLI on Android yet.

    Ideally, we should be able to get the data in constant time (approx. 30 ms/frame).

    How can we get 30 fps on Android ?

    Code being used on Android:

    if (avformat_open_input(&pFormatCtx, pcVideoFile, NULL, NULL)) {
       iError = -1;  //Couldn't open file
    }

    if (!iError) {
       //Retrieve stream information
       if (avformat_find_stream_info(pFormatCtx, NULL) < 0)
           iError = -2; //Couldn't find stream information
    }

    //Find the first video stream
    if (!iError) {

       for (i = 0; i < pFormatCtx->nb_streams; i++) {
           if (AVMEDIA_TYPE_VIDEO
                   == pFormatCtx->streams[i]->codec->codec_type) {
               iFramesInVideo = pFormatCtx->streams[i]->nb_index_entries;
               duration = pFormatCtx->streams[i]->duration;
               begin = pFormatCtx->streams[i]->start_time;
               time_base = (pFormatCtx->streams[i]->time_base.num * 1.0f)
                       / pFormatCtx->streams[i]->time_base.den;

               pCodecCtx = avcodec_alloc_context3(NULL);
               if (!pCodecCtx) {
                   iError = -6;
                   break;
               }

               AVCodecParameters params = { 0 };
               iReturn = avcodec_parameters_from_context(&params,
                       pFormatCtx->streams[i]->codec);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               iReturn = avcodec_parameters_to_context(pCodecCtx, &params);
               if (iReturn < 0) {
                   iError = -7;
                   break;
               }

               //pCodecCtx = pFormatCtx->streams[i]->codec;

               iVideoStreamIndex = i;
               break;
           }
       }
    }

    if (!iError) {
       if (iVideoStreamIndex == -1) {
           iError = -3; // Didn't find a video stream
       }
    }

    if (!iError) {
       // Find the decoder for the video stream
       pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
       if (pCodec == NULL) {
           iError = -4;
       }
    }

    if (!iError) {
       // Open codec
       if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
           iError = -5;
    }

    if (!iError) {
       iNumBytes = av_image_get_buffer_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
               pCodecCtx->height, 1);

       // initialize SWS context for software scaling
       sws_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
               pCodecCtx->pix_fmt, pCodecCtx->width, pCodecCtx->height,
               AV_PIX_FMT_RGB24,
               SWS_BILINEAR,
               NULL,
               NULL,
               NULL);
       if (!sws_ctx) {
           iError = -7;
       }
    }
    clock_gettime(CLOCK_MONOTONIC_RAW, &end);
    delta_us = (end.tv_sec - start.tv_sec) * 1000000
           + (end.tv_nsec - start.tv_nsec) / 1000;
    start = end;
    //LOGI("Starting_Frame_Extraction: %lld", delta_us);
    if (!iError) {
       while (av_read_frame(pFormatCtx, &packet) == 0) {
           // Is this a packet from the video stream?
           if (packet.stream_index == iVideoStreamIndex) {
               pFrame = av_frame_alloc();
               if (NULL == pFrame) {
                   iError = -8;
                   break;
               }

               // Decode video frame
               avcodec_decode_video2(pCodecCtx, pFrame, &iFrameFinished,
                       &packet);
               if (iFrameFinished) {
                   //OUR CODE
               }
               av_frame_free(&pFrame);
               pFrame = NULL;
           }
           av_packet_unref(&packet);
       }
    }
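
    For readers benchmarking this loop, two details are commonly pointed out: an AVFrame is allocated and freed for every packet, and the codec context is opened without threading configured. The sketch below shows both changes against the same variables as the code above; it assumes an FFmpeg build whose decoder supports frame/slice threading and makes no promise about reaching 30 fps on any particular device.

    // Sketch (same variables as above): configure threading before avcodec_open2()
    // and reuse a single AVFrame instead of allocating one per packet.
    pCodecCtx->thread_count = 0;                     // 0 = let libavcodec choose
    pCodecCtx->thread_type = FF_THREAD_FRAME | FF_THREAD_SLICE;

    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0)
        iError = -5;

    pFrame = av_frame_alloc();                       // allocated once, outside the loop
    while (!iError && av_read_frame(pFormatCtx, &packet) == 0) {
        if (packet.stream_index == iVideoStreamIndex) {
            avcodec_decode_video2(pCodecCtx, pFrame, &iFrameFinished, &packet);
            if (iFrameFinished) {
                // consume the frame here (sws_scale into an RGB buffer, etc.)
                av_frame_unref(pFrame);              // release references before the next decode
            }
        }
        av_packet_unref(&packet);
    }
    av_frame_free(&pFrame);

    The sws_scale conversion to RGB24 on the CPU is itself a large share of the per-frame cost at 1080p, so timing the decode and the scale separately is worth doing before changing anything else.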