Advanced search

Media (0)

Word: - Tags -/organisation

No media matching your criteria is available on the site.

Other articles (112)

  • Customising by adding your logo, banner or background image

    5 September 2013, by

    Some themes support three customisation elements: adding a logo; adding a banner; adding a background image.

  • Websites made with MediaSPIP

    2 May 2011, by

    This page lists some websites based on MediaSPIP.

  • Creating farms of unique websites

    13 April 2011, by

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things): implementation costs to be shared between several different projects / individuals; rapid deployment of multiple unique sites; creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

On other sites (15305)

  • AVPacket->Data is empty "0/0" but has size

    24 June 2024, by CottonBuds

    I am using libAV* to encode frames (unsigned char*) from my streaming application. I encode my initialized frames, but when I try to get the AVPacket it comes back with a size but with no data inside it ("0/0"; screenshot: "AvPacket no data").

    


    Here is my code:

    


    StreamCodec.h

    


    class StreamCodec : public QObject
    {
        Q_OBJECT
    public:
        StreamCodec(int height, int width, int fps);

    public slots:
        void encodeFrame(std::shared_ptr<uchar> pData);
        void run();

    signals:
        void encodeFinish(AVPacket* packet);

    private:
        void initializeSWS();
        void initializeCodec();

        AVPacket* allocatepacket(AVFrame* frame);
        AVFrame* allocateFrame(std::shared_ptr<uchar> pData);
        AVFrame* formatFrame(AVFrame* frame);

        const AVCodec* codec;
        AVCodecContext* context;
        SwsContext *swsContext;
        int bytesPerPixel;
        int width;
        int height;
        int fps;
        int pts = 0;
    };


    StreamCodec.cpp


    StreamCodec::StreamCodec(int height, int width, int fps)
    {
        this->height = height;
        this->width = width;
        this->fps = fps;
    }

    void StreamCodec::initializeCodec()
    {
        codec = avcodec_find_encoder(AV_CODEC_ID_H264);
        if (!codec) {
            qDebug() << "Codec not found";
            exit(1);
        }

        context = avcodec_alloc_context3(codec);
        if (!context) {
            qDebug() << "Could not allocate codec context";
            exit(1);
        }

        context->height = height;
        context->width = width;
        context->time_base.num = 1;
        context->time_base.den = fps;
        context->framerate.num = fps;
        context->framerate.den = 1;
        context->pix_fmt = AV_PIX_FMT_YUV420P;

        context->gop_size = 0;

        av_opt_set(context->priv_data, "preset", "ultrafast", 0);
        av_opt_set(context->priv_data, "crf", "35", 0);
        av_opt_set(context->priv_data, "tune", "zerolatency", 0);

        auto desc = av_pix_fmt_desc_get(AV_PIX_FMT_BGRA);
        if (!desc){
            qDebug() << "Can't get descriptor for pixel format";
            exit(1);
        }
        bytesPerPixel = av_get_bits_per_pixel(desc) / 8;
        if(av_get_bits_per_pixel(desc) % 8 != 0){
            qDebug() << "Unhandled bits per pixel, bad in pix fmt";
            exit(1);
        }

        int err = avcodec_open2(context, codec, nullptr);
        if (err < 0) {
            qDebug() << "Could not open codec";
            exit(1);
        }
    }

    void StreamCodec::initializeSWS()
    {
        swsContext = sws_getContext(width, height, AV_PIX_FMT_BGRA, width, height, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);
        if (!swsContext) {
            qDebug() << "Could not allocate SWS Context";
            exit(1);
        }
    }

    void StreamCodec::encodeFrame(std::shared_ptr<uchar> pData)
    {
        int err = 0;
        AVFrame* frame1 = allocateFrame(pData);
        AVFrame* frame = formatFrame(frame1);

        err = avcodec_send_frame(context, frame);
        if (err < 0) {
            qDebug() << "Error sending frame to codec";
            char* errStr = new char;
            av_make_error_string(errStr, 255, err);
            qDebug() << errStr;
            av_frame_free(&frame);
            exit(1);
        }

        while (true) {
            AVPacket* packet = allocatepacket(frame);
            err = avcodec_receive_packet(context, packet);
            if (err == AVERROR_EOF || err == AVERROR(EAGAIN) ) {
                av_packet_unref(packet);
                av_packet_free(&packet);
                break;
            }
            if (err < 0) {
                qDebug() << "Error recieving to codec";
                char* errStr = new char;
                av_make_error_string(errStr, 255, err);
                qDebug() << errStr;
                av_frame_free(&frame);
                av_frame_free(&frame1);
                av_packet_free(&packet);
                exit(1);
            }
            emit encodeFinish(packet);
        }

        av_frame_free(&frame);
        av_frame_free(&frame1);
    }

    void StreamCodec::run()
    {
        initializeCodec();
        initializeSWS();
    }

    AVPacket* StreamCodec::allocatepacket(AVFrame* frame)
    {
        AVPacket* packet = av_packet_alloc();
        if (!packet) {
            qDebug() << "Could not allocate memory for packet";
            av_frame_free(&frame);
            exit(1);
        }
        return packet;
    }

    AVFrame* StreamCodec::allocateFrame(std::shared_ptr<uchar> pData)
    {
        AVFrame* frame = av_frame_alloc();
        if (!frame) {
            qDebug() << "Could not allocate memory for frame";
            exit(1);
        }

        frame->format = AV_PIX_FMT_BGRA;
        frame->width = width;
        frame->height = height;
        frame->pts = pts;

        if (av_frame_get_buffer(frame, 0) < 0) {
            qDebug() << "Failed to get frame buffer";
            exit(1);
        }

        if (av_frame_make_writable(frame) < 0) {
            qDebug() << "Failed to make frame writable";
            exit(1);
        }

        frame->data[0] = pData.get();

        return frame;
    }

    AVFrame* StreamCodec::formatFrame(AVFrame* frame)
    {
        AVFrame* yuvFrame = av_frame_alloc();
        if (!yuvFrame) {
            qDebug() << "Unable to allocate memory for yuv frame";
            av_frame_free(&frame);
            exit(1);
        }

        yuvFrame->format = context->pix_fmt;
        yuvFrame->width = width;
        yuvFrame->height = height;
        yuvFrame->pts = pts;
        pts += 1;

        if (av_frame_get_buffer(yuvFrame, 0) < 0) {
            qDebug() << "Failed to get frame buffer";
            exit(1);
        }

        if (av_frame_make_writable(yuvFrame) < 0) {
            qDebug() << "Failed to make frame writable";
            exit(1);
        }

        int err = sws_scale(swsContext, (const uint8_t* const*)frame->data, frame->linesize, 0, height, (uint8_t* const*)yuvFrame->data, yuvFrame->linesize);
        if (err < 0) {
            qDebug() << "Could not format frame to yuv420p";
            exit(1);
        }
        return yuvFrame;
    }
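    One detail worth flagging in the error paths above: char* errStr = new char; allocates a single byte, while av_make_error_string is then told it may write up to 255 bytes into it. A bounded stack buffer avoids that. A minimal sketch (the logAvError helper name is an illustration, not part of the original code):

    // Sketch of a bounded error-string helper. AV_ERROR_MAX_STRING_SIZE and
    // av_make_error_string() come from libavutil/error.h.
    extern "C" {
    #include <libavutil/error.h>
    }
    #include <QDebug>

    static void logAvError(const char* what, int err)
    {
        char errStr[AV_ERROR_MAX_STRING_SIZE] = {0};   // stack buffer, nothing leaked
        av_make_error_string(errStr, sizeof(errStr), err);
        qDebug() << what << errStr;
    }

    In encodeFrame the error branches could then call, for example, logAvError("avcodec_send_frame failed", err);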


    I tried checking the frames and I'm pretty sure the data is there. I just don't know what to do at this point.
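    If it helps to rule out a debugger-display artifact, the packet payload can also be checked directly in code. A minimal sketch, assuming it is called from whatever slot is connected to encodeFinish (the dumpPacket name is not part of the original code):

    // Print the packet size and a hex dump of its first bytes, so the payload
    // can be verified independently of the debugger's pointer view.
    #include <QByteArray>
    #include <QDebug>
    extern "C" {
    #include <libavcodec/avcodec.h>
    }

    static void dumpPacket(const AVPacket* packet)
    {
        if (!packet || !packet->data || packet->size <= 0) {
            qDebug() << "packet carries no data";
            return;
        }
        const QByteArray head(reinterpret_cast<const char*>(packet->data),
                              qMin(packet->size, 16));
        qDebug() << "size =" << packet->size << "first bytes =" << head.toHex(' ');
    }

    With libx264 producing an Annex B stream, a non-empty packet would typically start with a 00 00 00 01 start code.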


    Edit 1:


    I tried viewing the data using the Visual Studio Code "view" button and it showed me this:


    (screenshot)


    Thank you so much to all who commented and pointed me in the right direction.


  • ffmpeg - data is being "removed" while h264 is being processed by ffmpeg

    21 May 2024, by Laki

    I have a file which is created from messages coming from streaming; all of the "messages" end with b'h264\x00'. I need to:


    • load the data into ffmpeg
    • perform some processing of the data
    • re-attach the data to the same "messages"

    The data is loaded with ffmpeg and saved with ffmpeg; however, ffmpeg removes "part" of the data. I have simplified the process and currently I am only loading and saving the data, without any processing, but part of the data is still being removed.


    I have tried several commands, but part of my data is always removed:


    ffmpeg -i sthg3.h264 -c copy st3.h264
    ffmpeg -err_detect ignore_err -i sthg3.h264 -c copy st3.h264
    ffmpeg -fflags +genpts -i sthg3.h264 -c copy st3.h264


    I have created a script to count the delimiters:


    file_out = 'sthg3.h264'
    def split_file(input_file,chunki):
        output_files = []
        with open(input_file, 'rb') as f:
            file_number = 0
            while True:
                chunk = f.read(504096)  # Read a chunk of data
                if not chunk:  # End of file
                    break
                index = chunk.find(chunki)  # Find the delimiter
                while index != -1:
                    chunk = chunk[index+len(chunki):]
                    file_number += 1
                    index = chunk.find(chunki)  # Find the next delimiter
        return file_number

    chunki = b'h264\x00'
    print(split_file(file_out,chunki))
    chunki = b'\x00\x01\x00'
    print(split_file(file_out,chunki))

    chunki = b'h264\x00'
    #chunki = b'\x00\x00\xdc\x9e'
    print(split_file('st3.h264',chunki))
    chunki = b'\x00\x01\x00'
    print(split_file('st3.h264',chunki))


    And here is the question: how can I push data through ffmpeg without it removing data, or replace it with something that would not be removed?
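    As an illustration of the load / process / re-attach flow described above, one option is to strip the b'h264\x00' trailer from each message before handing the payload to ffmpeg and to append it again afterwards, so ffmpeg never sees the non-H.264 bytes. A minimal C++ sketch of the splitting step, under the same assumption as the counting script (the delimiter never occurs inside a payload):

    // Split a concatenated capture such as sthg3.h264 into per-message payloads
    // on the "h264\0" trailer; each payload could then be run through ffmpeg on
    // its own and the trailer re-attached when rebuilding the stream.
    #include <fstream>
    #include <iostream>
    #include <iterator>
    #include <string>
    #include <vector>

    int main()
    {
        const std::string delimiter = std::string("h264") + '\0';   // b'h264\x00'

        std::ifstream in("sthg3.h264", std::ios::binary);
        const std::string data((std::istreambuf_iterator<char>(in)),
                               std::istreambuf_iterator<char>());

        std::vector<std::string> payloads;
        std::size_t start = 0, pos;
        while ((pos = data.find(delimiter, start)) != std::string::npos) {
            payloads.push_back(data.substr(start, pos - start));   // payload without trailer
            start = pos + delimiter.size();
        }

        std::cout << "messages found: " << payloads.size() << '\n';
        return 0;
    }

    Whether this is workable depends on the trailer bytes never appearing inside a payload, which the counting script above can help confirm.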


  • java.io.IOException: Cannot run program "/data/user/0/com.voi.myapplication8/files/ffmpeg": error=13, Permission denied

    1 April 2024, by Harsha

    java.io.IOException: Cannot run program "/data/user/0/com.voi.myapplication8/files/ffmpeg": error=13, Permission denied



    I am using this dependency:
    implementation 'com.writingminds:FFmpegAndroid:0.3.2'



    2024-03-31 21:40:31.045 15937-16762 FFmpeg  com.voi.myapplication8  E  Exception while trying to run: [Ljava.lang.String;@71d4c0f
    java.io.IOException: Cannot run program "/data/user/0/com.voi.myapplication8/files/ffmpeg": error=13, Permission denied
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:1050)
        at java.lang.Runtime.exec(Runtime.java:712)
        at java.lang.Runtime.exec(Runtime.java:571)
        at com.github.hiteshsondhi88.libffmpeg.ShellCommand.run(ShellCommand.java:10)
        at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:38)
        at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:10)
        at android.os.AsyncTask$3.call(AsyncTask.java:394)
        at java.util.concurrent.FutureTask.run(FutureTask.java:264)
        at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:305)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:644)
        at java.lang.Thread.run(Thread.java:1012)
    Caused by: java.io.IOException: error=13, Permission denied
        at java.lang.UNIXProcess.forkAndExec(Native Method)
        at java.lang.UNIXProcess.<init>(UNIXProcess.java:133)
        at java.lang.ProcessImpl.start(ProcessImpl.java:141)
        at java.lang.ProcessBuilder.start(ProcessBuilder.java:1029)
        at java.lang.Runtime.exec(Runtime.java:712)
        at java.lang.Runtime.exec(Runtime.java:571)
        at com.github.hiteshsondhi88.libffmpeg.ShellCommand.run(ShellCommand.java:10)
        at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:38)
        at com.github.hiteshsondhi88.libffmpeg.FFmpegExecuteAsyncTask.doInBackground(FFmpegExecuteAsyncTask.java:10)
        at android.os.AsyncTask$3.call(AsyncTask.java:394)
        at java.util.concurrent.FutureTask.run(FutureTask.java:264)
        at android.os.AsyncTask$SerialExecutor$1.run(AsyncTask.java:305)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:644)
        at java.lang.Thread.run(Thread.java:1012)

    2024-03-31 21:40:31.045 15937-15937 FFmpeg  com.voi.myapplication8  E  Video cropping failed:

