Recherche avancée

Médias (1)

Mot : - Tags -/Christian Nold

Autres articles (62)

  • Websites made with MediaSPIP

    2 mai 2011, par

    This page lists some websites based on MediaSPIP.

  • Participer à sa traduction

    10 avril 2011

    Vous pouvez nous aider à améliorer les locutions utilisées dans le logiciel ou à traduire celui-ci dans n’importe quelle nouvelle langue permettant sa diffusion à de nouvelles communautés linguistiques.
    Pour ce faire, on utilise l’interface de traduction de SPIP où l’ensemble des modules de langue de MediaSPIP sont à disposition. Il vous suffit de vous inscrire sur la liste de discussion des traducteurs pour demander plus d’informations.
    Actuellement MediaSPIP n’est disponible qu’en français et (...)

  • Creating farms of unique websites

    13 avril 2011, par

    MediaSPIP platforms can be installed as a farm, with a single "core" hosted on a dedicated server and used by multiple websites.
    This allows (among other things) : implementation costs to be shared between several different projects / individuals rapid deployment of multiple unique sites creation of groups of like-minded sites, making it possible to browse media in a more controlled and selective environment than the major "open" (...)

Sur d’autres sites (10405)

  • c++ - using FFmpeg encode and UDP with a Webcam

    14 mars, par Rendres

    I'm trying to get frames from a Webcam using OpenCV, encode them with FFmpeg and send them using UDP.

    



    I did before a similar project that instead of sending the packets with UDP, it saved them in a video file.

    



    My code is.

    



    #include &#xA;#include &#xA;#include &#xA;#include &#xA;&#xA;extern "C" {&#xA;#include <libavcodec></libavcodec>avcodec.h>&#xA;#include <libavformat></libavformat>avformat.h>&#xA;#include <libavutil></libavutil>opt.h>&#xA;#include <libavutil></libavutil>imgutils.h>&#xA;#include <libavutil></libavutil>mathematics.h>&#xA;#include <libswscale></libswscale>swscale.h>&#xA;#include <libswresample></libswresample>swresample.h>&#xA;}&#xA;&#xA;#include <opencv2></opencv2>opencv.hpp>&#xA;&#xA;using namespace std;&#xA;using namespace cv;&#xA;&#xA;#define WIDTH 640&#xA;#define HEIGHT 480&#xA;#define CODEC_ID AV_CODEC_ID_H264&#xA;#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P&#xA;&#xA;static AVFrame *frame, *pFrameBGR;&#xA;&#xA;int main(int argc, char **argv)&#xA;{&#xA;VideoCapture cap(0);&#xA;const char *url = "udp://127.0.0.1:8080";&#xA;&#xA;AVFormatContext *formatContext;&#xA;AVStream *stream;&#xA;AVCodec *codec;&#xA;AVCodecContext *c;&#xA;AVDictionary *opts = NULL;&#xA;&#xA;int ret, got_packet;&#xA;&#xA;if (!cap.isOpened())&#xA;{&#xA;    return -1;&#xA;}&#xA;&#xA;av_log_set_level(AV_LOG_TRACE);&#xA;&#xA;av_register_all();&#xA;avformat_network_init();&#xA;&#xA;avformat_alloc_output_context2(&amp;formatContext, NULL, "h264", url);&#xA;if (!formatContext)&#xA;{&#xA;    av_log(NULL, AV_LOG_FATAL, "Could not allocate an output context for &#x27;%s&#x27;.\n", url);&#xA;}&#xA;&#xA;codec = avcodec_find_encoder(CODEC_ID);&#xA;if (!codec)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Could not find encoder.\n");&#xA;}&#xA;&#xA;stream = avformat_new_stream(formatContext, codec);&#xA;&#xA;c = avcodec_alloc_context3(codec);&#xA;&#xA;stream->id = formatContext->nb_streams - 1;&#xA;stream->time_base = (AVRational){1, 25};&#xA;&#xA;c->codec_id = CODEC_ID;&#xA;c->bit_rate = 400000;&#xA;c->width = WIDTH;&#xA;c->height = HEIGHT;&#xA;c->time_base = stream->time_base;&#xA;c->gop_size = 12;&#xA;c->pix_fmt = STREAM_PIX_FMT;&#xA;&#xA;if (formatContext->flags &amp; AVFMT_GLOBALHEADER)&#xA;    c->flags 
|= AV_CODEC_FLAG_GLOBAL_HEADER;&#xA;&#xA;av_dict_set(&amp;opts, "preset", "fast", 0);&#xA;&#xA;av_dict_set(&amp;opts, "tune", "zerolatency", 0);&#xA;&#xA;ret = avcodec_open2(c, codec, NULL);&#xA;if (ret &lt; 0)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Could not open video codec.\n");&#xA;}&#xA;&#xA;pFrameBGR = av_frame_alloc();&#xA;if (!pFrameBGR)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Could not allocate video frame.\n");&#xA;}&#xA;&#xA;frame = av_frame_alloc();&#xA;if (!frame)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Could not allocate video frame.\n");&#xA;}&#xA;&#xA;frame->format = c->pix_fmt;&#xA;frame->width = c->width;&#xA;frame->height = c->height;&#xA;&#xA;ret = avcodec_parameters_from_context(stream->codecpar, c);&#xA;if (ret &lt; 0)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Could not open video codec.\n");&#xA;}&#xA;&#xA;av_dump_format(formatContext, 0, url, 1);&#xA;&#xA;ret = avformat_write_header(formatContext, NULL);&#xA;if (ret != 0)&#xA;{&#xA;    av_log(NULL, AV_LOG_ERROR, "Failed to connect to &#x27;%s&#x27;.\n", url);&#xA;}&#xA;&#xA;Mat image(Size(HEIGHT, WIDTH), CV_8UC3);&#xA;SwsContext *swsctx = sws_getContext(WIDTH, HEIGHT, AV_PIX_FMT_BGR24, WIDTH, HEIGHT, AV_PIX_FMT_YUV420P, SWS_BILINEAR, NULL, NULL, NULL);&#xA;int frame_pts = 0;&#xA;&#xA;while (1)&#xA;{&#xA;    cap >> image;&#xA;&#xA;    int numBytesYUV = av_image_get_buffer_size(STREAM_PIX_FMT, WIDTH, HEIGHT, 1);&#xA;    uint8_t *bufferYUV = (uint8_t *)av_malloc(numBytesYUV * sizeof(uint8_t));&#xA;&#xA;    avpicture_fill((AVPicture *)pFrameBGR, image.data, AV_PIX_FMT_BGR24, WIDTH, HEIGHT);&#xA;    avpicture_fill((AVPicture *)frame, bufferYUV, STREAM_PIX_FMT, WIDTH, HEIGHT);&#xA;&#xA;    sws_scale(swsctx, (uint8_t const *const *)pFrameBGR->data, pFrameBGR->linesize, 0, HEIGHT, frame->data, frame->linesize);&#xA;&#xA;    AVPacket pkt = {0};&#xA;    av_init_packet(&amp;pkt);&#xA;&#xA;    frame->pts = frame_pts;&#xA;&#xA;    ret = avcodec_encode_video2(c, &amp;pkt, frame, 
&amp;got_packet);&#xA;    if (ret &lt; 0)&#xA;    {&#xA;        av_log(NULL, AV_LOG_ERROR, "Error encoding frame\n");&#xA;    }&#xA;&#xA;    if (got_packet)&#xA;    {&#xA;        pkt.pts = av_rescale_q_rnd(pkt.pts, c->time_base, stream->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));&#xA;        pkt.dts = av_rescale_q_rnd(pkt.dts, c->time_base, stream->time_base, AVRounding(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));&#xA;        pkt.duration = av_rescale_q(pkt.duration, c->time_base, stream->time_base);&#xA;        pkt.stream_index = stream->index;&#xA;&#xA;        return av_interleaved_write_frame(formatContext, &amp;pkt);&#xA;&#xA;        cout &lt;&lt; "Seguro que si" &lt;&lt; endl;&#xA;    }&#xA;    frame_pts&#x2B;&#x2B;;&#xA;}&#xA;&#xA;avcodec_free_context(&amp;c);&#xA;av_frame_free(&amp;frame);&#xA;avformat_free_context(formatContext);&#xA;&#xA;return 0;&#xA;}&#xA;

    &#xA;&#xA;

    The code compiles but it returns Segmentation fault in the function av_interleaved_write_frame(). I've tried several implementations or several codecs (in this case I'm using libopenh264, but using mpeg2video returns the same segmentation fault). I tried also with av_write_frame() but it returns the same error.

    &#xA;&#xA;

    As I told before, I only want to grab frames from a webcam connected via USB, encode them to H264 and send the packets through UDP to another PC.

    &#xA;&#xA;

    My console log when I run the executable is.

    &#xA;&#xA;

    [100%] Built target display&#xA;[OpenH264] this = 0x0x244b4f0, Info:CWelsH264SVCEncoder::SetOption():ENCODER_OPTION_TRACE_CALLBACK callback = 0x7f0c302a87c0.&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:CWelsH264SVCEncoder::InitEncoder(), openh264 codec version = 5a5c4f1&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:iUsageType = 0,iPicWidth= 640;iPicHeight= 480;iTargetBitrate= 400000;iMaxBitrate= 400000;iRCMode= 0;iPaddingFlag= 0;iTemporalLayerNum= 1;iSpatialLayerNum= 1;fFrameRate= 25.000000f;uiIntraPeriod= 12;eSpsPpsIdStrategy = 0;bPrefixNalAddingCtrl = 0;bSimulcastAVC=0;bEnableDenoise= 0;bEnableBackgroundDetection= 1;bEnableSceneChangeDetect = 1;bEnableAdaptiveQuant= 1;bEnableFrameSkip= 0;bEnableLongTermReference= 0;iLtrMarkPeriod= 30, bIsLosslessLink=0;iComplexityMode = 0;iNumRefFrame = 1;iEntropyCodingModeFlag = 0;uiMaxNalSize = 0;iLTRRefNum = 0;iMultipleThreadIdc = 1;iLoopFilterDisableIdc = 0 (offset(alpha/beta): 0,0;iComplexityMode = 0,iMaxQp = 51;iMinQp = 0)&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:sSpatialLayers[0]: .iVideoWidth= 640; .iVideoHeight= 480; .fFrameRate= 25.000000f; .iSpatialBitrate= 400000; .iMaxSpatialBitrate= 400000; .sSliceArgument.uiSliceMode= 1; .sSliceArgument.iSliceNum= 0; .sSliceArgument.uiSliceSizeConstraint= 1500;uiProfileIdc = 66;uiLevelIdc = 41&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Warning:SliceArgumentValidationFixedSliceMode(), unsupported setting with Resolution and uiSliceNum combination under RC on! 
So uiSliceNum is changed to 6!&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:Setting MaxSpatialBitrate (400000) the same at SpatialBitrate (400000) will make the    actual bit rate lower than SpatialBitrate&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Warning:bEnableFrameSkip = 0,bitrate can&#x27;t be controlled for RC_QUALITY_MODE,RC_BITRATE_MODE and RC_TIMESTAMP_MODE without enabling skip frame.&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Warning:Change QP Range from(0,51) to (12,42)&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:WELS CPU features/capacities (0x4007fe3f) detected:   HTT:      Y, MMX:      Y, MMXEX:    Y, SSE:      Y, SSE2:     Y, SSE3:     Y, SSSE3:    Y, SSE4.1:   Y, SSE4.2:   Y, AVX:      Y, FMA:      Y, X87-FPU:  Y, 3DNOW:    N, 3DNOWEX:  N, ALTIVEC:  N, CMOV:     Y, MOVBE:    Y, AES:      Y, NUMBER OF LOGIC PROCESSORS ON CHIP: 8, CPU CACHE LINE SIZE (BYTES):        64&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:WelsInitEncoderExt() exit, overall memory usage: 4542878 bytes&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Info:WelsInitEncoderExt(), pCtx= 0x0x245a400.&#xA;Output #0, h264, to &#x27;udp://192.168.100.39:8080&#x27;:&#xA;Stream #0:0, 0, 1/25: Video: h264 (libopenh264), 1 reference frame, yuv420p, 640x480 (0x0), 0/1, q=2-31, 400 kb/s, 25 tbn&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Debug:RcUpdateIntraComplexity iFrameDqBits = 385808,iQStep= 2016,iIntraCmplx = 777788928&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Debug:[Rc]Layer 0: Frame timestamp = 0, Frame type = 2, encoding_qp = 30, average qp = 30, max qp = 33, min qp = 27, index = 0, iTid = 0, used = 385808, bitsperframe = 16000, target = 64000, remainingbits = -257808, skipbuffersize = 200000&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Debug:WelsEncoderEncodeExt() OutputInfo iLayerNum = 2,iFrameSize = 
48252&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Debug:WelsEncoderEncodeExt() OutputInfo iLayerId = 0,iNalType = 0,iNalCount = 2, first Nal Length=18,uiSpatialId = 0,uiTemporalId = 0,iSubSeqId = 0&#xA;[libopenh264 @ 0x244aa00] [OpenH264] this = 0x0x244b4f0, Debug:WelsEncoderEncodeExt() OutputInfo iLayerId = 1,iNalType = 1,iNalCount = 6, first Nal Length=6057,uiSpatialId = 0,uiTemporalId = 0,iSubSeqId = 0&#xA;[libopenh264 @ 0x244aa00] 6 slices&#xA;./scriptBuild.sh: line 20: 10625 Segmentation fault      (core dumped) ./display&#xA;

    &#xA;&#xA;

    As you can see, FFmpeg uses libopenh264 and configures it correctly. However, no matter what. It always returns the same Segmentation fault error...

    &#xA;&#xA;

    I've used commands like this.

    &#xA;&#xA;

    ffmpeg -s 640x480 -f video4linux2 -i /dev/video0 -r 30 -vcodec libopenh264 -an -f h264 udp://127.0.0.1:8080&#xA;

    &#xA;&#xA;

    And it works perfectly, but I need to process the frames before sending them. Thats why I'm trying to use the libs.

    &#xA;&#xA;

    My FFmpeg version is.

    &#xA;&#xA;

    ffmpeg version 3.3.6 Copyright (c) 2000-2017 the FFmpeg developers&#xA;built with gcc 4.8 (Ubuntu 4.8.4-2ubuntu1~14.04.3)&#xA;configuration: --disable-yasm --enable-shared --enable-libopenh264 --cc=&#x27;gcc -fPIC&#x27;&#xA;libavutil      55. 58.100 / 55. 58.100&#xA;libavcodec     57. 89.100 / 57. 89.100&#xA;libavformat    57. 71.100 / 57. 71.100&#xA;libavdevice    57.  6.100 / 57.  6.100&#xA;libavfilter     6. 82.100 /  6. 82.100&#xA;libswscale      4.  6.100 /  4.  6.100&#xA;libswresample   2.  7.100 /  2.  7.100&#xA;

    &#xA;&#xA;

    I tried to get more information of the error using gdb, but it didn't give me debugging info.

    &#xA;&#xA;

    How can I solve this problem ? I don't know what else can I try...

    &#xA;&#xA;

    Thank you !

    &#xA;

  • How to keep transparency when scale webm file with ffmpeg

    5 octobre 2022, par Sonia Kidman

    I'm using ffmpeg to scale my WEBM file, by using below command : &#xA;ffmpeg -i in.webm -c:v libvpx -vf scale=100:100 out.webm&#xA;The output has correct resolution as I expected but the problem is transparency become black background.

    &#xA;&#xA;

    Could someone give me a solution for this.

    &#xA;&#xA;

    Thank you so much.

    &#xA;&#xA;

    Below is the log of the operation :

    &#xA;&#xA;

    ffmpeg version 3.4 Copyright (c) 2000-2017 the FFmpeg developers&#xA;  built with gcc 7.2.0 (GCC)&#xA;  configuration: --enable-gpl --enable-version3 --enable-sdl2 --enable-bzlib --enable-fontconfig --enable-gnutls --enable-iconv --enable-libass --enable-libbluray --enable-libfreetype --enable-libmp3lame --enable-libopenjpeg --enable-libopus --enable-libshine --enable-libsnappy --enable-libsoxr --enable-libtheora --enable-libtwolame --enable-libvpx --enable-libwavpack --enable-libwebp --enable-libx264 --enable-libx265 --enable-libxml2 --enable-libzimg --enable-lzma --enable-zlib --enable-gmp --enable-libvidstab --enable-libvorbis --enable-cuda --enable-cuvid --enable-d3d11va --enable-nvenc --enable-dxva2 --enable-avisynth --enable-libmfx&#xA;  libavutil      55. 78.100 / 55. 78.100&#xA;  libavcodec     57.107.100 / 57.107.100&#xA;  libavformat    57. 83.100 / 57. 83.100&#xA;  libavdevice    57. 10.100 / 57. 10.100&#xA;  libavfilter     6.107.100 /  6.107.100&#xA;  libswscale      4.  8.100 /  4.  8.100&#xA;  libswresample   2.  9.100 /  2.  9.100&#xA;  libpostproc    54.  7.100 / 54.  7.100&#xA;Splitting the commandline.&#xA;Reading option &#x27;-v&#x27; ... matched as option &#x27;v&#x27; (set logging level) with argument &#x27;56&#x27;.&#xA;Reading option &#x27;-i&#x27; ... matched as input url with argument &#x27;in.webm&#x27;.&#xA;Reading option &#x27;-c:v&#x27; ... matched as option &#x27;c&#x27; (codec name) with argument &#x27;libvpx&#x27;.&#xA;Reading option &#x27;-vf&#x27; ... matched as option &#x27;vf&#x27; (set video filters) with argument &#x27;scale=320:240&#x27;.&#xA;Reading option &#x27;out.webm&#x27; ... 
matched as output url.&#xA;Finished splitting the commandline.&#xA;Parsing a group of options: global .&#xA;Applying option v (set logging level) with argument 56.&#xA;Successfully parsed a group of options.&#xA;Parsing a group of options: input url in.webm.&#xA;Successfully parsed a group of options.&#xA;Opening an input file: in.webm.&#xA;[NULL @ 000002387e6322a0] Opening &#x27;in.webm&#x27; for reading&#xA;[file @ 000002387e632ea0] Setting default whitelist &#x27;file,crypto&#x27;&#xA;Probing matroska,webm score:100 size:2048&#xA;Probing mp3 score:1 size:2048&#xA;[matroska,webm @ 000002387e6322a0] Format matroska,webm probed with size=2048 and score=100&#xA;st:0 removing common factor 1000000 from timebase&#xA;[matroska,webm @ 000002387e6322a0] Before avformat_find_stream_info() pos: 634 bytes read:32768 seeks:0 nb_streams:1&#xA;[matroska,webm @ 000002387e6322a0] All info found&#xA;[matroska,webm @ 000002387e6322a0] stream 0: start_time: 0.000 duration: -9223372036854776.000&#xA;[matroska,webm @ 000002387e6322a0] format: start_time: 0.000 duration: 0.400 bitrate=1432 kb/s&#xA;[matroska,webm @ 000002387e6322a0] After avformat_find_stream_info() pos: 34843 bytes read:65536 seeks:0 frames:1&#xA;Input #0, matroska,webm, from &#x27;in.webm&#x27;:&#xA;  Metadata:&#xA;    ENCODER         : Lavf57.83.100&#xA;  Duration: 00:00:00.40, start: 0.000000, bitrate: 1432 kb/s&#xA;    Stream #0:0, 1, 1/1000: Video: vp8, 1 reference frame, yuv420p(progressive), 640x480, 0/1, SAR 1:1 DAR 4:3, 10 fps, 10 tbr, 1k tbn, 1k tbc (default)&#xA;    Metadata:&#xA;      alpha_mode      : 1&#xA;      ENCODER         : Lavc57.107.100 libvpx&#xA;      DURATION        : 00:00:00.400000000&#xA;Successfully opened the file.&#xA;Parsing a group of options: output url out.webm.&#xA;Applying option c:v (codec name) with argument libvpx.&#xA;Applying option vf (set video filters) with argument scale=320:240.&#xA;Successfully parsed a group of options.&#xA;Opening an output file: out.webm.&#xA;[file @ 
000002387e658b40] Setting default whitelist &#x27;file,crypto&#x27;&#xA;Successfully opened the file.&#xA;detected 4 logical cores&#xA;Stream mapping:&#xA;  Stream #0:0 -> #0:0 (vp8 (native) -> vp8 (libvpx))&#xA;Press [q] to stop, [?] for help&#xA;cur_dts is invalid (this is harmless if it occurs once at the start per stream)&#xA;    Last message repeated 4 times&#xA;[Parsed_scale_0 @ 000002387e718a60] Setting &#x27;w&#x27; to value &#x27;320&#x27;&#xA;[Parsed_scale_0 @ 000002387e718a60] Setting &#x27;h&#x27; to value &#x27;240&#x27;&#xA;[Parsed_scale_0 @ 000002387e718a60] Setting &#x27;flags&#x27; to value &#x27;bicubic&#x27;&#xA;[Parsed_scale_0 @ 000002387e718a60] w:320 h:240 flags:&#x27;bicubic&#x27; interl:0&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;video_size&#x27; to value &#x27;640x480&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;pix_fmt&#x27; to value &#x27;0&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;time_base&#x27; to value &#x27;1/1000&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;pixel_aspect&#x27; to value &#x27;1/1&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;sws_param&#x27; to value &#x27;flags=2&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] Setting &#x27;frame_rate&#x27; to value &#x27;10/1&#x27;&#xA;[graph 0 input from stream 0:0 @ 000002387e743b00] w:640 h:480 pixfmt:yuv420p tb:1/1000 fr:10/1 sar:1/1 sws_param:flags=2&#xA;[format @ 000002387e7fe1e0] compat: called with args=[yuv420p|yuva420p]&#xA;[format @ 000002387e7fe1e0] Setting &#x27;pix_fmts&#x27; to value &#x27;yuv420p|yuva420p&#x27;&#xA;[AVFilterGraph @ 000002387e634e60] query_formats: 4 queried, 3 merged, 0 already done, 0 delayed&#xA;[Parsed_scale_0 @ 000002387e718a60] w:640 h:480 fmt:yuv420p sar:1/1 -> w:320 h:240 fmt:yuv420p sar:1/1 flags:0x4&#xA;[libvpx @ 000002387e657fe0] v1.6.1&#xA;[libvpx @ 000002387e657fe0] 
--prefix=/Users/kyle/software/libvpx/win64/libvpx-1.6.1-win64 --target=x86_64-win64-gcc&#xA;[libvpx @ 000002387e657fe0] vpx_codec_enc_cfg&#xA;[libvpx @ 000002387e657fe0] generic settings&#xA;  g_usage:                      0&#xA;  g_threads:                    0&#xA;  g_profile:                    0&#xA;  g_w:                          320&#xA;  g_h:                          240&#xA;  g_bit_depth:                  8&#xA;  g_input_bit_depth:            8&#xA;  g_timebase:                   {1/30}&#xA;  g_error_resilient:            0&#xA;  g_pass:                       0&#xA;  g_lag_in_frames:              0&#xA;[libvpx @ 000002387e657fe0] rate control settings&#xA;  rc_dropframe_thresh:          0&#xA;  rc_resize_allowed:            0&#xA;  rc_resize_up_thresh:          60&#xA;  rc_resize_down_thresh:        30&#xA;  rc_end_usage:                 0&#xA;  rc_twopass_stats_in:          0000000000000000(0)&#xA;  rc_target_bitrate:            256&#xA;[libvpx @ 000002387e657fe0] quantizer settings&#xA;  rc_min_quantizer:             4&#xA;  rc_max_quantizer:             63&#xA;[libvpx @ 000002387e657fe0] bitrate tolerance&#xA;  rc_undershoot_pct:            100&#xA;  rc_overshoot_pct:             100&#xA;[libvpx @ 000002387e657fe0] decoder buffer model&#xA;  rc_buf_sz:                    6000&#xA;  rc_buf_initial_sz:            4000&#xA;  rc_buf_optimal_sz:            5000&#xA;[libvpx @ 000002387e657fe0] 2 pass rate control settings&#xA;  rc_2pass_vbr_bias_pct:        50&#xA;  rc_2pass_vbr_minsection_pct:  0&#xA;  rc_2pass_vbr_maxsection_pct:  400&#xA;[libvpx @ 000002387e657fe0] keyframing settings&#xA;  kf_mode:                      1&#xA;  kf_min_dist:                  0&#xA;  kf_max_dist:                  128&#xA;[libvpx @ 000002387e657fe0] &#xA;[libvpx @ 000002387e657fe0] vpx_codec_enc_cfg&#xA;[libvpx @ 000002387e657fe0] generic settings&#xA;  g_usage:                      0&#xA;  g_threads:                    0&#xA;  g_profile:                    0&#xA;  g_w:        
                  320&#xA;  g_h:                          240&#xA;  g_bit_depth:                  8&#xA;  g_input_bit_depth:            8&#xA;  g_timebase:                   {1/10}&#xA;  g_error_resilient:            0&#xA;  g_pass:                       0&#xA;  g_lag_in_frames:              25&#xA;[libvpx @ 000002387e657fe0] rate control settings&#xA;  rc_dropframe_thresh:          0&#xA;  rc_resize_allowed:            0&#xA;  rc_resize_up_thresh:          60&#xA;  rc_resize_down_thresh:        30&#xA;  rc_end_usage:                 0&#xA;  rc_twopass_stats_in:          0000000000000000(0)&#xA;  rc_target_bitrate:            200&#xA;[libvpx @ 000002387e657fe0] quantizer settings&#xA;  rc_min_quantizer:             4&#xA;  rc_max_quantizer:             63&#xA;[libvpx @ 000002387e657fe0] bitrate tolerance&#xA;  rc_undershoot_pct:            100&#xA;  rc_overshoot_pct:             100&#xA;[libvpx @ 000002387e657fe0] decoder buffer model&#xA;  rc_buf_sz:                    6000&#xA;  rc_buf_initial_sz:            4000&#xA;  rc_buf_optimal_sz:            5000&#xA;[libvpx @ 000002387e657fe0] 2 pass rate control settings&#xA;  rc_2pass_vbr_bias_pct:        50&#xA;  rc_2pass_vbr_minsection_pct:  0&#xA;  rc_2pass_vbr_maxsection_pct:  400&#xA;[libvpx @ 000002387e657fe0] keyframing settings&#xA;  kf_mode:                      1&#xA;  kf_min_dist:                  0&#xA;  kf_max_dist:                  128&#xA;[libvpx @ 000002387e657fe0] &#xA;[libvpx @ 000002387e657fe0] vpx_codec_control&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_CPUUSED:             1&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_ARNR_MAXFRAMES:      0&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_ARNR_STRENGTH:       3&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_ARNR_TYPE:           3&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_NOISE_SENSITIVITY:   0&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_TOKEN_PARTITIONS:    0&#xA;[libvpx @ 000002387e657fe0]   VP8E_SET_STATIC_THRESHOLD:    0&#xA;[libvpx @ 000002387e657fe0] 
Using deadline: 1000000&#xA;Output #0, webm, to &#x27;out.webm&#x27;:&#xA;  Metadata:&#xA;    encoder         : Lavf57.83.100&#xA;    Stream #0:0, 0, 1/1000: Video: vp8 (libvpx), 1 reference frame, yuv420p, 320x240 [SAR 1:1 DAR 4:3], 0/1, q=-1--1, 200 kb/s, 10 fps, 1k tbn, 10 tbc (default)&#xA;    Metadata:&#xA;      alpha_mode      : 1&#xA;      DURATION        : 00:00:00.400000000&#xA;      encoder         : Lavc57.107.100 libvpx&#xA;    Side data:&#xA;      cpb: bitrate max/min/avg: 0/0/0 buffer size: 0 vbv_delay: -1&#xA;Clipping frame in rate conversion by 0.000008&#xA;[webm @ 000002387e656880] get_metadata_duration returned: 400000&#xA;[webm @ 000002387e656880] Write early duration from metadata = 400&#xA;[webm @ 000002387e656880] Writing block at offset 3, size 11223, pts 0, dts 0, duration 100, keyframe 1&#xA;[webm @ 000002387e656880] Writing block at offset 11233, size 1288, pts 100, dts 100, duration 100, keyframe 0&#xA;[webm @ 000002387e656880] Writing block at offset 12528, size 1504, pts 200, dts 200, duration 100, keyframe 0&#xA;[webm @ 000002387e656880] Writing block at offset 14039, size 2481, pts 300, dts 300, duration 100, keyframe 0&#xA;[out_0_0 @ 000002387e743d60] EOF on sink link out_0_0:default.&#xA;No more output streams to write to, finishing.&#xA;[webm @ 000002387e656880] end duration = 400&#xA;[webm @ 000002387e656880] stream 0 end duration = 400&#xA;frame=    4 fps=0.0 q=0.0 Lsize=      17kB time=00:00:00.30 bitrate= 457.8kbits/s speed=4.45x    &#xA;video:16kB audio:0kB subtitle:0kB other streams:0kB global headers:0kB muxing overhead: 4.413191%&#xA;Input file #0 (in.webm):&#xA;  Input stream #0:0 (video): 4 packets read (34992 bytes); 4 frames decoded; &#xA;  Total: 4 packets (34992 bytes) demuxed&#xA;Output file #0 (out.webm):&#xA;  Output stream #0:0 (video): 4 frames encoded; 4 packets muxed (16496 bytes); &#xA;  Total: 4 packets (16496 bytes) muxed&#xA;4 frames successfully decoded, 0 decoding errors&#xA;[AVIOContext @ 
000002387e698c20] Statistics: 14 seeks, 10 writeouts&#xA;[AVIOContext @ 000002387cc773e0] Statistics: 71649 bytes read, 0 seeks&#xA;

    &#xA;

  • avcodec_receive_packet() doesn't see the output

    1er mars 2018, par Eugene Alexeev

    I’m trying to create a converter which will make a video out of set of images. Everything is at its place, AVFormatContext, AVCodecContext, AVCodec. I’m creating YUV AVFrame out of UIImage and send it to encoder by avcodec_send_frame() method. Everything goes fine until I’m trying to get AVPacket with method avcodec_receive_packet(). Every time it returns -35 which means - output is not available in the current state - user must try to send input. As I said, I’m sending an input before I’m trying to get something and sending is successful.

    Here’s my code :

    Init ffmpeg entities :

    - (BOOL)setupForConvert:(DummyFVPVideoFile *)videoFile outputPath:(NSString *)path
    {
       // Prepare all FFmpeg state for a conversion run: format/codec
       // contexts, one stream per camera, and the output I/O context.
       // Returns NO at the first stage that fails.
       if (videoFile == nil) {
           [self.delegate convertationFailed:@"VideoFile is nil!"];
           return NO;
       }

       currentVideoFile = videoFile;
       outputPath = path;

       // Each stage must succeed before the next one runs.
       if (![self initFormatCtxAndCodecs:path]) {
           return NO;
       }
       if (![self addCameraStreams:videoFile]) {
           return NO;
       }
       if (![self openIOContext:path]) {
           return NO;
       }

       return YES;
    }

    - (BOOL)initFormatCtxAndCodecs:(NSString *)path
    {
       // Allocate the muxer context (container guessed from the output
       // path's extension), find and open an H.264 encoder, and allocate
       // the reusable output packet.
       //AVOutputFormat *fmt = av_guess_format("mp4", NULL, NULL);
       int ret = avformat_alloc_output_context2(&pFormatCtx, NULL, NULL, [path UTF8String]);
       if (ret < 0) {
           NSLog(@"Couldn't create output context");
           return NO;
       }

       //encoder codec init
       pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
       if (!pCodec) {
           NSLog(@"Couldn't find a encoder codec!");
           return NO;
       }

       pCodecCtx = avcodec_alloc_context3(pCodec);
       if (!pCodecCtx) {
           NSLog(@"Couldn't alloc encoder codec context!");
           return NO;
       }

       // BUG FIX: codec_tag is a container FOURCC, not an AVCodecID —
       // writing AV_CODEC_ID_H264 into it produced a bogus tag. Set
       // codec_id instead and let the muxer pick the tag.
       pCodecCtx->codec_id = AV_CODEC_ID_H264;
       pCodecCtx->bit_rate = 400000;
       pCodecCtx->width = currentVideoFile.size.width;
       pCodecCtx->height = currentVideoFile.size.height;
       pCodecCtx->time_base = (AVRational){1, (int)currentVideoFile.framerate};
       pCodecCtx->framerate = (AVRational){(int)currentVideoFile.framerate, 1};
       pCodecCtx->gop_size = 10;
       pCodecCtx->max_b_frames = 1;
       pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;

       if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
           NSLog(@"Couldn't open the encoder codec!");
           return NO;
       }

       pPacket = av_packet_alloc();

       return YES;
    }

    - (BOOL)addCameraStreams:(DummyFVPVideoFile *)videoFile
    {
       // Create one output stream per camera folder; every stream shares
       // the encoder's codec parameters.
       AVCodecParameters *params = avcodec_parameters_alloc();
       if (!params) {
           NSLog(@"Couldn't allocate codec parameters!");
           return NO;
       }

       if (avcodec_parameters_from_context(params, pCodecCtx) < 0) {
           NSLog(@"Couldn't copy parameters from context!");
           avcodec_parameters_free(&params);   // BUG FIX: was leaked on this path
           return NO;
       }

       BOOL ok = YES;
       // NOTE(review): `count - 1` skips the last camera — confirm this
       // is intentional and not an off-by-one.
       for (int i = 0; i < videoFile.idCameras.count - 1; i++)
       {
           NSString *path = [videoFile.url URLByAppendingPathComponent:videoFile.idCameras[i]].path;
           AVStream *stream = avformat_new_stream(pFormatCtx, pCodec);
           if (!stream) {
               NSLog(@"Couldn't alloc stream!");
               ok = NO;
               break;
           }

           if (avcodec_parameters_copy(stream->codecpar, params) < 0) {
               NSLog(@"Couldn't copy parameters into stream!");
               ok = NO;
               break;
           }

           stream->avg_frame_rate.num = videoFile.framerate;
           stream->avg_frame_rate.den = 1;
           stream->codecpar->codec_tag = 0;    //some silly workaround
           stream->index = i;
           streams[path] = [[VideoStream alloc] initWithStream:stream];
       }

       // BUG FIX: params was never freed (one leak per call).
       avcodec_parameters_free(&params);
       return ok;
    }

    - (BOOL)openIOContext:(NSString *)path
    {
       // Open a write-mode AVIO context for the target file and attach
       // it to the output format context.
       AVIOContext *io = nil;
       int rc = avio_open(&io, [path UTF8String], AVIO_FLAG_WRITE);
       if (rc < 0) {
           return NO;
       }
       pFormatCtx->pb = io;
       return YES;
    }

    And here’s convertation process :

    - (void)launchConvert:(DummyFVPVideoFile *)videoFile
    {
       // Drive the whole conversion: write the header, then encode and
       // mux one packet per camera per frame index until the image
       // sequence runs out (or a write fails), then write the trailer.
       BOOL convertInProgress = YES;
       unsigned int frameCount = 1;
       unsigned long pts = 0;
       BOOL success = NO;

       success = [self writeHeader];
       if (!success) {
           NSLog(@"Couldn't write header!");
           return;
       }

       // Rescale a one-frame tick (1/framerate) into the stream time base.
       AVRational defaultTimeBase;
       defaultTimeBase.num = 1;
       defaultTimeBase.den = videoFile.framerate;
       AVRational streamTimeBase = streams.allValues.firstObject.stream->time_base;

       while (convertInProgress)
       {
           pts += av_rescale_q(1, defaultTimeBase, streamTimeBase);
           for (NSString *path in streams.allKeys)
           {
               UIImage *img = [UIImage imageWithContentsOfFile:[NSString stringWithFormat:@"%@/%u.jpg", path, frameCount]];
               AVPacket *pkt = [self getAVPacket:img withPts:pts];
               // BUG FIX: getAVPacket returns nil when the image is
               // missing (end of sequence) or on an encoder error; the
               // old code dereferenced nil via pkt->data and crashed.
               if (!pkt) {
                   convertInProgress = NO;
                   break;
               }
               if (!pkt->data) {   continue;   }
               pkt->stream_index = streams[path].stream->index;
               //check all settings of pkt

               if (![self writePacket:pkt]) {
                   NSLog(@"Couldn't write packet!");
                   convertInProgress = NO;
                   break;
               }
           }

           frameCount++;
       }

       success = [self writeTrailer];
       if (!success) {
           NSLog(@"Couldn't write trailer!");
           return;
       }

       NSLog(@"Convertation finished!");
       //delegate convertationFinished method
    }

    - (BOOL)writeHeader
    {
       // Write the container header; avformat_write_header reports
       // failure with a negative value.
       int rc = avformat_write_header(pFormatCtx, NULL);
       return rc >= 0 ? YES : NO;
    }

    - (BOOL)writePacket:(AVPacket *)pkt
    {
       // Interleave the packet into the output container; zero means
       // success.
       return av_interleaved_write_frame(pFormatCtx, pkt) == 0 ? YES : NO;
    }

    - (BOOL)writeTrailer
    {
       // Finalize the container; zero means success.
       return av_write_trailer(pFormatCtx) == 0 ? YES : NO;
    }


    /**
    Convert a UIImage into an encoded AVPacket.

    Sends one YUV420P frame to the encoder and tries to receive one
    packet. The encoder buffers several frames before it emits its first
    packet, so AVERROR(EAGAIN) from avcodec_receive_packet() is normal
    early on — it is NOT an error.

    @return the shared pPacket (with data == NULL when the encoder has
            not produced output yet, so callers checking pkt->data skip
            it), or nil on error / missing image. Ownership of pPacket
            stays with this object; callers must not free it.
    */
    - (AVPacket *)getAVPacket:(UIImage *)img withPts:(unsigned long)pts
    {
       if (!img) {
           NSLog(@"imgData is nil!");
           return nil;
       }
       uint8_t *imgData = [self getPixelDataFromImage:img];

       AVFrame *frame_yuv = av_frame_alloc();
       if (!frame_yuv) {
           NSLog(@"frame_yuv is nil!");
           free(imgData);
           return nil;
       }
       frame_yuv->format = AV_PIX_FMT_YUV420P;
       frame_yuv->width = (int)img.size.width;
       frame_yuv->height = (int)img.size.height;

       int ret = av_image_alloc(frame_yuv->data,
                                frame_yuv->linesize,
                                frame_yuv->width,
                                frame_yuv->height,
                                frame_yuv->format,
                                32);
       if (ret < 0) {
           NSLog(@"Couldn't alloc yuv frame!");
           av_frame_free(&frame_yuv);
           free(imgData);
           return nil;
       }

       // Convert the packed RGB buffer to planar YUV420P.
       // NOTE(review): inLineSize is 4 * width (an RGBA stride) but the
       // declared source format is RGB24 (3 bytes/pixel) — verify what
       // getPixelDataFromImage actually produces.
       struct SwsContext *sws_ctx =
           sws_getContext((int)img.size.width, (int)img.size.height, AV_PIX_FMT_RGB24,
                          (int)img.size.width, (int)img.size.height, AV_PIX_FMT_YUV420P,
                          0, NULL, NULL, NULL);
       const uint8_t *scaleData[1] = { imgData };
       int inLineSize[1] = { 4 * img.size.width };
       sws_scale(sws_ctx, scaleData, inLineSize, 0, (int)img.size.height,
                 frame_yuv->data, frame_yuv->linesize);
       sws_freeContext(sws_ctx);        // BUG FIX: context was leaked per call

       frame_yuv->pict_type = AV_PICTURE_TYPE_I;
       frame_yuv->pts = pCodecCtx->frame_number;

       ret = avcodec_send_frame(pCodecCtx, frame_yuv);

       // The encoder has copied (or referenced) the frame; release our
       // buffers. BUG FIX: the old code called av_frame_unref(&frame_yuv)
       // (wrong pointer level — av_frame_unref takes AVFrame *) and never
       // freed the av_image_alloc'd pixel buffer.
       av_freep(&frame_yuv->data[0]);
       av_frame_free(&frame_yuv);
       free(imgData);

       if (ret != 0) {
           NSLog(@"Couldn't send yuv frame!");
           return nil;
       }

       av_packet_unref(pPacket);
       ret = avcodec_receive_packet(pCodecCtx, pPacket);
       if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
           // Expected at startup: the encoder needs more input before it
           // emits a packet. Hand back the empty packet (data == NULL) so
           // the caller's pkt->data check skips it.
           return pPacket;
       }
       if (ret < 0) {
           NSLog(@"Couldn't receive packet!");
           return nil;
       }

       // Stamp the caller-provided timestamp on the real packet.
       pPacket->dts = pPacket->pts = pts;

       // BUG FIX: the old code unref'd AND freed pPacket (with the wrong
       // pointer level) and then returned the dangling pointer — a
       // use-after-free. The packet now stays owned by this object.
       return pPacket;
    }

    Any insights would be helpful. Thanks !