Recherche avancée

Médias (1)

Mot : - Tags -/publicité

Autres articles (48)

  • Personnaliser en ajoutant son logo, sa bannière ou son image de fond

    5 septembre 2013, par

    Certains thèmes prennent en compte trois éléments de personnalisation : l’ajout d’un logo ; l’ajout d’une bannière l’ajout d’une image de fond ;

  • Ecrire une actualité

    21 juin 2013, par

    Présentez les changements dans votre MédiaSPIP ou les actualités de vos projets sur votre MédiaSPIP grâce à la rubrique actualités.
    Dans le thème par défaut spipeo de MédiaSPIP, les actualités sont affichées en bas de la page principale sous les éditoriaux.
    Vous pouvez personnaliser le formulaire de création d’une actualité.
    Formulaire de création d’une actualité Dans le cas d’un document de type actualité, les champs proposés par défaut sont : Date de publication ( personnaliser la date de publication ) (...)

  • Publier sur MédiaSpip

    13 juin 2013

    Puis-je poster des contenus à partir d’une tablette Ipad ?
    Oui, si votre Médiaspip installé est à la version 0.2 ou supérieure. Contacter au besoin l’administrateur de votre MédiaSpip pour le savoir

Sur d’autres sites (7117)

  • FFMpeg CUDA yuvj420p frame conversion to cv::Mat layers shifted

    26 février 2023, par AcidTonic

    I am trying to retrieve hardware decoded H264 frames from the cuda backend of ffmpeg and display them as a cv::Mat. I got decently far and was able to get color images but it seems the conversion is not quite right as the image I get has a green bar at the top and if you look closely the blue parts of the image are offset down and to the right a little bit making everything look a little wonky.

    


    Correct Image as shown by ffplay using the same driver
enter image description here
Image I am getting
enter image description here

    


    Here is the full source code in the hopes someone can help me to get the correct image here...

    


    #include &#xA;&#xA;#include &#xA;&#xA;#include &#xA;&#xA;#include &#xA;&#xA;#include &#xA;&#xA;#include &#xA;&#xA;#include <iostream>&#xA;&#xA;#include <fstream>&#xA;&#xA;#include <cstdlib>&#xA;&#xA;#include <chrono>&#xA;&#xA;#include <cstring>&#xA;&#xA;extern "C" {&#xA;&#xA;  //Linker errors if not inside extern. FFMPEG headers are not C&#x2B;&#x2B; aware&#xA;  #include <libavcodec></libavcodec>avcodec.h>&#xA;&#xA;  #include <libavformat></libavformat>avformat.h>&#xA;&#xA;  #include <libavutil></libavutil>pixdesc.h>&#xA;&#xA;  #include <libavutil></libavutil>hwcontext.h>&#xA;&#xA;  #include <libavutil></libavutil>opt.h>&#xA;&#xA;  #include <libavutil></libavutil>avassert.h>&#xA;&#xA;  #include <libavutil></libavutil>imgutils.h>&#xA;&#xA;}&#xA;&#xA;#include <iomanip>&#xA;&#xA;#include <string>&#xA;&#xA;#include <sstream>&#xA;&#xA;#include <opencv2></opencv2>opencv.hpp>&#xA;&#xA;#ifdef __cplusplus&#xA;extern "C" {&#xA;  #endif // __cplusplus&#xA;  #include <libavdevice></libavdevice>avdevice.h>&#xA;&#xA;  #include <libavfilter></libavfilter>avfilter.h>&#xA;&#xA;  #include <libavformat></libavformat>avio.h>&#xA;&#xA;  #include <libavutil></libavutil>avutil.h>&#xA;&#xA;  #include <libpostproc></libpostproc>postprocess.h>&#xA;&#xA;  #include <libswresample></libswresample>swresample.h>&#xA;&#xA;  #include <libswscale></libswscale>swscale.h>&#xA;&#xA;  #ifdef __cplusplus&#xA;} // end extern "C".&#xA;#endif // __cplusplus&#xA;&#xA;static AVBufferRef * hw_device_ctx = NULL;&#xA;static enum AVPixelFormat hw_pix_fmt;&#xA;static FILE * output_file_fd = NULL;&#xA;cv::Mat output_mat;&#xA;int bgr_size;&#xA;&#xA;static int hw_decoder_init(AVCodecContext * ctx,&#xA;  const enum AVHWDeviceType type) {&#xA;  int err = 0;&#xA;&#xA;  if ((err = av_hwdevice_ctx_create( &amp; hw_device_ctx, type,&#xA;      NULL, NULL, 0)) &lt; 0) {&#xA;    fprintf(stderr, "Failed to create specified HW device.\n");&#xA;    return err;&#xA;  }&#xA;  ctx -> hw_device_ctx = 
av_buffer_ref(hw_device_ctx);&#xA;&#xA;  return err;&#xA;}&#xA;&#xA;static enum AVPixelFormat get_hw_format(AVCodecContext * ctx,&#xA;  const enum AVPixelFormat * pix_fmts) {&#xA;  const enum AVPixelFormat * p;&#xA;&#xA;  for (p = pix_fmts;* p != -1; p&#x2B;&#x2B;) {&#xA;    if ( * p == hw_pix_fmt)&#xA;      return * p;&#xA;  }&#xA;&#xA;  fprintf(stderr, "Failed to get HW surface format.\n");&#xA;  return AV_PIX_FMT_NONE;&#xA;}&#xA;&#xA;static int decode_write(AVCodecContext * avctx, AVPacket * packet) {&#xA;  AVFrame * frame = NULL, * sw_frame = NULL;&#xA;  AVFrame * tmp_frame = NULL;&#xA;  uint8_t * buffer = NULL;&#xA;  int size;&#xA;  int ret = 0;&#xA;&#xA;  ret = avcodec_send_packet(avctx, packet);&#xA;  if (ret &lt; 0) {&#xA;    fprintf(stderr, "Error during decoding\n");&#xA;    return ret;&#xA;  }&#xA;&#xA;  while (1) {&#xA;    if (!(frame = av_frame_alloc()) || !(sw_frame = av_frame_alloc())) {&#xA;      fprintf(stderr, "Can not alloc frame\n");&#xA;      ret = AVERROR(ENOMEM);&#xA;      av_frame_free( &amp; frame);&#xA;      av_frame_free( &amp; sw_frame);&#xA;      av_freep( &amp; buffer);&#xA;      if (ret &lt; 0) {&#xA;        return ret;&#xA;      }&#xA;&#xA;    }&#xA;&#xA;    ret = avcodec_receive_frame(avctx, frame);&#xA;    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {&#xA;      av_frame_free( &amp; frame);&#xA;      av_frame_free( &amp; sw_frame);&#xA;      return 0;&#xA;    } else if (ret &lt; 0) {&#xA;      fprintf(stderr, "Error while decoding\n");&#xA;      av_frame_free( &amp; frame);&#xA;      av_frame_free( &amp; sw_frame);&#xA;      av_freep( &amp; buffer);&#xA;      if (ret &lt; 0) {&#xA;        return ret;&#xA;      }&#xA;&#xA;    }&#xA;&#xA;    if (frame -> format == hw_pix_fmt) {&#xA;      /* retrieve data from GPU to CPU */&#xA;      if ((ret = av_hwframe_transfer_data(sw_frame, frame, 0)) &lt; 0) {&#xA;        fprintf(stderr, "Error transferring the data to system memory\n");&#xA;        av_frame_free( &amp; frame);&#xA;        
av_frame_free( &amp; sw_frame);&#xA;        av_freep( &amp; buffer);&#xA;        if (ret &lt; 0) {&#xA;          return ret;&#xA;        }&#xA;&#xA;      }&#xA;      tmp_frame = sw_frame;&#xA;    } else {&#xA;      tmp_frame = frame;&#xA;    }&#xA;&#xA;    AVPixelFormat format_to_use = AV_PIX_FMT_YUVJ420P;&#xA;    cv::Mat mat_src = cv::Mat(sw_frame -> height &#x2B; (sw_frame -> height / 2), sw_frame -> width, CV_8UC1, sw_frame -> data[0]);&#xA;    cv::Mat out_mat;&#xA;    cv::cvtColor(mat_src, out_mat, cv::COLOR_YUV2RGB_NV21);&#xA;&#xA;    output_mat = out_mat;&#xA;&#xA;    if (output_mat.empty() == false) {&#xA;      cv::imshow("image", output_mat);&#xA;      cv::waitKey(1);&#xA;    }&#xA;&#xA;    av_frame_free( &amp; frame);&#xA;    av_frame_free( &amp; sw_frame);&#xA;    av_freep( &amp; buffer);&#xA;    return ret;&#xA;  }&#xA;}&#xA;&#xA;TEST_CASE("CUDAH264", "Tests hardware h264 decoding") {&#xA;&#xA;  AVFormatContext * input_ctx = NULL;&#xA;  int video_stream, ret;&#xA;  AVStream * video = NULL;&#xA;  AVCodecContext * decoder_ctx = NULL;&#xA;  AVCodec * decoder = NULL;&#xA;  AVPacket * packet = NULL;&#xA;  enum AVHWDeviceType type;&#xA;  int i;&#xA;&#xA;  std::string device_type = "cuda";&#xA;  std::string input_file = "rtsp://10.100.2.152"; //My H264 network stream here...&#xA;&#xA;  /* The stream data is below...&#xA;  Input #0, rtsp, from &#x27;rtsp://10.100.2.152&#x27;:&#xA;    Metadata:&#xA;      title           : VCP IPC Realtime stream&#xA;    Duration: N/A, start: 0.000000, bitrate: N/A&#xA;    Stream #0:0: Video: h264 (High), yuvj420p(pc, bt709, progressive), 1920x1080, 10 fps, 10 tbr, 90k tbn, 20 tbc&#xA;  */&#xA;&#xA;  type = av_hwdevice_find_type_by_name(device_type.c_str());&#xA;  if (type == AV_HWDEVICE_TYPE_NONE) {&#xA;    fprintf(stderr, "Device type %s is not supported.\n", device_type.c_str());&#xA;    fprintf(stderr, "Available device types:");&#xA;    while ((type = av_hwdevice_iterate_types(type)) != AV_HWDEVICE_TYPE_NONE)&#xA;      
fprintf(stderr, " %s", av_hwdevice_get_type_name(type));&#xA;    fprintf(stderr, "\n");&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  packet = av_packet_alloc();&#xA;  if (!packet) {&#xA;    fprintf(stderr, "Failed to allocate AVPacket\n");&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  /* open the input file */&#xA;  if (avformat_open_input( &amp; input_ctx, input_file.c_str(), NULL, NULL) != 0) {&#xA;    fprintf(stderr, "Cannot open input file &#x27;%s&#x27;\n", input_file.c_str());&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  if (avformat_find_stream_info(input_ctx, NULL) &lt; 0) {&#xA;    fprintf(stderr, "Cannot find input stream information.\n");&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  av_dump_format(input_ctx, 0, input_file.c_str(), 0);&#xA;&#xA;  for (int i = 0; i &lt; input_ctx -> nb_streams; i&#x2B;&#x2B;) {&#xA;    auto pCodec = avcodec_find_decoder(input_ctx -> streams[i] -> codecpar -> codec_id);&#xA;    auto pCodecCtx = avcodec_alloc_context3(pCodec);&#xA;    avcodec_parameters_to_context(pCodecCtx, input_ctx -> streams[i] -> codecpar);&#xA;&#xA;    printf("Found Video stream with ID: %d\n", input_ctx -> streams[i] -> id);&#xA;    printf("\t Stream Index: %d\n", input_ctx -> streams[i] -> index);&#xA;&#xA;    AVCodecParameters * codecpar = input_ctx -> streams[i] -> codecpar;&#xA;    printf("\t Codec Type: %s\n", av_get_media_type_string(codecpar -> codec_type));&#xA;    printf("\t Side data count: %d\n", input_ctx -> streams[i] -> nb_side_data);&#xA;    printf("\t Pixel format: %i\n", input_ctx -> streams[i] -> codecpar -> format);&#xA;    printf("\t Pixel Format Name: %s\n", av_get_pix_fmt_name((AVPixelFormat) input_ctx -> streams[i] -> codecpar -> format));&#xA;    printf("\t Metadata count: %d\n", av_dict_count(input_ctx -> streams[i] -> metadata));&#xA;  }&#xA;&#xA;  /* find the video stream information */&#xA;  ret = av_find_best_stream(input_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, 
&amp; decoder, 0);&#xA;  if (ret &lt; 0) {&#xA;    fprintf(stderr, "Cannot find a video stream in the input file\n");&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  video_stream = ret;&#xA;&#xA;  for (i = 0;; i&#x2B;&#x2B;) {&#xA;    const AVCodecHWConfig * config = avcodec_get_hw_config(decoder, i);&#xA;    if (!config) {&#xA;      fprintf(stderr, "Decoder %s does not support device type %s.\n",&#xA;        decoder -> name, av_hwdevice_get_type_name(type));&#xA;      throw std::runtime_error("Error");&#xA;    }&#xA;    if (config -> methods &amp; AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX &amp;&amp;&#xA;      config -> device_type == type) {&#xA;      hw_pix_fmt = config -> pix_fmt;&#xA;      break;&#xA;    }&#xA;  }&#xA;&#xA;  if (!(decoder_ctx = avcodec_alloc_context3(decoder))) {&#xA;    throw std::runtime_error("NO MEMORY");&#xA;  }&#xA;&#xA;  video = input_ctx -> streams[video_stream];&#xA;  if (avcodec_parameters_to_context(decoder_ctx, video -> codecpar) &lt; 0) {&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  decoder_ctx -> get_format = get_hw_format;&#xA;&#xA;  if (hw_decoder_init(decoder_ctx, type) &lt; 0) {&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  if ((ret = avcodec_open2(decoder_ctx, decoder, NULL)) &lt; 0) {&#xA;    fprintf(stderr, "Failed to open codec for stream #%u\n", video_stream);&#xA;    throw std::runtime_error("Error");&#xA;  }&#xA;&#xA;  /* actual decoding and dump the raw data */&#xA;  while (ret >= 0) {&#xA;    if ((ret = av_read_frame(input_ctx, packet)) &lt; 0)&#xA;      break;&#xA;&#xA;    if (video_stream == packet -> stream_index)&#xA;      ret = decode_write(decoder_ctx, packet);&#xA;&#xA;    av_packet_unref(packet);&#xA;  }&#xA;&#xA;  /* flush the decoder */&#xA;  ret = decode_write(decoder_ctx, NULL);&#xA;&#xA;  if (output_file_fd) {&#xA;    fclose(output_file_fd);&#xA;  }&#xA;  av_packet_free( &amp; packet);&#xA;  avcodec_free_context( &amp; decoder_ctx);&#xA;  avformat_close_input( 
&amp; input_ctx);&#xA;  av_buffer_unref( &amp; hw_device_ctx);&#xA;&#xA;}&#xA;</sstream></string></iomanip></cstring></chrono></cstdlib></fstream></iostream>

    &#xA;

  • RaspberryPi HLS streaming with nginx and ffmpeg ; v4l2 error : ioctl(VIDIOC_STREAMON) : Protocol error

    22 janvier 2021, par Mirco Weber

    I'm trying to realize a baby monitoring with a Raspberry Pi (Model 4B, 4GB RAM) and an ordinary Webcam (with integrated Mic).&#xA;I followed this Tutorial : https://github.com/DeTeam/webcam-stream/blob/master/Tutorial.md

    &#xA;

    Shortly described :

    &#xA;

      &#xA;
    1. I installed and configured an nginx server with rtmp module enabled.
    2. &#xA;

    3. I installed ffmpeg with this configuration —enable-gpl —enable-nonfree —enable-mmal —enable-omx-rpi
    4. &#xA;

    5. I tried to stream ;)
    6. &#xA;

    &#xA;

    The configuration of nginx seems to be working (sometimes streaming works, the server starts without any complication and when the server is up and running, the webpage is displayed).&#xA;The configuration of ffmpeg seems to be fine as well, since streaming sometimes works...

    &#xA;

    I was trying a couple of different ffmpeg-commands ; all of them are sometimes working and sometimes resulting in an error.&#xA;The command looks like following :

    &#xA;

    ffmpeg -re&#xA;-f v4l2&#xA;-i /dev/video0&#xA;-f alsa&#xA;-ac 1&#xA;-thread_queue_size 4096&#xA;-i hw:CARD=Camera,DEV=0&#xA;-profile:v high&#xA;-level:v 4.1&#xA;-vcodec h264_omx&#xA;-r 10&#xA;-b:v 512k&#xA;-s 640x360&#xA;-acodec aac&#xA;-strict&#xA;-2&#xA;-ac 2&#xA;-ab 32k&#xA;-ar 44100&#xA;-f flv&#xA;rtmp://localhost/show/stream;&#xA;

    &#xA;

    Note : I rearranged the code to make it easier to read. In the terminal, it is all in one line.&#xA;Note : There is no difference when using -f video4linux2 instead of -f v4l2

    &#xA;

    The camera is recognized by the system :

    &#xA;

    pi@raspberrypi:~ $ v4l2-ctl --list-devices&#xA;bcm2835-codec-decode (platform:bcm2835-codec):&#xA;    /dev/video10&#xA;    /dev/video11&#xA;    /dev/video12&#xA;&#xA;bcm2835-isp (platform:bcm2835-isp):&#xA;    /dev/video13&#xA;    /dev/video14&#xA;    /dev/video15&#xA;    /dev/video16&#xA;&#xA;HD Web Camera: HD Web Camera (usb-0000:01:00.0-1.2):&#xA;    /dev/video0&#xA;    /dev/video1&#xA;

    &#xA;

    When only using -i /dev/video0, audio transmission never worked.&#xA;The output of arecord -L was :

    &#xA;

    pi@raspberrypi:~ $ arecord -L&#xA;default&#xA;    Playback/recording through the PulseAudio sound server&#xA;null&#xA;    Discard all samples (playback) or generate zero samples (capture)&#xA;jack&#xA;    JACK Audio Connection Kit&#xA;pulse&#xA;    PulseAudio Sound Server&#xA;usbstream:CARD=Headphones&#xA;    bcm2835 Headphones&#xA;    USB Stream Output&#xA;sysdefault:CARD=Camera&#xA;    HD Web Camera, USB Audio&#xA;    Default Audio Device&#xA;front:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    Front speakers&#xA;surround21:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    2.1 Surround output to Front and Subwoofer speakers&#xA;surround40:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    4.0 Surround output to Front and Rear speakers&#xA;surround41:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    4.1 Surround output to Front, Rear and Subwoofer speakers&#xA;surround50:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    5.0 Surround output to Front, Center and Rear speakers&#xA;surround51:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    5.1 Surround output to Front, Center, Rear and Subwoofer speakers&#xA;surround71:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    7.1 Surround output to Front, Center, Side, Rear and Woofer speakers&#xA;iec958:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    IEC958 (S/PDIF) Digital Audio Output&#xA;dmix:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    Direct sample mixing device&#xA;dsnoop:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    Direct sample snooping device&#xA;hw:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    Direct hardware device without any conversions&#xA;plughw:CARD=Camera,DEV=0&#xA;    HD Web Camera, USB Audio&#xA;    Hardware device with all software conversions&#xA;usbstream:CARD=Camera&#xA;    HD Web Camera&#xA;    USB Stream Output&#xA;

    &#xA;

    that's why i added -i hw:CARD=Camera,DEV=0.

    &#xA;

    As mentioned above, it worked very well a couple of times with this configuration and commands.&#xA;But very often, i get the following error message when starting to stream :

    &#xA;

    pi@raspberrypi:~ $ ffmpeg -re -f video4linux2 -i /dev/video0 -f alsa -ac 1 -thread_queue_size 4096 -i hw:CARD=Camera,DEV=0 -profile:v high -level:v 4.1 -vcodec h264_omx -r 10 -b:v 512k -s 640x360 -acodec aac -strict -2 -ac 2 -ab 32k -ar 44100 -f flv rtmp://localhost/show/stream&#xA;ffmpeg version N-100673-g553eb07737 Copyright (c) 2000-2021 the FFmpeg developers&#xA;  built with gcc 8 (Raspbian 8.3.0-6&#x2B;rpi1)&#xA;  configuration: --enable-gpl --enable-nonfree --enable-mmal --enable-omx-rpi --extra-ldflags=-latomic&#xA;  libavutil      56. 63.101 / 56. 63.101&#xA;  libavcodec     58.117.101 / 58.117.101&#xA;  libavformat    58. 65.101 / 58. 65.101&#xA;  libavdevice    58. 11.103 / 58. 11.103&#xA;  libavfilter     7. 96.100 /  7. 96.100&#xA;  libswscale      5.  8.100 /  5.  8.100&#xA;  libswresample   3.  8.100 /  3.  8.100&#xA;  libpostproc    55.  8.100 / 55.  8.100&#xA;[video4linux2,v4l2 @ 0x2ea4600] ioctl(VIDIOC_STREAMON): Protocol error&#xA;/dev/video0: Protocol error&#xA;

    &#xA;

    And when I'm switching to /dev/video1 (since this was also an output for v4l2-ctl --list-devices), I get the following error message :

    &#xA;

    pi@raspberrypi:~ $ ffmpeg -re -f v4l2 -i /dev/video1 -f alsa -ac 1 -thread_queue_size 4096 -i hw:CARD=Camera,DEV=0 -profile:v high -level:v 4.1 -vcodec h264_omx -r 10 -b:v 512k -s 640x360 -acodec aac -strict -2 -ac 2 -ab 32k -ar 44100 -f flv rtmp://localhost/show/stream&#xA;ffmpeg version N-100673-g553eb07737 Copyright (c) 2000-2021 the FFmpeg developers&#xA;  built with gcc 8 (Raspbian 8.3.0-6&#x2B;rpi1)&#xA;  configuration: --enable-gpl --enable-nonfree --enable-mmal --enable-omx-rpi --extra-ldflags=-latomic&#xA;  libavutil      56. 63.101 / 56. 63.101&#xA;  libavcodec     58.117.101 / 58.117.101&#xA;  libavformat    58. 65.101 / 58. 65.101&#xA;  libavdevice    58. 11.103 / 58. 11.103&#xA;  libavfilter     7. 96.100 /  7. 96.100&#xA;  libswscale      5.  8.100 /  5.  8.100&#xA;  libswresample   3.  8.100 /  3.  8.100&#xA;  libpostproc    55.  8.100 / 55.  8.100&#xA;[video4linux2,v4l2 @ 0x1aa4610] ioctl(VIDIOC_G_INPUT): Inappropriate ioctl for device&#xA;/dev/video1: Inappropriate ioctl for device&#xA;

    &#xA;

    When using the video0 input, the webcam's LED that indicates an active access is constantly on. When using video1, it is not.

    &#xA;

    After hours and days of googling and tears and whiskey, for the sake of my liver, my marriage and my physical and mental health, I'm very sincerely asking for your help...&#xA;What the f**k is happening and what can I do to make it work ???

    &#xA;

    Thanks everybody :)

    &#xA;

    UPDATE 1 :

    &#xA;

      &#xA;
    1. using the full path to ffmpeg does not change anything...
    2. &#xA;

    3. /dev/video0 and /dev/video1 have access rights for everybody
    4. &#xA;

    5. sudo ffmpeg ... does not change anything as well
    6. &#xA;

    7. the problem seems to be at an "early stage". Stripping the command down to ffmpeg -i /dev/video0 results in the same problem
    8. &#xA;

    &#xA;

    UPDATE 2 :
    &#xA;It seems that everything is working when I first start another Application that needs access to the webcam and then ffmpeg...&#xA;Might be some driver issue, but when I'm looking for loaded modules with lsmod, there is absolutely no change before and after I started the application...&#xA;Any help still appreciated...

    &#xA;

    UPDATE 3 :
    &#xA;I was checking the output of dmesg.
    &#xA;When I started the first application I received this message :
    &#xA;uvcvideo: Failed to query (GET_DEF) UVC control 12 on unit 2: -32 (exp. 4).&#xA;
    And when I started ffmpeg, nothing happened but everything worked...

    &#xA;

  • FFMPEG can't find audio device for webcam on one computer

    13 mars 2018, par David

    I am building a webcam recording app. I am using FFMPEG to capture the audio and video. I have a Logitech C930 webcam.

    >ffmpeg -list_devices true -f dshow -i dummy
    ffmpeg version N-90155-g28924f4b48 Copyright (c) 2000-2018 the FFmpeg developers
      built with gcc 7.3.0 (GCC)
      configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-amf —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth
      libavutil      56.  7.101 / 56.  7.101
      libavcodec     58. 12.102 / 58. 12.102
      libavformat    58.  9.100 / 58.  9.100
      libavdevice    58.  2.100 / 58.  2.100
      libavfilter     7. 12.100 /  7. 12.100
      libswscale      5.  0.101 /  5.  0.101
      libswresample   3.  0.101 /  3.  0.101
      libpostproc    55.  0.100 / 55.  0.100
    [dshow @ 0000024786c38680] DirectShow video devices (some may be both video and audio devices)
    [dshow @ 0000024786c38680]  "Microsoft Camera Front"
    [dshow @ 0000024786c38680]     Alternative name "@device_pnp_~?\display#int3470#4&1835d135&0&uid13424#65e8773d-8f56-11d0-a3b9-00a0c9223196bf89b5a5-61f7-4127-a279-e187013d7caf"
    [dshow @ 0000024786c38680]  "Microsoft Camera Rear"
    [dshow @ 0000024786c38680]     Alternative name "@device_pnp_~?\display#int3470#4&1835d135&0&uid13424#65e8773d-8f56-11d0-a3b9-00a0c92231967c9bbcea-909c-47b3-8cf9-2aa8237e1d4b"
    [dshow @ 0000024786c38680]  "Logitech Webcam C930e"
    [dshow @ 0000024786c38680]     Alternative name "@device_pnp_~?\usb#vid_046d&pid_0843&mi_00#8&dd2d9e1&0&0000#65e8773d-8f56-11d0-a3b9-00a0c9223196\global"
    [dshow @ 0000024786c38680]  "ChromaCam"
    [dshow @ 0000024786c38680]     Alternative name "@device_sw_860BB310-5D01-11D0-BD3B-00A0C911CE863403512D-FE5D-4D68-B053-E86D662E4D58"
    [dshow @ 0000024786c38680] DirectShow audio devices
    [dshow @ 0000024786c38680]  "Microphone (2- Logitech Webcam C930e)"
    [dshow @ 0000024786c38680]     Alternative name "@device_cm_33D9A762-90C8-11D0-BD43-00A0C911CE86\wave_1AF7CBFB-477F-44D9-B8C4-E178F036B5D1"
    dummy : Immediate exit requested

    Notice the "Microphone (2- Logitech Webcam C930e)" audio device. I can capture from it using FFMPEG.

    Now, I move the same camera to a different computer (Intel Compute Stick) running nearly the same build of windows (16299.251 on the stick, 16299.248 on the laptop). Stick is 32-bit home and laptop is 64-bit Pro (shouldn’t matter, should it ?).
    I run the same command with the exact same camera and same drivers installed :

    >ffmpeg -list_devices true -f dshow -i dummy
    ffmpeg version 3.4.2 Copyright (c) 2000-2018 the FFmpeg developers
      built with gcc 7.3.0 (GCC)
      configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth
      libavutil      55. 78.100 / 55. 78.100
      libavcodec     57.107.100 / 57.107.100
      libavformat    57. 83.100 / 57. 83.100
      libavdevice    57. 10.100 / 57. 10.100
      libavfilter     6.107.100 /  6.107.100
      libswscale      4.  8.100 /  4.  8.100
      libswresample   2.  9.100 /  2.  9.100
      libpostproc    54.  7.100 / 54.  7.100
    [dshow @ 00797be0] DirectShow video devices (some may be both video and audio devices)
    [dshow @ 00797be0]  "Logitech Webcam C930e"
    [dshow @ 00797be0]     Alternative name "@device_pnp_~?\usb#vid_046d&pid_0843&mi_00#6&dd2d9e1&0&0000#65e8773d-8f56-11d0-a3b9-00a0c9223196\global"
    [dshow @ 00797be0] DirectShow audio devices
    [dshow @ 00797be0] Could not enumerate audio only devices (or none found).
    dummy : Immediate exit requested
    

    No Audio device listed ? What ? If I use python’s sounddevice module, it sees the audio device :

    > python -m sounddevice
    0 Microsoft Sound Mapper - Output, MME (0 in, 2 out)
     1 Remote Audio, MME (0 in, 2 out)
      2 Primary Sound Driver, Windows DirectSound (0 in, 2 out)
      3 Remote Audio, Windows DirectSound (0 in, 2 out)
      4 Remote Audio, Windows WASAPI (0 in, 2 out)
      5 Microphone (Logitech Webcam C930e), Windows WDM-KS (2 in, 0 out)
    

    I can capture from that audio device with sounddevice, but I can’t get FFMPEG to capture from it.

    >ffmpeg -report -rtbufsize 50000k-f dshow -i video="Logitech Webcam C930e":audio="Microphone (Logitech Webcam C930e)" -t 120 -y -s 1920x1080 -q 2 -video_pin_name 1 -audio_pin_name 5 -filter:v setpts=1*PTS Watchmen_20180312095119_c.avi
    ffmpeg started on 2018-03-12 at 17:07:08
    Report written to "ffmpeg-20180312-170708.log"
    ffmpeg version 3.4.2 Copyright (c) 2000-2018 the FFmpeg developers
      built with gcc 7.3.0 (GCC)
      configuration : —enable-gpl —enable-version3 —enable-sdl2 —enable-bzlib —enable-fontconfig —enable-gnutls —enable-iconv —enable-libass —enable-libbluray —enable-libfreetype —enable-libmp3lame —enable-libopencore-amrnb —enable-libopencore-amrwb —enable-libopenjpeg —enable-libopus —enable-libshine —enable-libsnappy —enable-libsoxr —enable-libtheora —enable-libtwolame —enable-libvpx —enable-libwavpack —enable-libwebp —enable-libx264 —enable-libx265 —enable-libxml2 —enable-libzimg —enable-lzma —enable-zlib —enable-gmp —enable-libvidstab —enable-libvorbis —enable-libvo-amrwbenc —enable-libmysofa —enable-libspeex —enable-libxvid —enable-libmfx —enable-cuda —enable-cuvid —enable-d3d11va —enable-nvenc —enable-dxva2 —enable-avisynth
      libavutil      55. 78.100 / 55. 78.100
      libavcodec     57.107.100 / 57.107.100
      libavformat    57. 83.100 / 57. 83.100
      libavdevice    57. 10.100 / 57. 10.100
      libavfilter     6.107.100 /  6.107.100
      libswscale      4.  8.100 /  4.  8.100
      libswresample   2.  9.100 /  2.  9.100
      libpostproc    54.  7.100 / 54.  7.100
    video=Logitech Webcam C930e:audio=Microphone (Logitech Webcam C930e) : No such file or directory
    

    I have been trying to debug by removing and reinstalling drivers, etc, but to no avail. I also grabbed a Microsoft Lifecam webcam and got similar results. I can see the lifecam’s audio on the laptop but not the stick in just the same manner as the logitech camera.
    How can I get the audio device to show up on the stick so I can capture from it ?